Diffstat (limited to 'deps/rabbitmq_mqtt')
68 files changed, 16363 insertions, 0 deletions
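Before the file-by-file diff, a minimal usage sketch for the plugin this change introduces. The commands follow the README and Makefile defaults added below; `mosquitto_pub` is an external MQTT client assumed to be installed, and the `guest`/`guest` credentials, port 1883, and the `amq.topic` exchange are the plugin's shipped defaults, not anything specific to this diff.

    # Enable the MQTT plugin on a running RabbitMQ node (it ships with the distribution)
    rabbitmq-plugins enable rabbitmq_mqtt

    # Publish a test message over MQTT against the default listener and credentials;
    # the topic is routed through the amq.topic exchange on the default vhost
    mosquitto_pub -h localhost -p 1883 -u guest -P guest -t test/topic -m hello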
diff --git a/deps/rabbitmq_mqtt/.gitignore b/deps/rabbitmq_mqtt/.gitignore new file mode 100644 index 0000000000..7f34fc74e2 --- /dev/null +++ b/deps/rabbitmq_mqtt/.gitignore @@ -0,0 +1,24 @@ +.sw? +.*.sw? +*.beam +.idea/* +/.erlang.mk/ +/cover/ +/deps/ +/doc/ +/ebin/ +/escript/ +/escript.lock +/log/ +/logs/ +/plugins/ +/plugins.lock +/sbin/ +/sbin.lock +/xrefr +debug/* + +test/config_schema_SUITE_data/schema/ +test/.idea/* + +rabbitmq_mqtt.d diff --git a/deps/rabbitmq_mqtt/.travis.yml b/deps/rabbitmq_mqtt/.travis.yml new file mode 100644 index 0000000000..0eb9abafd6 --- /dev/null +++ b/deps/rabbitmq_mqtt/.travis.yml @@ -0,0 +1,62 @@ +# vim:sw=2:et: + +os: linux +dist: xenial +language: elixir +notifications: + email: + recipients: + - alerts@rabbitmq.com + on_success: never + on_failure: always +addons: + apt: + packages: + - awscli + - maven +cache: + apt: true +env: + global: + - secure: Jw9u8yRg1vhWLtMCSrUFTxVnBuXvy6wd+voso4GlwGjeRzyc4YzSHvCAElENULHiWDJetbJGU/D9oGBYkRnYR2nwjPu5bvW9rsZYYUKWHE3V20ddTHHX+7lU5WeTWDkFy9VlD2nUiUplCNX8zuTU9T59KaP2ifcOv1Gr47Wo02o= + - secure: ZaLpWOIx0uR/yqQRKMx8pZ0+AhJx/GIczZmJhq1m0huGhJwqdCefK5iwUwzTXXN3MX+WBv8Jj1rCnmsKqtpoNFMM3Twyl518KBOGzrzg4asPefH+pj5QNvpcmL4PKe8KuLAV0zxmFzrj9OoV8P4MAMHq83MNfjan56cd9SLjbD0= + + # $base_rmq_ref is used by rabbitmq-components.mk to select the + # appropriate branch for dependencies. + - base_rmq_ref=master + +elixir: + - '1.9' +otp_release: + - '21.3' + - '22.2' + +install: + # This project being an Erlang one (we just set language to Elixir + # to ensure it is installed), we don't want Travis to run mix(1) + # automatically as it will break. + skip + +script: + # $current_rmq_ref is also used by rabbitmq-components.mk to select + # the appropriate branch for dependencies. + - make check-rabbitmq-components.mk + current_rmq_ref="${TRAVIS_PULL_REQUEST_BRANCH:-${TRAVIS_BRANCH}}" + - make xref + current_rmq_ref="${TRAVIS_PULL_REQUEST_BRANCH:-${TRAVIS_BRANCH}}" + - make tests + current_rmq_ref="${TRAVIS_PULL_REQUEST_BRANCH:-${TRAVIS_BRANCH}}" + +after_failure: + - | + cd "$TRAVIS_BUILD_DIR" + if test -d logs && test "$AWS_ACCESS_KEY_ID" && test "$AWS_SECRET_ACCESS_KEY"; then + archive_name="$(basename "$TRAVIS_REPO_SLUG")-$TRAVIS_JOB_NUMBER" + + tar -c --transform "s/^logs/${archive_name}/" -f - logs | \ + xz > "${archive_name}.tar.xz" + + aws s3 cp "${archive_name}.tar.xz" s3://server-release-pipeline/travis-ci-logs/ \ + --region eu-west-1 \ + --acl public-read + fi diff --git a/deps/rabbitmq_mqtt/.travis.yml.patch b/deps/rabbitmq_mqtt/.travis.yml.patch new file mode 100644 index 0000000000..b7ddd9414e --- /dev/null +++ b/deps/rabbitmq_mqtt/.travis.yml.patch @@ -0,0 +1,10 @@ +--- ../rabbit_common/.travis.yml 2019-09-13 13:48:46.258483000 +0200 ++++ .travis.yml 2020-03-03 13:53:33.515196000 +0100 +@@ -13,6 +13,7 @@ + apt: + packages: + - awscli ++ - maven + cache: + apt: true + env: diff --git a/deps/rabbitmq_mqtt/CODE_OF_CONDUCT.md b/deps/rabbitmq_mqtt/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..08697906fd --- /dev/null +++ b/deps/rabbitmq_mqtt/CODE_OF_CONDUCT.md @@ -0,0 +1,44 @@ +# Contributor Code of Conduct + +As contributors and maintainers of this project, and in the interest of fostering an open +and welcoming community, we pledge to respect all people who contribute through reporting +issues, posting feature requests, updating documentation, submitting pull requests or +patches, and other activities. 
+ +We are committed to making participation in this project a harassment-free experience for +everyone, regardless of level of experience, gender, gender identity and expression, +sexual orientation, disability, personal appearance, body size, race, ethnicity, age, +religion, or nationality. + +Examples of unacceptable behavior by participants include: + + * The use of sexualized language or imagery + * Personal attacks + * Trolling or insulting/derogatory comments + * Public or private harassment + * Publishing other's private information, such as physical or electronic addresses, + without explicit permission + * Other unethical or unprofessional conduct + +Project maintainers have the right and responsibility to remove, edit, or reject comments, +commits, code, wiki edits, issues, and other contributions that are not aligned to this +Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors +that they deem inappropriate, threatening, offensive, or harmful. + +By adopting this Code of Conduct, project maintainers commit themselves to fairly and +consistently applying these principles to every aspect of managing this project. Project +maintainers who do not follow or enforce the Code of Conduct may be permanently removed +from the project team. + +This Code of Conduct applies both within project spaces and in public spaces when an +individual is representing the project or its community. + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by +contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will +be reviewed and investigated and will result in a response that is deemed necessary and +appropriate to the circumstances. Maintainers are obligated to maintain confidentiality +with regard to the reporter of an incident. + +This Code of Conduct is adapted from the +[Contributor Covenant](https://contributor-covenant.org), version 1.3.0, available at +[contributor-covenant.org/version/1/3/0/](https://contributor-covenant.org/version/1/3/0/) diff --git a/deps/rabbitmq_mqtt/CONTRIBUTING.md b/deps/rabbitmq_mqtt/CONTRIBUTING.md new file mode 100644 index 0000000000..592e7ced57 --- /dev/null +++ b/deps/rabbitmq_mqtt/CONTRIBUTING.md @@ -0,0 +1,99 @@ +Thank you for using RabbitMQ and for taking the time to contribute to the project. +This document has two main parts: + + * when and how to file GitHub issues for RabbitMQ projects + * how to submit pull requests + +They intend to save you and RabbitMQ maintainers some time, so please +take a moment to read through them. + +## Overview + +### GitHub issues + +The RabbitMQ team uses GitHub issues for _specific actionable items_ that +engineers can work on. This assumes the following: + +* GitHub issues are not used for questions, investigations, root cause + analysis, discussions of potential issues, etc (as defined by this team) +* Enough information is provided by the reporter for maintainers to work with + +The team receives many questions through various venues every single +day. Frequently, these questions do not include the necessary details +the team needs to begin useful work. GitHub issues can very quickly +turn into a something impossible to navigate and make sense +of. Because of this, questions, investigations, root cause analysis, +and discussions of potential features are all considered to be +[mailing list][rmq-users] material. If you are unsure where to begin, +the [RabbitMQ users mailing list][rmq-users] is the right place. 
+ +Getting all the details necessary to reproduce an issue, make a +conclusion or even form a hypothesis about what's happening can take a +fair amount of time. Please help others help you by providing a way to +reproduce the behavior you're observing, or at least sharing as much +relevant information as possible on the [RabbitMQ users mailing +list][rmq-users]. + +Please provide versions of the software used: + + * RabbitMQ server + * Erlang + * Operating system version (and distribution, if applicable) + * All client libraries used + * RabbitMQ plugins (if applicable) + +The following information greatly helps in investigating and reproducing issues: + + * RabbitMQ server logs + * A code example or terminal transcript that can be used to reproduce + * Full exception stack traces (a single line message is not enough!) + * `rabbitmqctl report` and `rabbitmqctl environment` output + * Other relevant details about the environment and workload, e.g. a traffic capture + * Feel free to edit out hostnames and other potentially sensitive information. + +To make collecting much of this and other environment information, use +the [`rabbitmq-collect-env`][rmq-collect-env] script. It will produce an archive with +server logs, operating system logs, output of certain diagnostics commands and so on. +Please note that **no effort is made to scrub any information that may be sensitive**. + +### Pull Requests + +RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions. +Pull requests is the primary place of discussing code changes. + +Here's the recommended workflow: + + * [Fork the repository][github-fork] or repositories you plan on contributing to. If multiple + repositories are involved in addressing the same issue, please use the same branch name + in each repository + * Create a branch with a descriptive name in the relevant repositories + * Make your changes, run tests (usually with `make tests`), commit with a + [descriptive message][git-commit-msgs], push to your fork + * Submit pull requests with an explanation what has been changed and **why** + * Submit a filled out and signed [Contributor Agreement][ca-agreement] if needed (see below) + * Be patient. We will get to your pull request eventually + +If what you are going to work on is a substantial change, please first +ask the core team for their opinion on the [RabbitMQ users mailing list][rmq-users]. + +## Code of Conduct + +See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md). + +## Contributor Agreement + +If you want to contribute a non-trivial change, please submit a signed +copy of our [Contributor Agreement][ca-agreement] around the time you +submit your pull request. This will make it much easier (in some +cases, possible) for the RabbitMQ team at Pivotal to merge your +contribution. + +## Where to Ask Questions + +If something isn't clear, feel free to ask on our [mailing list][rmq-users]. + +[rmq-collect-env]: https://github.com/rabbitmq/support-tools/blob/master/scripts/rabbitmq-collect-env +[git-commit-msgs]: https://chris.beams.io/posts/git-commit/ +[rmq-users]: https://groups.google.com/forum/#!forum/rabbitmq-users +[ca-agreement]: https://cla.pivotal.io/sign/rabbitmq +[github-fork]: https://help.github.com/articles/fork-a-repo/ diff --git a/deps/rabbitmq_mqtt/LICENSE b/deps/rabbitmq_mqtt/LICENSE new file mode 100644 index 0000000000..f2da65d175 --- /dev/null +++ b/deps/rabbitmq_mqtt/LICENSE @@ -0,0 +1,4 @@ +This package is licensed under the MPL 2.0. For the MPL 2.0, please see LICENSE-MPL-RabbitMQ. 
+ +If you have any questions regarding licensing, please contact us at +info@rabbitmq.com. diff --git a/deps/rabbitmq_mqtt/LICENSE-MPL-RabbitMQ b/deps/rabbitmq_mqtt/LICENSE-MPL-RabbitMQ new file mode 100644 index 0000000000..14e2f777f6 --- /dev/null +++ b/deps/rabbitmq_mqtt/LICENSE-MPL-RabbitMQ @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. 
Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. 
However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. 
* +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/deps/rabbitmq_mqtt/Makefile b/deps/rabbitmq_mqtt/Makefile new file mode 100644 index 0000000000..1dbbfe037d --- /dev/null +++ b/deps/rabbitmq_mqtt/Makefile @@ -0,0 +1,54 @@ +PROJECT = rabbitmq_mqtt +PROJECT_DESCRIPTION = RabbitMQ MQTT Adapter +PROJECT_MOD = rabbit_mqtt + +define PROJECT_ENV +[ + {default_user, <<"guest">>}, + {default_pass, <<"guest">>}, + {ssl_cert_login,false}, + %% To satisfy an unfortunate expectation from popular MQTT clients. 
+ {allow_anonymous, true}, + {vhost, <<"/">>}, + {exchange, <<"amq.topic">>}, + {subscription_ttl, 86400000}, %% 24 hours + {retained_message_store, rabbit_mqtt_retained_msg_store_dets}, + %% only used by DETS store + {retained_message_store_dets_sync_interval, 2000}, + {prefetch, 10}, + {ssl_listeners, []}, + {tcp_listeners, [1883]}, + {num_tcp_acceptors, 10}, + {num_ssl_acceptors, 10}, + {tcp_listen_options, [{backlog, 128}, + {nodelay, true}]}, + {proxy_protocol, false}, + {sparkplug, false} + ] +endef + +define PROJECT_APP_EXTRA_KEYS + {broker_version_requirements, []} +endef + +DEPS = ranch rabbit_common rabbit amqp_client ra +TEST_DEPS = emqttc ct_helper rabbitmq_ct_helpers rabbitmq_ct_client_helpers + +dep_ct_helper = git https://github.com/extend/ct_helper.git master +dep_emqttc = git https://github.com/rabbitmq/emqttc.git remove-logging + +DEP_EARLY_PLUGINS = rabbit_common/mk/rabbitmq-early-plugin.mk +DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk + +# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be +# reviewed and merged. + +ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git +ERLANG_MK_COMMIT = rabbitmq-tmp + +include rabbitmq-components.mk +include erlang.mk + + +clean:: + if test -d test/java_SUITE_data; then cd test/java_SUITE_data && $(MAKE) clean; fi diff --git a/deps/rabbitmq_mqtt/README.md b/deps/rabbitmq_mqtt/README.md new file mode 100644 index 0000000000..f0ba5d6b03 --- /dev/null +++ b/deps/rabbitmq_mqtt/README.md @@ -0,0 +1,38 @@ +# RabbitMQ MQTT Plugin + +## Getting Started + +This is an MQTT plugin for RabbitMQ. + +The plugin is included in the RabbitMQ distribution. To enable +it, use [rabbitmq-plugins](https://www.rabbitmq.com/man/rabbitmq-plugins.1.man.html): + + rabbitmq-plugins enable rabbitmq_mqtt + +Default port used by the plugin is `1883`. + +## Documentation + +[MQTT plugin documentation](https://www.rabbitmq.com/mqtt.html) is available +from rabbitmq.com. + +## Contributing + +See [CONTRIBUTING.md](https://github.com/rabbitmq/rabbitmq-mqtt/blob/master/CONTRIBUTING.md). + +### Running Tests + +After cloning RabbitMQ umbrella repository, change into the `rabbitmq-mqtt` directory +and run + + make tests + +This will bring up a RabbitMQ node with the plugin enabled and run integration tests +against it. Note that there must be no other MQTT server running on ports `1883` and `8883`. + +## Copyright and License + +(c) 2007-2020 VMware, Inc. or its affiliates. + +Released under the [Mozilla Public License](https://www.rabbitmq.com/mpl.html), +the same as RabbitMQ. diff --git a/deps/rabbitmq_mqtt/erlang.mk b/deps/rabbitmq_mqtt/erlang.mk new file mode 100644 index 0000000000..fce4be0b0a --- /dev/null +++ b/deps/rabbitmq_mqtt/erlang.mk @@ -0,0 +1,7808 @@ +# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk + +ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) +export ERLANG_MK_FILENAME + +ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0 +ERLANG_MK_WITHOUT = + +# Make 3.81 and 3.82 are deprecated. + +ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81) +$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html) +endif + +ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82) +$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html) +endif + +# Core configuration. + +PROJECT ?= $(notdir $(CURDIR)) +PROJECT := $(strip $(PROJECT)) + +PROJECT_VERSION ?= rolling +PROJECT_MOD ?= $(PROJECT)_app +PROJECT_ENV ?= [] + +# Verbosity. + +V ?= 0 + +verbose_0 = @ +verbose_2 = set -x; +verbose = $(verbose_$(V)) + +ifeq ($(V),3) +SHELL := $(SHELL) -x +endif + +gen_verbose_0 = @echo " GEN " $@; +gen_verbose_2 = set -x; +gen_verbose = $(gen_verbose_$(V)) + +gen_verbose_esc_0 = @echo " GEN " $$@; +gen_verbose_esc_2 = set -x; +gen_verbose_esc = $(gen_verbose_esc_$(V)) + +# Temporary files directory. + +ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk +export ERLANG_MK_TMP + +# "erl" command. + +ERL = erl +A1 -noinput -boot no_dot_erlang + +# Platform detection. + +ifeq ($(PLATFORM),) +UNAME_S := $(shell uname -s) + +ifeq ($(UNAME_S),Linux) +PLATFORM = linux +else ifeq ($(UNAME_S),Darwin) +PLATFORM = darwin +else ifeq ($(UNAME_S),SunOS) +PLATFORM = solaris +else ifeq ($(UNAME_S),GNU) +PLATFORM = gnu +else ifeq ($(UNAME_S),FreeBSD) +PLATFORM = freebsd +else ifeq ($(UNAME_S),NetBSD) +PLATFORM = netbsd +else ifeq ($(UNAME_S),OpenBSD) +PLATFORM = openbsd +else ifeq ($(UNAME_S),DragonFly) +PLATFORM = dragonfly +else ifeq ($(shell uname -o),Msys) +PLATFORM = msys2 +else +$(error Unable to detect platform. Please open a ticket with the output of uname -a.) +endif + +export PLATFORM +endif + +# Core targets. + +all:: deps app rel + +# Noop to avoid a Make warning when there's nothing to do. +rel:: + $(verbose) : + +relup:: deps app + +check:: tests + +clean:: clean-crashdump + +clean-crashdump: +ifneq ($(wildcard erl_crash.dump),) + $(gen_verbose) rm -f erl_crash.dump +endif + +distclean:: clean distclean-tmp + +$(ERLANG_MK_TMP): + $(verbose) mkdir -p $(ERLANG_MK_TMP) + +distclean-tmp: + $(gen_verbose) rm -rf $(ERLANG_MK_TMP) + +help:: + $(verbose) printf "%s\n" \ + "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \ + "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \ + "" \ + "Usage: [V=1] $(MAKE) [target]..." \ + "" \ + "Core targets:" \ + " all Run deps, app and rel targets in that order" \ + " app Compile the project" \ + " deps Fetch dependencies (if needed) and compile them" \ + " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \ + " list-deps List dependencies recursively on stdout" \ + " search q=... Search for a package in the built-in index" \ + " rel Build a release for this project, if applicable" \ + " docs Build the documentation for this project" \ + " install-docs Install the man pages for this project" \ + " check Compile and run all tests and analysis for this project" \ + " tests Run the tests for this project" \ + " clean Delete temporary and output files from most targets" \ + " distclean Delete all temporary and output files" \ + " help Display this help and exit" \ + " erlang-mk Update erlang.mk to the latest version" + +# Core functions. 
+ +empty := +space := $(empty) $(empty) +tab := $(empty) $(empty) +comma := , + +define newline + + +endef + +define comma_list +$(subst $(space),$(comma),$(strip $(1))) +endef + +define escape_dquotes +$(subst ",\",$1) +endef + +# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. +define erlang +$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk +endef + +ifeq ($(PLATFORM),msys2) +core_native_path = $(shell cygpath -m $1) +else +core_native_path = $1 +endif + +core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2 + +core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) + +# We skip files that contain spaces because they end up causing issues. +core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " ")) + +core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) + +core_ls = $(filter-out $(1),$(shell echo $(1))) + +# @todo Use a solution that does not require using perl. +core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) + +define core_render + printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) +endef + +# Automated update. + +ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk +ERLANG_MK_COMMIT ?= +ERLANG_MK_BUILD_CONFIG ?= build.config +ERLANG_MK_BUILD_DIR ?= .erlang.mk.build + +erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT) +erlang-mk: +ifdef ERLANG_MK_COMMIT + $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) + $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) +else + $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) +endif + $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi + $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1 + $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR) + $(verbose) rm -rf $(ERLANG_MK_TMP) + +# The erlang.mk package index is bundled in the default erlang.mk build. +# Search for the string "copyright" to skip to the rest of the code. + +# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-kerl + +KERL_INSTALL_DIR ?= $(HOME)/erlang + +ifeq ($(strip $(KERL)),) +KERL := $(ERLANG_MK_TMP)/kerl/kerl +endif + +KERL_DIR = $(ERLANG_MK_TMP)/kerl + +export KERL + +KERL_GIT ?= https://github.com/kerl/kerl +KERL_COMMIT ?= master + +KERL_MAKEFLAGS ?= + +OTP_GIT ?= https://github.com/erlang/otp + +define kerl_otp_target +$(KERL_INSTALL_DIR)/$(1): $(KERL) + $(verbose) if [ ! -d $$@ ]; then \ + MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \ + $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \ + fi +endef + +define kerl_hipe_target +$(KERL_INSTALL_DIR)/$1-native: $(KERL) + $(verbose) if [ ! 
-d $$@ ]; then \ + KERL_CONFIGURE_OPTIONS=--enable-native-libs \ + MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \ + $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \ + fi +endef + +$(KERL): $(KERL_DIR) + +$(KERL_DIR): | $(ERLANG_MK_TMP) + $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl + $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT) + $(verbose) chmod +x $(KERL) + +distclean:: distclean-kerl + +distclean-kerl: + $(gen_verbose) rm -rf $(KERL_DIR) + +# Allow users to select which version of Erlang/OTP to use for a project. + +ifneq ($(strip $(LATEST_ERLANG_OTP)),) +# In some environments it is necessary to filter out master. +ERLANG_OTP := $(notdir $(lastword $(sort\ + $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\ + $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native])))))) +endif + +ERLANG_OTP ?= +ERLANG_HIPE ?= + +# Use kerl to enforce a specific Erlang/OTP version for a project. +ifneq ($(strip $(ERLANG_OTP)),) +export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH) +SHELL := env PATH=$(PATH) $(SHELL) +$(eval $(call kerl_otp_target,$(ERLANG_OTP))) + +# Build Erlang/OTP only if it doesn't already exist. +ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),) +$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...) +$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2) +endif + +else +# Same for a HiPE enabled VM. +ifneq ($(strip $(ERLANG_HIPE)),) +export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH) +SHELL := env PATH=$(PATH) $(SHELL) +$(eval $(call kerl_hipe_target,$(ERLANG_HIPE))) + +# Build Erlang/OTP only if it doesn't already exist. +ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),) +$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...) 
+$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2) +endif + +endif +endif + +PACKAGES += aberth +pkg_aberth_name = aberth +pkg_aberth_description = Generic BERT-RPC server in Erlang +pkg_aberth_homepage = https://github.com/a13x/aberth +pkg_aberth_fetch = git +pkg_aberth_repo = https://github.com/a13x/aberth +pkg_aberth_commit = master + +PACKAGES += active +pkg_active_name = active +pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running +pkg_active_homepage = https://github.com/proger/active +pkg_active_fetch = git +pkg_active_repo = https://github.com/proger/active +pkg_active_commit = master + +PACKAGES += actordb_core +pkg_actordb_core_name = actordb_core +pkg_actordb_core_description = ActorDB main source +pkg_actordb_core_homepage = http://www.actordb.com/ +pkg_actordb_core_fetch = git +pkg_actordb_core_repo = https://github.com/biokoda/actordb_core +pkg_actordb_core_commit = master + +PACKAGES += actordb_thrift +pkg_actordb_thrift_name = actordb_thrift +pkg_actordb_thrift_description = Thrift API for ActorDB +pkg_actordb_thrift_homepage = http://www.actordb.com/ +pkg_actordb_thrift_fetch = git +pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift +pkg_actordb_thrift_commit = master + +PACKAGES += aleppo +pkg_aleppo_name = aleppo +pkg_aleppo_description = Alternative Erlang Pre-Processor +pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo +pkg_aleppo_fetch = git +pkg_aleppo_repo = https://github.com/ErlyORM/aleppo +pkg_aleppo_commit = master + +PACKAGES += alog +pkg_alog_name = alog +pkg_alog_description = Simply the best logging framework for Erlang +pkg_alog_homepage = https://github.com/siberian-fast-food/alogger +pkg_alog_fetch = git +pkg_alog_repo = https://github.com/siberian-fast-food/alogger +pkg_alog_commit = master + +PACKAGES += amqp_client +pkg_amqp_client_name = amqp_client +pkg_amqp_client_description = RabbitMQ Erlang AMQP client +pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html +pkg_amqp_client_fetch = git +pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git +pkg_amqp_client_commit = master + +PACKAGES += annotations +pkg_annotations_name = annotations +pkg_annotations_description = Simple code instrumentation utilities +pkg_annotations_homepage = https://github.com/hyperthunk/annotations +pkg_annotations_fetch = git +pkg_annotations_repo = https://github.com/hyperthunk/annotations +pkg_annotations_commit = master + +PACKAGES += antidote +pkg_antidote_name = antidote +pkg_antidote_description = Large-scale computation without synchronisation +pkg_antidote_homepage = https://syncfree.lip6.fr/ +pkg_antidote_fetch = git +pkg_antidote_repo = https://github.com/SyncFree/antidote +pkg_antidote_commit = master + +PACKAGES += apns +pkg_apns_name = apns +pkg_apns_description = Apple Push Notification Server for Erlang +pkg_apns_homepage = http://inaka.github.com/apns4erl +pkg_apns_fetch = git +pkg_apns_repo = https://github.com/inaka/apns4erl +pkg_apns_commit = master + +PACKAGES += asciideck +pkg_asciideck_name = asciideck +pkg_asciideck_description = Asciidoc for Erlang. 
+pkg_asciideck_homepage = https://ninenines.eu +pkg_asciideck_fetch = git +pkg_asciideck_repo = https://github.com/ninenines/asciideck +pkg_asciideck_commit = master + +PACKAGES += azdht +pkg_azdht_name = azdht +pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang +pkg_azdht_homepage = https://github.com/arcusfelis/azdht +pkg_azdht_fetch = git +pkg_azdht_repo = https://github.com/arcusfelis/azdht +pkg_azdht_commit = master + +PACKAGES += backoff +pkg_backoff_name = backoff +pkg_backoff_description = Simple exponential backoffs in Erlang +pkg_backoff_homepage = https://github.com/ferd/backoff +pkg_backoff_fetch = git +pkg_backoff_repo = https://github.com/ferd/backoff +pkg_backoff_commit = master + +PACKAGES += barrel_tcp +pkg_barrel_tcp_name = barrel_tcp +pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang. +pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_fetch = git +pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_commit = master + +PACKAGES += basho_bench +pkg_basho_bench_name = basho_bench +pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for. +pkg_basho_bench_homepage = https://github.com/basho/basho_bench +pkg_basho_bench_fetch = git +pkg_basho_bench_repo = https://github.com/basho/basho_bench +pkg_basho_bench_commit = master + +PACKAGES += bcrypt +pkg_bcrypt_name = bcrypt +pkg_bcrypt_description = Bcrypt Erlang / C library +pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt +pkg_bcrypt_fetch = git +pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git +pkg_bcrypt_commit = master + +PACKAGES += beam +pkg_beam_name = beam +pkg_beam_description = BEAM emulator written in Erlang +pkg_beam_homepage = https://github.com/tonyrog/beam +pkg_beam_fetch = git +pkg_beam_repo = https://github.com/tonyrog/beam +pkg_beam_commit = master + +PACKAGES += beanstalk +pkg_beanstalk_name = beanstalk +pkg_beanstalk_description = An Erlang client for beanstalkd +pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_fetch = git +pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_commit = master + +PACKAGES += bear +pkg_bear_name = bear +pkg_bear_description = a set of statistics functions for erlang +pkg_bear_homepage = https://github.com/boundary/bear +pkg_bear_fetch = git +pkg_bear_repo = https://github.com/boundary/bear +pkg_bear_commit = master + +PACKAGES += bertconf +pkg_bertconf_name = bertconf +pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded +pkg_bertconf_homepage = https://github.com/ferd/bertconf +pkg_bertconf_fetch = git +pkg_bertconf_repo = https://github.com/ferd/bertconf +pkg_bertconf_commit = master + +PACKAGES += bifrost +pkg_bifrost_name = bifrost +pkg_bifrost_description = Erlang FTP Server Framework +pkg_bifrost_homepage = https://github.com/thorstadt/bifrost +pkg_bifrost_fetch = git +pkg_bifrost_repo = https://github.com/thorstadt/bifrost +pkg_bifrost_commit = master + +PACKAGES += binpp +pkg_binpp_name = binpp +pkg_binpp_description = Erlang Binary Pretty Printer +pkg_binpp_homepage = https://github.com/jtendo/binpp +pkg_binpp_fetch = git +pkg_binpp_repo = https://github.com/jtendo/binpp +pkg_binpp_commit = master + +PACKAGES += bisect +pkg_bisect_name = bisect +pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang +pkg_bisect_homepage = 
https://github.com/knutin/bisect +pkg_bisect_fetch = git +pkg_bisect_repo = https://github.com/knutin/bisect +pkg_bisect_commit = master + +PACKAGES += bitcask +pkg_bitcask_name = bitcask +pkg_bitcask_description = because you need another a key/value storage engine +pkg_bitcask_homepage = https://github.com/basho/bitcask +pkg_bitcask_fetch = git +pkg_bitcask_repo = https://github.com/basho/bitcask +pkg_bitcask_commit = develop + +PACKAGES += bitstore +pkg_bitstore_name = bitstore +pkg_bitstore_description = A document based ontology development environment +pkg_bitstore_homepage = https://github.com/bdionne/bitstore +pkg_bitstore_fetch = git +pkg_bitstore_repo = https://github.com/bdionne/bitstore +pkg_bitstore_commit = master + +PACKAGES += bootstrap +pkg_bootstrap_name = bootstrap +pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application. +pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap +pkg_bootstrap_fetch = git +pkg_bootstrap_repo = https://github.com/schlagert/bootstrap +pkg_bootstrap_commit = master + +PACKAGES += boss +pkg_boss_name = boss +pkg_boss_description = Erlang web MVC, now featuring Comet +pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_fetch = git +pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_commit = master + +PACKAGES += boss_db +pkg_boss_db_name = boss_db +pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang +pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db +pkg_boss_db_fetch = git +pkg_boss_db_repo = https://github.com/ErlyORM/boss_db +pkg_boss_db_commit = master + +PACKAGES += brod +pkg_brod_name = brod +pkg_brod_description = Kafka client in Erlang +pkg_brod_homepage = https://github.com/klarna/brod +pkg_brod_fetch = git +pkg_brod_repo = https://github.com/klarna/brod.git +pkg_brod_commit = master + +PACKAGES += bson +pkg_bson_name = bson +pkg_bson_description = BSON documents in Erlang, see bsonspec.org +pkg_bson_homepage = https://github.com/comtihon/bson-erlang +pkg_bson_fetch = git +pkg_bson_repo = https://github.com/comtihon/bson-erlang +pkg_bson_commit = master + +PACKAGES += bullet +pkg_bullet_name = bullet +pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy. 
+pkg_bullet_homepage = http://ninenines.eu +pkg_bullet_fetch = git +pkg_bullet_repo = https://github.com/ninenines/bullet +pkg_bullet_commit = master + +PACKAGES += cache +pkg_cache_name = cache +pkg_cache_description = Erlang in-memory cache +pkg_cache_homepage = https://github.com/fogfish/cache +pkg_cache_fetch = git +pkg_cache_repo = https://github.com/fogfish/cache +pkg_cache_commit = master + +PACKAGES += cake +pkg_cake_name = cake +pkg_cake_description = Really simple terminal colorization +pkg_cake_homepage = https://github.com/darach/cake-erl +pkg_cake_fetch = git +pkg_cake_repo = https://github.com/darach/cake-erl +pkg_cake_commit = master + +PACKAGES += carotene +pkg_carotene_name = carotene +pkg_carotene_description = Real-time server +pkg_carotene_homepage = https://github.com/carotene/carotene +pkg_carotene_fetch = git +pkg_carotene_repo = https://github.com/carotene/carotene +pkg_carotene_commit = master + +PACKAGES += cberl +pkg_cberl_name = cberl +pkg_cberl_description = NIF based Erlang bindings for Couchbase +pkg_cberl_homepage = https://github.com/chitika/cberl +pkg_cberl_fetch = git +pkg_cberl_repo = https://github.com/chitika/cberl +pkg_cberl_commit = master + +PACKAGES += cecho +pkg_cecho_name = cecho +pkg_cecho_description = An ncurses library for Erlang +pkg_cecho_homepage = https://github.com/mazenharake/cecho +pkg_cecho_fetch = git +pkg_cecho_repo = https://github.com/mazenharake/cecho +pkg_cecho_commit = master + +PACKAGES += cferl +pkg_cferl_name = cferl +pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client +pkg_cferl_homepage = https://github.com/ddossot/cferl +pkg_cferl_fetch = git +pkg_cferl_repo = https://github.com/ddossot/cferl +pkg_cferl_commit = master + +PACKAGES += chaos_monkey +pkg_chaos_monkey_name = chaos_monkey +pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes. +pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_fetch = git +pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_commit = master + +PACKAGES += check_node +pkg_check_node_name = check_node +pkg_check_node_description = Nagios Scripts for monitoring Riak +pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios +pkg_check_node_fetch = git +pkg_check_node_repo = https://github.com/basho-labs/riak_nagios +pkg_check_node_commit = master + +PACKAGES += chronos +pkg_chronos_name = chronos +pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests. +pkg_chronos_homepage = https://github.com/lehoff/chronos +pkg_chronos_fetch = git +pkg_chronos_repo = https://github.com/lehoff/chronos +pkg_chronos_commit = master + +PACKAGES += chumak +pkg_chumak_name = chumak +pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol. 
+pkg_chumak_homepage = http://choven.ca +pkg_chumak_fetch = git +pkg_chumak_repo = https://github.com/chovencorp/chumak +pkg_chumak_commit = master + +PACKAGES += cl +pkg_cl_name = cl +pkg_cl_description = OpenCL binding for Erlang +pkg_cl_homepage = https://github.com/tonyrog/cl +pkg_cl_fetch = git +pkg_cl_repo = https://github.com/tonyrog/cl +pkg_cl_commit = master + +PACKAGES += clique +pkg_clique_name = clique +pkg_clique_description = CLI Framework for Erlang +pkg_clique_homepage = https://github.com/basho/clique +pkg_clique_fetch = git +pkg_clique_repo = https://github.com/basho/clique +pkg_clique_commit = develop + +PACKAGES += cloudi_core +pkg_cloudi_core_name = cloudi_core +pkg_cloudi_core_description = CloudI internal service runtime +pkg_cloudi_core_homepage = http://cloudi.org/ +pkg_cloudi_core_fetch = git +pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core +pkg_cloudi_core_commit = master + +PACKAGES += cloudi_service_api_requests +pkg_cloudi_service_api_requests_name = cloudi_service_api_requests +pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) +pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ +pkg_cloudi_service_api_requests_fetch = git +pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests +pkg_cloudi_service_api_requests_commit = master + +PACKAGES += cloudi_service_db +pkg_cloudi_service_db_name = cloudi_service_db +pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) +pkg_cloudi_service_db_homepage = http://cloudi.org/ +pkg_cloudi_service_db_fetch = git +pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db +pkg_cloudi_service_db_commit = master + +PACKAGES += cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service +pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_fetch = git +pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_commit = master + +PACKAGES += cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service +pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_cql_fetch = git +pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_commit = master + +PACKAGES += cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service +pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ +pkg_cloudi_service_db_couchdb_fetch = git +pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_commit = master + +PACKAGES += cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service +pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ +pkg_cloudi_service_db_elasticsearch_fetch = git +pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_commit = master + 
+PACKAGES += cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_description = memcached CloudI Service +pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ +pkg_cloudi_service_db_memcached_fetch = git +pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_commit = master + +PACKAGES += cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_description = MySQL CloudI Service +pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_mysql_fetch = git +pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_commit = master + +PACKAGES += cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service +pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_pgsql_fetch = git +pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_commit = master + +PACKAGES += cloudi_service_db_riak +pkg_cloudi_service_db_riak_name = cloudi_service_db_riak +pkg_cloudi_service_db_riak_description = Riak CloudI Service +pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ +pkg_cloudi_service_db_riak_fetch = git +pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak +pkg_cloudi_service_db_riak_commit = master + +PACKAGES += cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service +pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ +pkg_cloudi_service_db_tokyotyrant_fetch = git +pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_commit = master + +PACKAGES += cloudi_service_filesystem +pkg_cloudi_service_filesystem_name = cloudi_service_filesystem +pkg_cloudi_service_filesystem_description = Filesystem CloudI Service +pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ +pkg_cloudi_service_filesystem_fetch = git +pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem +pkg_cloudi_service_filesystem_commit = master + +PACKAGES += cloudi_service_http_client +pkg_cloudi_service_http_client_name = cloudi_service_http_client +pkg_cloudi_service_http_client_description = HTTP client CloudI Service +pkg_cloudi_service_http_client_homepage = http://cloudi.org/ +pkg_cloudi_service_http_client_fetch = git +pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client +pkg_cloudi_service_http_client_commit = master + +PACKAGES += cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service +pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ +pkg_cloudi_service_http_cowboy_fetch = git +pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_commit = master + +PACKAGES += cloudi_service_http_elli +pkg_cloudi_service_http_elli_name = cloudi_service_http_elli +pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service +pkg_cloudi_service_http_elli_homepage = 
http://cloudi.org/ +pkg_cloudi_service_http_elli_fetch = git +pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli +pkg_cloudi_service_http_elli_commit = master + +PACKAGES += cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service +pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ +pkg_cloudi_service_map_reduce_fetch = git +pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_commit = master + +PACKAGES += cloudi_service_oauth1 +pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 +pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service +pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ +pkg_cloudi_service_oauth1_fetch = git +pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 +pkg_cloudi_service_oauth1_commit = master + +PACKAGES += cloudi_service_queue +pkg_cloudi_service_queue_name = cloudi_service_queue +pkg_cloudi_service_queue_description = Persistent Queue Service +pkg_cloudi_service_queue_homepage = http://cloudi.org/ +pkg_cloudi_service_queue_fetch = git +pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue +pkg_cloudi_service_queue_commit = master + +PACKAGES += cloudi_service_quorum +pkg_cloudi_service_quorum_name = cloudi_service_quorum +pkg_cloudi_service_quorum_description = CloudI Quorum Service +pkg_cloudi_service_quorum_homepage = http://cloudi.org/ +pkg_cloudi_service_quorum_fetch = git +pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum +pkg_cloudi_service_quorum_commit = master + +PACKAGES += cloudi_service_router +pkg_cloudi_service_router_name = cloudi_service_router +pkg_cloudi_service_router_description = CloudI Router Service +pkg_cloudi_service_router_homepage = http://cloudi.org/ +pkg_cloudi_service_router_fetch = git +pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router +pkg_cloudi_service_router_commit = master + +PACKAGES += cloudi_service_tcp +pkg_cloudi_service_tcp_name = cloudi_service_tcp +pkg_cloudi_service_tcp_description = TCP CloudI Service +pkg_cloudi_service_tcp_homepage = http://cloudi.org/ +pkg_cloudi_service_tcp_fetch = git +pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp +pkg_cloudi_service_tcp_commit = master + +PACKAGES += cloudi_service_timers +pkg_cloudi_service_timers_name = cloudi_service_timers +pkg_cloudi_service_timers_description = Timers CloudI Service +pkg_cloudi_service_timers_homepage = http://cloudi.org/ +pkg_cloudi_service_timers_fetch = git +pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers +pkg_cloudi_service_timers_commit = master + +PACKAGES += cloudi_service_udp +pkg_cloudi_service_udp_name = cloudi_service_udp +pkg_cloudi_service_udp_description = UDP CloudI Service +pkg_cloudi_service_udp_homepage = http://cloudi.org/ +pkg_cloudi_service_udp_fetch = git +pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp +pkg_cloudi_service_udp_commit = master + +PACKAGES += cloudi_service_validate +pkg_cloudi_service_validate_name = cloudi_service_validate +pkg_cloudi_service_validate_description = CloudI Validate Service +pkg_cloudi_service_validate_homepage = http://cloudi.org/ +pkg_cloudi_service_validate_fetch = git +pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate 
+pkg_cloudi_service_validate_commit = master + +PACKAGES += cloudi_service_zeromq +pkg_cloudi_service_zeromq_name = cloudi_service_zeromq +pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service +pkg_cloudi_service_zeromq_homepage = http://cloudi.org/ +pkg_cloudi_service_zeromq_fetch = git +pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq +pkg_cloudi_service_zeromq_commit = master + +PACKAGES += cluster_info +pkg_cluster_info_name = cluster_info +pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app +pkg_cluster_info_homepage = https://github.com/basho/cluster_info +pkg_cluster_info_fetch = git +pkg_cluster_info_repo = https://github.com/basho/cluster_info +pkg_cluster_info_commit = master + +PACKAGES += color +pkg_color_name = color +pkg_color_description = ANSI colors for your Erlang +pkg_color_homepage = https://github.com/julianduque/erlang-color +pkg_color_fetch = git +pkg_color_repo = https://github.com/julianduque/erlang-color +pkg_color_commit = master + +PACKAGES += confetti +pkg_confetti_name = confetti +pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids +pkg_confetti_homepage = https://github.com/jtendo/confetti +pkg_confetti_fetch = git +pkg_confetti_repo = https://github.com/jtendo/confetti +pkg_confetti_commit = master + +PACKAGES += couchbeam +pkg_couchbeam_name = couchbeam +pkg_couchbeam_description = Apache CouchDB client in Erlang +pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam +pkg_couchbeam_fetch = git +pkg_couchbeam_repo = https://github.com/benoitc/couchbeam +pkg_couchbeam_commit = master + +PACKAGES += covertool +pkg_covertool_name = covertool +pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports +pkg_covertool_homepage = https://github.com/idubrov/covertool +pkg_covertool_fetch = git +pkg_covertool_repo = https://github.com/idubrov/covertool +pkg_covertool_commit = master + +PACKAGES += cowboy +pkg_cowboy_name = cowboy +pkg_cowboy_description = Small, fast and modular HTTP server. +pkg_cowboy_homepage = http://ninenines.eu +pkg_cowboy_fetch = git +pkg_cowboy_repo = https://github.com/ninenines/cowboy +pkg_cowboy_commit = 1.0.4 + +PACKAGES += cowdb +pkg_cowdb_name = cowdb +pkg_cowdb_description = Pure Key/Value database library for Erlang Applications +pkg_cowdb_homepage = https://github.com/refuge/cowdb +pkg_cowdb_fetch = git +pkg_cowdb_repo = https://github.com/refuge/cowdb +pkg_cowdb_commit = master + +PACKAGES += cowlib +pkg_cowlib_name = cowlib +pkg_cowlib_description = Support library for manipulating Web protocols. 
+pkg_cowlib_homepage = http://ninenines.eu +pkg_cowlib_fetch = git +pkg_cowlib_repo = https://github.com/ninenines/cowlib +pkg_cowlib_commit = 1.0.2 + +PACKAGES += cpg +pkg_cpg_name = cpg +pkg_cpg_description = CloudI Process Groups +pkg_cpg_homepage = https://github.com/okeuday/cpg +pkg_cpg_fetch = git +pkg_cpg_repo = https://github.com/okeuday/cpg +pkg_cpg_commit = master + +PACKAGES += cqerl +pkg_cqerl_name = cqerl +pkg_cqerl_description = Native Erlang CQL client for Cassandra +pkg_cqerl_homepage = https://matehat.github.io/cqerl/ +pkg_cqerl_fetch = git +pkg_cqerl_repo = https://github.com/matehat/cqerl +pkg_cqerl_commit = master + +PACKAGES += cr +pkg_cr_name = cr +pkg_cr_description = Chain Replication +pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm +pkg_cr_fetch = git +pkg_cr_repo = https://github.com/spawnproc/cr +pkg_cr_commit = master + +PACKAGES += cuttlefish +pkg_cuttlefish_name = cuttlefish +pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me? +pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish +pkg_cuttlefish_fetch = git +pkg_cuttlefish_repo = https://github.com/basho/cuttlefish +pkg_cuttlefish_commit = master + +PACKAGES += damocles +pkg_damocles_name = damocles +pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box. +pkg_damocles_homepage = https://github.com/lostcolony/damocles +pkg_damocles_fetch = git +pkg_damocles_repo = https://github.com/lostcolony/damocles +pkg_damocles_commit = master + +PACKAGES += debbie +pkg_debbie_name = debbie +pkg_debbie_description = .DEB Built In Erlang +pkg_debbie_homepage = https://github.com/crownedgrouse/debbie +pkg_debbie_fetch = git +pkg_debbie_repo = https://github.com/crownedgrouse/debbie +pkg_debbie_commit = master + +PACKAGES += decimal +pkg_decimal_name = decimal +pkg_decimal_description = An Erlang decimal arithmetic library +pkg_decimal_homepage = https://github.com/tim/erlang-decimal +pkg_decimal_fetch = git +pkg_decimal_repo = https://github.com/tim/erlang-decimal +pkg_decimal_commit = master + +PACKAGES += detergent +pkg_detergent_name = detergent +pkg_detergent_description = An emulsifying Erlang SOAP library +pkg_detergent_homepage = https://github.com/devinus/detergent +pkg_detergent_fetch = git +pkg_detergent_repo = https://github.com/devinus/detergent +pkg_detergent_commit = master + +PACKAGES += detest +pkg_detest_name = detest +pkg_detest_description = Tool for running tests on a cluster of erlang nodes +pkg_detest_homepage = https://github.com/biokoda/detest +pkg_detest_fetch = git +pkg_detest_repo = https://github.com/biokoda/detest +pkg_detest_commit = master + +PACKAGES += dh_date +pkg_dh_date_name = dh_date +pkg_dh_date_description = Date formatting / parsing library for erlang +pkg_dh_date_homepage = https://github.com/daleharvey/dh_date +pkg_dh_date_fetch = git +pkg_dh_date_repo = https://github.com/daleharvey/dh_date +pkg_dh_date_commit = master + +PACKAGES += dirbusterl +pkg_dirbusterl_name = dirbusterl +pkg_dirbusterl_description = DirBuster successor in Erlang +pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_fetch = git +pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_commit = master + +PACKAGES += dispcount +pkg_dispcount_name = dispcount +pkg_dispcount_description = Erlang task dispatcher based on ETS counters. 
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns 
+pkg_edns_name = edns +pkg_edns_description = Erlang/OTP DNS server +pkg_edns_homepage = https://github.com/hcvst/erlang-dns +pkg_edns_fetch = git +pkg_edns_repo = https://github.com/hcvst/erlang-dns +pkg_edns_commit = master + +PACKAGES += edown +pkg_edown_name = edown +pkg_edown_description = EDoc extension for generating Github-flavored Markdown +pkg_edown_homepage = https://github.com/uwiger/edown +pkg_edown_fetch = git +pkg_edown_repo = https://github.com/uwiger/edown +pkg_edown_commit = master + +PACKAGES += eep +pkg_eep_name = eep +pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy +pkg_eep_homepage = https://github.com/virtan/eep +pkg_eep_fetch = git +pkg_eep_repo = https://github.com/virtan/eep +pkg_eep_commit = master + +PACKAGES += eep_app +pkg_eep_app_name = eep_app +pkg_eep_app_description = Embedded Event Processing +pkg_eep_app_homepage = https://github.com/darach/eep-erl +pkg_eep_app_fetch = git +pkg_eep_app_repo = https://github.com/darach/eep-erl +pkg_eep_app_commit = master + +PACKAGES += efene +pkg_efene_name = efene +pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX +pkg_efene_homepage = https://github.com/efene/efene +pkg_efene_fetch = git +pkg_efene_repo = https://github.com/efene/efene +pkg_efene_commit = master + +PACKAGES += egeoip +pkg_egeoip_name = egeoip +pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database. +pkg_egeoip_homepage = https://github.com/mochi/egeoip +pkg_egeoip_fetch = git +pkg_egeoip_repo = https://github.com/mochi/egeoip +pkg_egeoip_commit = master + +PACKAGES += ehsa +pkg_ehsa_name = ehsa +pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules +pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa +pkg_ehsa_fetch = hg +pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa +pkg_ehsa_commit = default + +PACKAGES += ej +pkg_ej_name = ej +pkg_ej_description = Helper module for working with Erlang terms representing JSON +pkg_ej_homepage = https://github.com/seth/ej +pkg_ej_fetch = git +pkg_ej_repo = https://github.com/seth/ej +pkg_ej_commit = master + +PACKAGES += ejabberd +pkg_ejabberd_name = ejabberd +pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform +pkg_ejabberd_homepage = https://github.com/processone/ejabberd +pkg_ejabberd_fetch = git +pkg_ejabberd_repo = https://github.com/processone/ejabberd +pkg_ejabberd_commit = master + +PACKAGES += ejwt +pkg_ejwt_name = ejwt +pkg_ejwt_description = erlang library for JSON Web Token +pkg_ejwt_homepage = https://github.com/artefactop/ejwt +pkg_ejwt_fetch = git +pkg_ejwt_repo = https://github.com/artefactop/ejwt +pkg_ejwt_commit = master + +PACKAGES += ekaf +pkg_ekaf_name = ekaf +pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang. +pkg_ekaf_homepage = https://github.com/helpshift/ekaf +pkg_ekaf_fetch = git +pkg_ekaf_repo = https://github.com/helpshift/ekaf +pkg_ekaf_commit = master + +PACKAGES += elarm +pkg_elarm_name = elarm +pkg_elarm_description = Alarm Manager for Erlang. 
+pkg_elarm_homepage = https://github.com/esl/elarm +pkg_elarm_fetch = git +pkg_elarm_repo = https://github.com/esl/elarm +pkg_elarm_commit = master + +PACKAGES += eleveldb +pkg_eleveldb_name = eleveldb +pkg_eleveldb_description = Erlang LevelDB API +pkg_eleveldb_homepage = https://github.com/basho/eleveldb +pkg_eleveldb_fetch = git +pkg_eleveldb_repo = https://github.com/basho/eleveldb +pkg_eleveldb_commit = master + +PACKAGES += elixir +pkg_elixir_name = elixir +pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications +pkg_elixir_homepage = https://elixir-lang.org/ +pkg_elixir_fetch = git +pkg_elixir_repo = https://github.com/elixir-lang/elixir +pkg_elixir_commit = master + +PACKAGES += elli +pkg_elli_name = elli +pkg_elli_description = Simple, robust and performant Erlang web server +pkg_elli_homepage = https://github.com/elli-lib/elli +pkg_elli_fetch = git +pkg_elli_repo = https://github.com/elli-lib/elli +pkg_elli_commit = master + +PACKAGES += elvis +pkg_elvis_name = elvis +pkg_elvis_description = Erlang Style Reviewer +pkg_elvis_homepage = https://github.com/inaka/elvis +pkg_elvis_fetch = git +pkg_elvis_repo = https://github.com/inaka/elvis +pkg_elvis_commit = master + +PACKAGES += emagick +pkg_emagick_name = emagick +pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool. +pkg_emagick_homepage = https://github.com/kivra/emagick +pkg_emagick_fetch = git +pkg_emagick_repo = https://github.com/kivra/emagick +pkg_emagick_commit = master + +PACKAGES += emysql +pkg_emysql_name = emysql +pkg_emysql_description = Stable, pure Erlang MySQL driver. +pkg_emysql_homepage = https://github.com/Eonblast/Emysql +pkg_emysql_fetch = git +pkg_emysql_repo = https://github.com/Eonblast/Emysql +pkg_emysql_commit = master + +PACKAGES += enm +pkg_enm_name = enm +pkg_enm_description = Erlang driver for nanomsg +pkg_enm_homepage = https://github.com/basho/enm +pkg_enm_fetch = git +pkg_enm_repo = https://github.com/basho/enm +pkg_enm_commit = master + +PACKAGES += entop +pkg_entop_name = entop +pkg_entop_description = A top-like tool for monitoring an Erlang node +pkg_entop_homepage = https://github.com/mazenharake/entop +pkg_entop_fetch = git +pkg_entop_repo = https://github.com/mazenharake/entop +pkg_entop_commit = master + +PACKAGES += epcap +pkg_epcap_name = epcap +pkg_epcap_description = Erlang packet capture interface using pcap +pkg_epcap_homepage = https://github.com/msantos/epcap +pkg_epcap_fetch = git +pkg_epcap_repo = https://github.com/msantos/epcap +pkg_epcap_commit = master + +PACKAGES += eper +pkg_eper_name = eper +pkg_eper_description = Erlang performance and debugging tools. +pkg_eper_homepage = https://github.com/massemanet/eper +pkg_eper_fetch = git +pkg_eper_repo = https://github.com/massemanet/eper +pkg_eper_commit = master + +PACKAGES += epgsql +pkg_epgsql_name = epgsql +pkg_epgsql_description = Erlang PostgreSQL client library. +pkg_epgsql_homepage = https://github.com/epgsql/epgsql +pkg_epgsql_fetch = git +pkg_epgsql_repo = https://github.com/epgsql/epgsql +pkg_epgsql_commit = master + +PACKAGES += episcina +pkg_episcina_name = episcina +pkg_episcina_description = A simple non intrusive resource pool for connections +pkg_episcina_homepage = https://github.com/erlware/episcina +pkg_episcina_fetch = git +pkg_episcina_repo = https://github.com/erlware/episcina +pkg_episcina_commit = master + +PACKAGES += eplot +pkg_eplot_name = eplot +pkg_eplot_description = A plot engine written in erlang. 
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot +pkg_eplot_fetch = git +pkg_eplot_repo = https://github.com/psyeugenic/eplot +pkg_eplot_commit = master + +PACKAGES += epocxy +pkg_epocxy_name = epocxy +pkg_epocxy_description = Erlang Patterns of Concurrency +pkg_epocxy_homepage = https://github.com/duomark/epocxy +pkg_epocxy_fetch = git +pkg_epocxy_repo = https://github.com/duomark/epocxy +pkg_epocxy_commit = master + +PACKAGES += epubnub +pkg_epubnub_name = epubnub +pkg_epubnub_description = Erlang PubNub API +pkg_epubnub_homepage = https://github.com/tsloughter/epubnub +pkg_epubnub_fetch = git +pkg_epubnub_repo = https://github.com/tsloughter/epubnub +pkg_epubnub_commit = master + +PACKAGES += eqm +pkg_eqm_name = eqm +pkg_eqm_description = Erlang pub sub with supply-demand channels +pkg_eqm_homepage = https://github.com/loucash/eqm +pkg_eqm_fetch = git +pkg_eqm_repo = https://github.com/loucash/eqm +pkg_eqm_commit = master + +PACKAGES += eredis +pkg_eredis_name = eredis +pkg_eredis_description = Erlang Redis client +pkg_eredis_homepage = https://github.com/wooga/eredis +pkg_eredis_fetch = git +pkg_eredis_repo = https://github.com/wooga/eredis +pkg_eredis_commit = master + +PACKAGES += eredis_pool +pkg_eredis_pool_name = eredis_pool +pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy. +pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_fetch = git +pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_commit = master + +PACKAGES += erl_streams +pkg_erl_streams_name = erl_streams +pkg_erl_streams_description = Streams in Erlang +pkg_erl_streams_homepage = https://github.com/epappas/erl_streams +pkg_erl_streams_fetch = git +pkg_erl_streams_repo = https://github.com/epappas/erl_streams +pkg_erl_streams_commit = master + +PACKAGES += erlang_cep +pkg_erlang_cep_name = erlang_cep +pkg_erlang_cep_description = A basic CEP package written in erlang +pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_fetch = git +pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_commit = master + +PACKAGES += erlang_js +pkg_erlang_js_name = erlang_js +pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. +pkg_erlang_js_homepage = https://github.com/basho/erlang_js +pkg_erlang_js_fetch = git +pkg_erlang_js_repo = https://github.com/basho/erlang_js +pkg_erlang_js_commit = master + +PACKAGES += erlang_localtime +pkg_erlang_localtime_name = erlang_localtime +pkg_erlang_localtime_description = Erlang library for conversion from one local time to another +pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_fetch = git +pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_commit = master + +PACKAGES += erlang_smtp +pkg_erlang_smtp_name = erlang_smtp +pkg_erlang_smtp_description = Erlang SMTP and POP3 server code. 
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_fetch = git +pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_commit = master + +PACKAGES += erlang_term +pkg_erlang_term_name = erlang_term +pkg_erlang_term_description = Erlang Term Info +pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term +pkg_erlang_term_fetch = git +pkg_erlang_term_repo = https://github.com/okeuday/erlang_term +pkg_erlang_term_commit = master + +PACKAGES += erlastic_search +pkg_erlastic_search_name = erlastic_search +pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface. +pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_fetch = git +pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_commit = master + +PACKAGES += erlasticsearch +pkg_erlasticsearch_name = erlasticsearch +pkg_erlasticsearch_description = Erlang thrift interface to elastic_search +pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_fetch = git +pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_commit = master + +PACKAGES += erlbrake +pkg_erlbrake_name = erlbrake +pkg_erlbrake_description = Erlang Airbrake notification client +pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake +pkg_erlbrake_fetch = git +pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake +pkg_erlbrake_commit = master + +PACKAGES += erlcloud +pkg_erlcloud_name = erlcloud +pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB) +pkg_erlcloud_homepage = https://github.com/gleber/erlcloud +pkg_erlcloud_fetch = git +pkg_erlcloud_repo = https://github.com/gleber/erlcloud +pkg_erlcloud_commit = master + +PACKAGES += erlcron +pkg_erlcron_name = erlcron +pkg_erlcron_description = Erlang cronish system +pkg_erlcron_homepage = https://github.com/erlware/erlcron +pkg_erlcron_fetch = git +pkg_erlcron_repo = https://github.com/erlware/erlcron +pkg_erlcron_commit = master + +PACKAGES += erldb +pkg_erldb_name = erldb +pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang +pkg_erldb_homepage = http://erldb.org +pkg_erldb_fetch = git +pkg_erldb_repo = https://github.com/erldb/erldb +pkg_erldb_commit = master + +PACKAGES += erldis +pkg_erldis_name = erldis +pkg_erldis_description = redis erlang client library +pkg_erldis_homepage = https://github.com/cstar/erldis +pkg_erldis_fetch = git +pkg_erldis_repo = https://github.com/cstar/erldis +pkg_erldis_commit = master + +PACKAGES += erldns +pkg_erldns_name = erldns +pkg_erldns_description = DNS server, in erlang. 
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns +pkg_erldns_fetch = git +pkg_erldns_repo = https://github.com/aetrion/erl-dns +pkg_erldns_commit = master + +PACKAGES += erldocker +pkg_erldocker_name = erldocker +pkg_erldocker_description = Docker Remote API client for Erlang +pkg_erldocker_homepage = https://github.com/proger/erldocker +pkg_erldocker_fetch = git +pkg_erldocker_repo = https://github.com/proger/erldocker +pkg_erldocker_commit = master + +PACKAGES += erlfsmon +pkg_erlfsmon_name = erlfsmon +pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX +pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon +pkg_erlfsmon_fetch = git +pkg_erlfsmon_repo = https://github.com/proger/erlfsmon +pkg_erlfsmon_commit = master + +PACKAGES += erlgit +pkg_erlgit_name = erlgit +pkg_erlgit_description = Erlang convenience wrapper around git executable +pkg_erlgit_homepage = https://github.com/gleber/erlgit +pkg_erlgit_fetch = git +pkg_erlgit_repo = https://github.com/gleber/erlgit +pkg_erlgit_commit = master + +PACKAGES += erlguten +pkg_erlguten_name = erlguten +pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang. +pkg_erlguten_homepage = https://github.com/richcarl/erlguten +pkg_erlguten_fetch = git +pkg_erlguten_repo = https://github.com/richcarl/erlguten +pkg_erlguten_commit = master + +PACKAGES += erlmc +pkg_erlmc_name = erlmc +pkg_erlmc_description = Erlang memcached binary protocol client +pkg_erlmc_homepage = https://github.com/jkvor/erlmc +pkg_erlmc_fetch = git +pkg_erlmc_repo = https://github.com/jkvor/erlmc +pkg_erlmc_commit = master + +PACKAGES += erlmongo +pkg_erlmongo_name = erlmongo +pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support +pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_fetch = git +pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_commit = master + +PACKAGES += erlog +pkg_erlog_name = erlog +pkg_erlog_description = Prolog interpreter in and for Erlang +pkg_erlog_homepage = https://github.com/rvirding/erlog +pkg_erlog_fetch = git +pkg_erlog_repo = https://github.com/rvirding/erlog +pkg_erlog_commit = master + +PACKAGES += erlpass +pkg_erlpass_name = erlpass +pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever. +pkg_erlpass_homepage = https://github.com/ferd/erlpass +pkg_erlpass_fetch = git +pkg_erlpass_repo = https://github.com/ferd/erlpass +pkg_erlpass_commit = master + +PACKAGES += erlport +pkg_erlport_name = erlport +pkg_erlport_description = ErlPort - connect Erlang to other languages +pkg_erlport_homepage = https://github.com/hdima/erlport +pkg_erlport_fetch = git +pkg_erlport_repo = https://github.com/hdima/erlport +pkg_erlport_commit = master + +PACKAGES += erlsh +pkg_erlsh_name = erlsh +pkg_erlsh_description = Erlang shell tools +pkg_erlsh_homepage = https://github.com/proger/erlsh +pkg_erlsh_fetch = git +pkg_erlsh_repo = https://github.com/proger/erlsh +pkg_erlsh_commit = master + +PACKAGES += erlsha2 +pkg_erlsha2_name = erlsha2 +pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. 
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2 +pkg_erlsha2_fetch = git +pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 +pkg_erlsha2_commit = master + +PACKAGES += erlsom +pkg_erlsom_name = erlsom +pkg_erlsom_description = XML parser for Erlang +pkg_erlsom_homepage = https://github.com/willemdj/erlsom +pkg_erlsom_fetch = git +pkg_erlsom_repo = https://github.com/willemdj/erlsom +pkg_erlsom_commit = master + +PACKAGES += erlubi +pkg_erlubi_name = erlubi +pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) +pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi +pkg_erlubi_fetch = git +pkg_erlubi_repo = https://github.com/krestenkrab/erlubi +pkg_erlubi_commit = master + +PACKAGES += erlvolt +pkg_erlvolt_name = erlvolt +pkg_erlvolt_description = VoltDB Erlang Client Driver +pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_fetch = git +pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_commit = master + +PACKAGES += erlware_commons +pkg_erlware_commons_name = erlware_commons +pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components. +pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons +pkg_erlware_commons_fetch = git +pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons +pkg_erlware_commons_commit = master + +PACKAGES += erlydtl +pkg_erlydtl_name = erlydtl +pkg_erlydtl_description = Django Template Language for Erlang. +pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl +pkg_erlydtl_fetch = git +pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl +pkg_erlydtl_commit = master + +PACKAGES += errd +pkg_errd_name = errd +pkg_errd_description = Erlang RRDTool library +pkg_errd_homepage = https://github.com/archaelus/errd +pkg_errd_fetch = git +pkg_errd_repo = https://github.com/archaelus/errd +pkg_errd_commit = master + +PACKAGES += erserve +pkg_erserve_name = erserve +pkg_erserve_description = Erlang/Rserve communication interface +pkg_erserve_homepage = https://github.com/del/erserve +pkg_erserve_fetch = git +pkg_erserve_repo = https://github.com/del/erserve +pkg_erserve_commit = master + +PACKAGES += erwa +pkg_erwa_name = erwa +pkg_erwa_description = A WAMP router and client written in Erlang. 
+pkg_erwa_homepage = https://github.com/bwegh/erwa +pkg_erwa_fetch = git +pkg_erwa_repo = https://github.com/bwegh/erwa +pkg_erwa_commit = master + +PACKAGES += escalus +pkg_escalus_name = escalus +pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers +pkg_escalus_homepage = https://github.com/esl/escalus +pkg_escalus_fetch = git +pkg_escalus_repo = https://github.com/esl/escalus +pkg_escalus_commit = master + +PACKAGES += esh_mk +pkg_esh_mk_name = esh_mk +pkg_esh_mk_description = esh template engine plugin for erlang.mk +pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk +pkg_esh_mk_fetch = git +pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git +pkg_esh_mk_commit = master + +PACKAGES += espec +pkg_espec_name = espec +pkg_espec_description = ESpec: Behaviour driven development framework for Erlang +pkg_espec_homepage = https://github.com/lucaspiller/espec +pkg_espec_fetch = git +pkg_espec_repo = https://github.com/lucaspiller/espec +pkg_espec_commit = master + +PACKAGES += estatsd +pkg_estatsd_name = estatsd +pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite +pkg_estatsd_homepage = https://github.com/RJ/estatsd +pkg_estatsd_fetch = git +pkg_estatsd_repo = https://github.com/RJ/estatsd +pkg_estatsd_commit = master + +PACKAGES += etap +pkg_etap_name = etap +pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output. +pkg_etap_homepage = https://github.com/ngerakines/etap +pkg_etap_fetch = git +pkg_etap_repo = https://github.com/ngerakines/etap +pkg_etap_commit = master + +PACKAGES += etest +pkg_etest_name = etest +pkg_etest_description = A lightweight, convention over configuration test framework for Erlang +pkg_etest_homepage = https://github.com/wooga/etest +pkg_etest_fetch = git +pkg_etest_repo = https://github.com/wooga/etest +pkg_etest_commit = master + +PACKAGES += etest_http +pkg_etest_http_name = etest_http +pkg_etest_http_description = etest Assertions around HTTP (client-side) +pkg_etest_http_homepage = https://github.com/wooga/etest_http +pkg_etest_http_fetch = git +pkg_etest_http_repo = https://github.com/wooga/etest_http +pkg_etest_http_commit = master + +PACKAGES += etoml +pkg_etoml_name = etoml +pkg_etoml_description = TOML language erlang parser +pkg_etoml_homepage = https://github.com/kalta/etoml +pkg_etoml_fetch = git +pkg_etoml_repo = https://github.com/kalta/etoml +pkg_etoml_commit = master + +PACKAGES += eunit +pkg_eunit_name = eunit +pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository. +pkg_eunit_homepage = https://github.com/richcarl/eunit +pkg_eunit_fetch = git +pkg_eunit_repo = https://github.com/richcarl/eunit +pkg_eunit_commit = master + +PACKAGES += eunit_formatters +pkg_eunit_formatters_name = eunit_formatters +pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. 
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_fetch = git +pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_commit = master + +PACKAGES += euthanasia +pkg_euthanasia_name = euthanasia +pkg_euthanasia_description = Merciful killer for your Erlang processes +pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia +pkg_euthanasia_fetch = git +pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia +pkg_euthanasia_commit = master + +PACKAGES += evum +pkg_evum_name = evum +pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM +pkg_evum_homepage = https://github.com/msantos/evum +pkg_evum_fetch = git +pkg_evum_repo = https://github.com/msantos/evum +pkg_evum_commit = master + +PACKAGES += exec +pkg_exec_name = erlexec +pkg_exec_description = Execute and control OS processes from Erlang/OTP. +pkg_exec_homepage = http://saleyn.github.com/erlexec +pkg_exec_fetch = git +pkg_exec_repo = https://github.com/saleyn/erlexec +pkg_exec_commit = master + +PACKAGES += exml +pkg_exml_name = exml +pkg_exml_description = XML parsing library in Erlang +pkg_exml_homepage = https://github.com/paulgray/exml +pkg_exml_fetch = git +pkg_exml_repo = https://github.com/paulgray/exml +pkg_exml_commit = master + +PACKAGES += exometer +pkg_exometer_name = exometer +pkg_exometer_description = Basic measurement objects and probe behavior +pkg_exometer_homepage = https://github.com/Feuerlabs/exometer +pkg_exometer_fetch = git +pkg_exometer_repo = https://github.com/Feuerlabs/exometer +pkg_exometer_commit = master + +PACKAGES += exs1024 +pkg_exs1024_name = exs1024 +pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang. +pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024 +pkg_exs1024_fetch = git +pkg_exs1024_repo = https://github.com/jj1bdx/exs1024 +pkg_exs1024_commit = master + +PACKAGES += exs64 +pkg_exs64_name = exs64 +pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang. +pkg_exs64_homepage = https://github.com/jj1bdx/exs64 +pkg_exs64_fetch = git +pkg_exs64_repo = https://github.com/jj1bdx/exs64 +pkg_exs64_commit = master + +PACKAGES += exsplus116 +pkg_exsplus116_name = exsplus116 +pkg_exsplus116_description = Xorshift116plus for Erlang +pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_fetch = git +pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_commit = master + +PACKAGES += exsplus128 +pkg_exsplus128_name = exsplus128 +pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang. +pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_fetch = git +pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_commit = master + +PACKAGES += ezmq +pkg_ezmq_name = ezmq +pkg_ezmq_description = zMQ implemented in Erlang +pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq +pkg_ezmq_fetch = git +pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq +pkg_ezmq_commit = master + +PACKAGES += ezmtp +pkg_ezmtp_name = ezmtp +pkg_ezmtp_description = ZMTP protocol in pure Erlang. 
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - an Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. 
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_fetch = git +pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_commit = master + +PACKAGES += folsomite +pkg_folsomite_name = folsomite +pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics +pkg_folsomite_homepage = https://github.com/campanja/folsomite +pkg_folsomite_fetch = git +pkg_folsomite_repo = https://github.com/campanja/folsomite +pkg_folsomite_commit = master + +PACKAGES += fs +pkg_fs_name = fs +pkg_fs_description = Erlang FileSystem Listener +pkg_fs_homepage = https://github.com/synrc/fs +pkg_fs_fetch = git +pkg_fs_repo = https://github.com/synrc/fs +pkg_fs_commit = master + +PACKAGES += fuse +pkg_fuse_name = fuse +pkg_fuse_description = A Circuit Breaker for Erlang +pkg_fuse_homepage = https://github.com/jlouis/fuse +pkg_fuse_fetch = git +pkg_fuse_repo = https://github.com/jlouis/fuse +pkg_fuse_commit = master + +PACKAGES += gcm +pkg_gcm_name = gcm +pkg_gcm_description = An Erlang application for Google Cloud Messaging +pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang +pkg_gcm_fetch = git +pkg_gcm_repo = https://github.com/pdincau/gcm-erlang +pkg_gcm_commit = master + +PACKAGES += gcprof +pkg_gcprof_name = gcprof +pkg_gcprof_description = Garbage Collection profiler for Erlang +pkg_gcprof_homepage = https://github.com/knutin/gcprof +pkg_gcprof_fetch = git +pkg_gcprof_repo = https://github.com/knutin/gcprof +pkg_gcprof_commit = master + +PACKAGES += geas +pkg_geas_name = geas +pkg_geas_description = Guess Erlang Application Scattering +pkg_geas_homepage = https://github.com/crownedgrouse/geas +pkg_geas_fetch = git +pkg_geas_repo = https://github.com/crownedgrouse/geas +pkg_geas_commit = master + +PACKAGES += geef +pkg_geef_name = geef +pkg_geef_description = Git NEEEEF (Erlang NIF) +pkg_geef_homepage = https://github.com/carlosmn/geef +pkg_geef_fetch = git +pkg_geef_repo = https://github.com/carlosmn/geef +pkg_geef_commit = master + +PACKAGES += gen_coap +pkg_gen_coap_name = gen_coap +pkg_gen_coap_description = Generic Erlang CoAP Client/Server +pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap +pkg_gen_coap_fetch = git +pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap +pkg_gen_coap_commit = master + +PACKAGES += gen_cycle +pkg_gen_cycle_name = gen_cycle +pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks +pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_fetch = git +pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_commit = develop + +PACKAGES += gen_icmp +pkg_gen_icmp_name = gen_icmp +pkg_gen_icmp_description = Erlang interface to ICMP sockets +pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp +pkg_gen_icmp_fetch = git +pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp +pkg_gen_icmp_commit = master + +PACKAGES += gen_leader +pkg_gen_leader_name = gen_leader +pkg_gen_leader_description = leader election behavior +pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival +pkg_gen_leader_fetch = git +pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival +pkg_gen_leader_commit = master + +PACKAGES += gen_nb_server +pkg_gen_nb_server_name = gen_nb_server +pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers +pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_fetch = git +pkg_gen_nb_server_repo = 
https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_commit = master + +PACKAGES += gen_paxos +pkg_gen_paxos_name = gen_paxos +pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol +pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos +pkg_gen_paxos_fetch = git +pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos +pkg_gen_paxos_commit = master + +PACKAGES += gen_rpc +pkg_gen_rpc_name = gen_rpc +pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages +pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git +pkg_gen_rpc_fetch = git +pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git +pkg_gen_rpc_commit = master + +PACKAGES += gen_smtp +pkg_gen_smtp_name = gen_smtp +pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules +pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_fetch = git +pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_commit = master + +PACKAGES += gen_tracker +pkg_gen_tracker_name = gen_tracker +pkg_gen_tracker_description = supervisor with ets handling of children and their metadata +pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_fetch = git +pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_commit = master + +PACKAGES += gen_unix +pkg_gen_unix_name = gen_unix +pkg_gen_unix_description = Erlang Unix socket interface +pkg_gen_unix_homepage = https://github.com/msantos/gen_unix +pkg_gen_unix_fetch = git +pkg_gen_unix_repo = https://github.com/msantos/gen_unix +pkg_gen_unix_commit = master + +PACKAGES += geode +pkg_geode_name = geode +pkg_geode_description = geohash/proximity lookup in pure, uncut erlang. +pkg_geode_homepage = https://github.com/bradfordw/geode +pkg_geode_fetch = git +pkg_geode_repo = https://github.com/bradfordw/geode +pkg_geode_commit = master + +PACKAGES += getopt +pkg_getopt_name = getopt +pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax +pkg_getopt_homepage = https://github.com/jcomellas/getopt +pkg_getopt_fetch = git +pkg_getopt_repo = https://github.com/jcomellas/getopt +pkg_getopt_commit = master + +PACKAGES += gettext +pkg_gettext_name = gettext +pkg_gettext_description = Erlang internationalization library. 
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards and for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += grpc
+pkg_grpc_name = grpc
+pkg_grpc_description = gRPC server in Erlang
+pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
+pkg_grpc_fetch = git
+pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
+pkg_grpc_commit = master
+
+PACKAGES += grpc_client
+pkg_grpc_client_name = grpc_client
+pkg_grpc_client_description = gRPC client in Erlang
+pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
+pkg_grpc_client_fetch = git
+pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
+pkg_grpc_client_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. 
Like rails generate or yeoman +pkg_gut_homepage = https://github.com/unbalancedparentheses/gut +pkg_gut_fetch = git +pkg_gut_repo = https://github.com/unbalancedparentheses/gut +pkg_gut_commit = master + +PACKAGES += hackney +pkg_hackney_name = hackney +pkg_hackney_description = simple HTTP client in Erlang +pkg_hackney_homepage = https://github.com/benoitc/hackney +pkg_hackney_fetch = git +pkg_hackney_repo = https://github.com/benoitc/hackney +pkg_hackney_commit = master + +PACKAGES += hamcrest +pkg_hamcrest_name = hamcrest +pkg_hamcrest_description = Erlang port of Hamcrest +pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_fetch = git +pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_commit = master + +PACKAGES += hanoidb +pkg_hanoidb_name = hanoidb +pkg_hanoidb_description = Erlang LSM BTree Storage +pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_fetch = git +pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_commit = master + +PACKAGES += hottub +pkg_hottub_name = hottub +pkg_hottub_description = Permanent Erlang Worker Pool +pkg_hottub_homepage = https://github.com/bfrog/hottub +pkg_hottub_fetch = git +pkg_hottub_repo = https://github.com/bfrog/hottub +pkg_hottub_commit = master + +PACKAGES += hpack +pkg_hpack_name = hpack +pkg_hpack_description = HPACK Implementation for Erlang +pkg_hpack_homepage = https://github.com/joedevivo/hpack +pkg_hpack_fetch = git +pkg_hpack_repo = https://github.com/joedevivo/hpack +pkg_hpack_commit = master + +PACKAGES += hyper +pkg_hyper_name = hyper +pkg_hyper_description = Erlang implementation of HyperLogLog +pkg_hyper_homepage = https://github.com/GameAnalytics/hyper +pkg_hyper_fetch = git +pkg_hyper_repo = https://github.com/GameAnalytics/hyper +pkg_hyper_commit = master + +PACKAGES += i18n +pkg_i18n_name = i18n +pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e) +pkg_i18n_homepage = https://github.com/erlang-unicode/i18n +pkg_i18n_fetch = git +pkg_i18n_repo = https://github.com/erlang-unicode/i18n +pkg_i18n_commit = master + +PACKAGES += ibrowse +pkg_ibrowse_name = ibrowse +pkg_ibrowse_description = Erlang HTTP client +pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_fetch = git +pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_commit = master + +PACKAGES += idna +pkg_idna_name = idna +pkg_idna_description = Erlang IDNA lib +pkg_idna_homepage = https://github.com/benoitc/erlang-idna +pkg_idna_fetch = git +pkg_idna_repo = https://github.com/benoitc/erlang-idna +pkg_idna_commit = master + +PACKAGES += ierlang +pkg_ierlang_name = ierlang +pkg_ierlang_description = An Erlang language kernel for IPython. 
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang +pkg_ierlang_fetch = git +pkg_ierlang_repo = https://github.com/robbielynch/ierlang +pkg_ierlang_commit = master + +PACKAGES += iota +pkg_iota_name = iota +pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code +pkg_iota_homepage = https://github.com/jpgneves/iota +pkg_iota_fetch = git +pkg_iota_repo = https://github.com/jpgneves/iota +pkg_iota_commit = master + +PACKAGES += irc_lib +pkg_irc_lib_name = irc_lib +pkg_irc_lib_description = Erlang irc client library +pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_fetch = git +pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_commit = master + +PACKAGES += ircd +pkg_ircd_name = ircd +pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. +pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd +pkg_ircd_fetch = git +pkg_ircd_repo = https://github.com/tonyg/erlang-ircd +pkg_ircd_commit = master + +PACKAGES += iris +pkg_iris_name = iris +pkg_iris_description = Iris Erlang binding +pkg_iris_homepage = https://github.com/project-iris/iris-erl +pkg_iris_fetch = git +pkg_iris_repo = https://github.com/project-iris/iris-erl +pkg_iris_commit = master + +PACKAGES += iso8601 +pkg_iso8601_name = iso8601 +pkg_iso8601_description = Erlang ISO 8601 date formatter/parser +pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_fetch = git +pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_commit = master + +PACKAGES += jamdb_sybase +pkg_jamdb_sybase_name = jamdb_sybase +pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE +pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_fetch = git +pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_commit = master + +PACKAGES += jerg +pkg_jerg_name = jerg +pkg_jerg_description = JSON Schema to Erlang Records Generator +pkg_jerg_homepage = https://github.com/ddossot/jerg +pkg_jerg_fetch = git +pkg_jerg_repo = https://github.com/ddossot/jerg +pkg_jerg_commit = master + +PACKAGES += jesse +pkg_jesse_name = jesse +pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang. +pkg_jesse_homepage = https://github.com/for-GET/jesse +pkg_jesse_fetch = git +pkg_jesse_repo = https://github.com/for-GET/jesse +pkg_jesse_commit = master + +PACKAGES += jiffy +pkg_jiffy_name = jiffy +pkg_jiffy_description = JSON NIFs for Erlang. 
+pkg_jiffy_homepage = https://github.com/davisp/jiffy +pkg_jiffy_fetch = git +pkg_jiffy_repo = https://github.com/davisp/jiffy +pkg_jiffy_commit = master + +PACKAGES += jiffy_v +pkg_jiffy_v_name = jiffy_v +pkg_jiffy_v_description = JSON validation utility +pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_fetch = git +pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_commit = master + +PACKAGES += jobs +pkg_jobs_name = jobs +pkg_jobs_description = a Job scheduler for load regulation +pkg_jobs_homepage = https://github.com/esl/jobs +pkg_jobs_fetch = git +pkg_jobs_repo = https://github.com/esl/jobs +pkg_jobs_commit = master + +PACKAGES += joxa +pkg_joxa_name = joxa +pkg_joxa_description = A Modern Lisp for the Erlang VM +pkg_joxa_homepage = https://github.com/joxa/joxa +pkg_joxa_fetch = git +pkg_joxa_repo = https://github.com/joxa/joxa +pkg_joxa_commit = master + +PACKAGES += json +pkg_json_name = json +pkg_json_description = a high level json library for erlang (17.0+) +pkg_json_homepage = https://github.com/talentdeficit/json +pkg_json_fetch = git +pkg_json_repo = https://github.com/talentdeficit/json +pkg_json_commit = master + +PACKAGES += json_rec +pkg_json_rec_name = json_rec +pkg_json_rec_description = JSON to erlang record +pkg_json_rec_homepage = https://github.com/justinkirby/json_rec +pkg_json_rec_fetch = git +pkg_json_rec_repo = https://github.com/justinkirby/json_rec +pkg_json_rec_commit = master + +PACKAGES += jsone +pkg_jsone_name = jsone +pkg_jsone_description = An Erlang library for encoding, decoding JSON data. +pkg_jsone_homepage = https://github.com/sile/jsone.git +pkg_jsone_fetch = git +pkg_jsone_repo = https://github.com/sile/jsone.git +pkg_jsone_commit = master + +PACKAGES += jsonerl +pkg_jsonerl_name = jsonerl +pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder +pkg_jsonerl_homepage = https://github.com/lambder/jsonerl +pkg_jsonerl_fetch = git +pkg_jsonerl_repo = https://github.com/lambder/jsonerl +pkg_jsonerl_commit = master + +PACKAGES += jsonpath +pkg_jsonpath_name = jsonpath +pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation +pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath +pkg_jsonpath_fetch = git +pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath +pkg_jsonpath_commit = master + +PACKAGES += jsonx +pkg_jsonx_name = jsonx +pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. +pkg_jsonx_homepage = https://github.com/iskra/jsonx +pkg_jsonx_fetch = git +pkg_jsonx_repo = https://github.com/iskra/jsonx +pkg_jsonx_commit = master + +PACKAGES += jsx +pkg_jsx_name = jsx +pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON. 
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx +pkg_jsx_fetch = git +pkg_jsx_repo = https://github.com/talentdeficit/jsx +pkg_jsx_commit = master + +PACKAGES += kafka +pkg_kafka_name = kafka +pkg_kafka_description = Kafka consumer and producer in Erlang +pkg_kafka_homepage = https://github.com/wooga/kafka-erlang +pkg_kafka_fetch = git +pkg_kafka_repo = https://github.com/wooga/kafka-erlang +pkg_kafka_commit = master + +PACKAGES += kafka_protocol +pkg_kafka_protocol_name = kafka_protocol +pkg_kafka_protocol_description = Kafka protocol Erlang library +pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol +pkg_kafka_protocol_fetch = git +pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git +pkg_kafka_protocol_commit = master + +PACKAGES += kai +pkg_kai_name = kai +pkg_kai_description = DHT storage by Takeshi Inoue +pkg_kai_homepage = https://github.com/synrc/kai +pkg_kai_fetch = git +pkg_kai_repo = https://github.com/synrc/kai +pkg_kai_commit = master + +PACKAGES += katja +pkg_katja_name = katja +pkg_katja_description = A simple Riemann client written in Erlang. +pkg_katja_homepage = https://github.com/nifoc/katja +pkg_katja_fetch = git +pkg_katja_repo = https://github.com/nifoc/katja +pkg_katja_commit = master + +PACKAGES += kdht +pkg_kdht_name = kdht +pkg_kdht_description = kdht is an erlang DHT implementation +pkg_kdht_homepage = https://github.com/kevinlynx/kdht +pkg_kdht_fetch = git +pkg_kdht_repo = https://github.com/kevinlynx/kdht +pkg_kdht_commit = master + +PACKAGES += key2value +pkg_key2value_name = key2value +pkg_key2value_description = Erlang 2-way map +pkg_key2value_homepage = https://github.com/okeuday/key2value +pkg_key2value_fetch = git +pkg_key2value_repo = https://github.com/okeuday/key2value +pkg_key2value_commit = master + +PACKAGES += keys1value +pkg_keys1value_name = keys1value +pkg_keys1value_description = Erlang set associative map for key lists +pkg_keys1value_homepage = https://github.com/okeuday/keys1value +pkg_keys1value_fetch = git +pkg_keys1value_repo = https://github.com/okeuday/keys1value +pkg_keys1value_commit = master + +PACKAGES += kinetic +pkg_kinetic_name = kinetic +pkg_kinetic_description = Erlang Kinesis Client +pkg_kinetic_homepage = https://github.com/AdRoll/kinetic +pkg_kinetic_fetch = git +pkg_kinetic_repo = https://github.com/AdRoll/kinetic +pkg_kinetic_commit = master + +PACKAGES += kjell +pkg_kjell_name = kjell +pkg_kjell_description = Erlang Shell +pkg_kjell_homepage = https://github.com/karlll/kjell +pkg_kjell_fetch = git +pkg_kjell_repo = https://github.com/karlll/kjell +pkg_kjell_commit = master + +PACKAGES += kraken +pkg_kraken_name = kraken +pkg_kraken_description = Distributed Pubsub Server for Realtime Apps +pkg_kraken_homepage = https://github.com/Asana/kraken +pkg_kraken_fetch = git +pkg_kraken_repo = https://github.com/Asana/kraken +pkg_kraken_commit = master + +PACKAGES += kucumberl +pkg_kucumberl_name = kucumberl +pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber +pkg_kucumberl_homepage = https://github.com/openshine/kucumberl +pkg_kucumberl_fetch = git +pkg_kucumberl_repo = https://github.com/openshine/kucumberl +pkg_kucumberl_commit = master + +PACKAGES += kvc +pkg_kvc_name = kvc +pkg_kvc_description = KVC - Key Value Coding for Erlang data structures +pkg_kvc_homepage = https://github.com/etrepum/kvc +pkg_kvc_fetch = git +pkg_kvc_repo = https://github.com/etrepum/kvc +pkg_kvc_commit = master + +PACKAGES += kvlists +pkg_kvlists_name = kvlists 
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang +pkg_kvlists_homepage = https://github.com/jcomellas/kvlists +pkg_kvlists_fetch = git +pkg_kvlists_repo = https://github.com/jcomellas/kvlists +pkg_kvlists_commit = master + +PACKAGES += kvs +pkg_kvs_name = kvs +pkg_kvs_description = Container and Iterator +pkg_kvs_homepage = https://github.com/synrc/kvs +pkg_kvs_fetch = git +pkg_kvs_repo = https://github.com/synrc/kvs +pkg_kvs_commit = master + +PACKAGES += lager +pkg_lager_name = lager +pkg_lager_description = A logging framework for Erlang/OTP. +pkg_lager_homepage = https://github.com/erlang-lager/lager +pkg_lager_fetch = git +pkg_lager_repo = https://github.com/erlang-lager/lager +pkg_lager_commit = master + +PACKAGES += lager_amqp_backend +pkg_lager_amqp_backend_name = lager_amqp_backend +pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend +pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_fetch = git +pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_commit = master + +PACKAGES += lager_syslog +pkg_lager_syslog_name = lager_syslog +pkg_lager_syslog_description = Syslog backend for lager +pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog +pkg_lager_syslog_fetch = git +pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog +pkg_lager_syslog_commit = master + +PACKAGES += lambdapad +pkg_lambdapad_name = lambdapad +pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang. +pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad +pkg_lambdapad_fetch = git +pkg_lambdapad_repo = https://github.com/gar1t/lambdapad +pkg_lambdapad_commit = master + +PACKAGES += lasp +pkg_lasp_name = lasp +pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations +pkg_lasp_homepage = http://lasp-lang.org/ +pkg_lasp_fetch = git +pkg_lasp_repo = https://github.com/lasp-lang/lasp +pkg_lasp_commit = master + +PACKAGES += lasse +pkg_lasse_name = lasse +pkg_lasse_description = SSE handler for Cowboy +pkg_lasse_homepage = https://github.com/inaka/lasse +pkg_lasse_fetch = git +pkg_lasse_repo = https://github.com/inaka/lasse +pkg_lasse_commit = master + +PACKAGES += ldap +pkg_ldap_name = ldap +pkg_ldap_description = LDAP server written in Erlang +pkg_ldap_homepage = https://github.com/spawnproc/ldap +pkg_ldap_fetch = git +pkg_ldap_repo = https://github.com/spawnproc/ldap +pkg_ldap_commit = master + +PACKAGES += lethink +pkg_lethink_name = lethink +pkg_lethink_description = erlang driver for rethinkdb +pkg_lethink_homepage = https://github.com/taybin/lethink +pkg_lethink_fetch = git +pkg_lethink_repo = https://github.com/taybin/lethink +pkg_lethink_commit = master + +PACKAGES += lfe +pkg_lfe_name = lfe +pkg_lfe_description = Lisp Flavoured Erlang (LFE) +pkg_lfe_homepage = https://github.com/rvirding/lfe +pkg_lfe_fetch = git +pkg_lfe_repo = https://github.com/rvirding/lfe +pkg_lfe_commit = master + +PACKAGES += ling +pkg_ling_name = ling +pkg_ling_description = Erlang on Xen +pkg_ling_homepage = https://github.com/cloudozer/ling +pkg_ling_fetch = git +pkg_ling_repo = https://github.com/cloudozer/ling +pkg_ling_commit = master + +PACKAGES += live +pkg_live_name = live +pkg_live_description = Automated module and configuration reloader. 
+pkg_live_homepage = http://ninenines.eu +pkg_live_fetch = git +pkg_live_repo = https://github.com/ninenines/live +pkg_live_commit = master + +PACKAGES += lmq +pkg_lmq_name = lmq +pkg_lmq_description = Lightweight Message Queue +pkg_lmq_homepage = https://github.com/iij/lmq +pkg_lmq_fetch = git +pkg_lmq_repo = https://github.com/iij/lmq +pkg_lmq_commit = master + +PACKAGES += locker +pkg_locker_name = locker +pkg_locker_description = Atomic distributed 'check and set' for short-lived keys +pkg_locker_homepage = https://github.com/wooga/locker +pkg_locker_fetch = git +pkg_locker_repo = https://github.com/wooga/locker +pkg_locker_commit = master + +PACKAGES += locks +pkg_locks_name = locks +pkg_locks_description = A scalable, deadlock-resolving resource locker +pkg_locks_homepage = https://github.com/uwiger/locks +pkg_locks_fetch = git +pkg_locks_repo = https://github.com/uwiger/locks +pkg_locks_commit = master + +PACKAGES += log4erl +pkg_log4erl_name = log4erl +pkg_log4erl_description = A logger for erlang in the spirit of Log4J. +pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl +pkg_log4erl_fetch = git +pkg_log4erl_repo = https://github.com/ahmednawras/log4erl +pkg_log4erl_commit = master + +PACKAGES += lol +pkg_lol_name = lol +pkg_lol_description = Lisp on erLang, and programming is fun again +pkg_lol_homepage = https://github.com/b0oh/lol +pkg_lol_fetch = git +pkg_lol_repo = https://github.com/b0oh/lol +pkg_lol_commit = master + +PACKAGES += lucid +pkg_lucid_name = lucid +pkg_lucid_description = HTTP/2 server written in Erlang +pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid +pkg_lucid_fetch = git +pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid +pkg_lucid_commit = master + +PACKAGES += luerl +pkg_luerl_name = luerl +pkg_luerl_description = Lua in Erlang +pkg_luerl_homepage = https://github.com/rvirding/luerl +pkg_luerl_fetch = git +pkg_luerl_repo = https://github.com/rvirding/luerl +pkg_luerl_commit = develop + +PACKAGES += luwak +pkg_luwak_name = luwak +pkg_luwak_description = Large-object storage interface for Riak +pkg_luwak_homepage = https://github.com/basho/luwak +pkg_luwak_fetch = git +pkg_luwak_repo = https://github.com/basho/luwak +pkg_luwak_commit = master + +PACKAGES += lux +pkg_lux_name = lux +pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands +pkg_lux_homepage = https://github.com/hawk/lux +pkg_lux_fetch = git +pkg_lux_repo = https://github.com/hawk/lux +pkg_lux_commit = master + +PACKAGES += machi +pkg_machi_name = machi +pkg_machi_description = Machi file store +pkg_machi_homepage = https://github.com/basho/machi +pkg_machi_fetch = git +pkg_machi_repo = https://github.com/basho/machi +pkg_machi_commit = master + +PACKAGES += mad +pkg_mad_name = mad +pkg_mad_description = Small and Fast Rebar Replacement +pkg_mad_homepage = https://github.com/synrc/mad +pkg_mad_fetch = git +pkg_mad_repo = https://github.com/synrc/mad +pkg_mad_commit = master + +PACKAGES += marina +pkg_marina_name = marina +pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client +pkg_marina_homepage = https://github.com/lpgauth/marina +pkg_marina_fetch = git +pkg_marina_repo = https://github.com/lpgauth/marina +pkg_marina_commit = master + +PACKAGES += mavg +pkg_mavg_name = mavg +pkg_mavg_description = Erlang :: Exponential moving average library +pkg_mavg_homepage = https://github.com/EchoTeam/mavg +pkg_mavg_fetch = git +pkg_mavg_repo = https://github.com/EchoTeam/mavg +pkg_mavg_commit = 
master + +PACKAGES += mc_erl +pkg_mc_erl_name = mc_erl +pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. +pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl +pkg_mc_erl_fetch = git +pkg_mc_erl_repo = https://github.com/clonejo/mc-erl +pkg_mc_erl_commit = master + +PACKAGES += mcd +pkg_mcd_name = mcd +pkg_mcd_description = Fast memcached protocol client in pure Erlang +pkg_mcd_homepage = https://github.com/EchoTeam/mcd +pkg_mcd_fetch = git +pkg_mcd_repo = https://github.com/EchoTeam/mcd +pkg_mcd_commit = master + +PACKAGES += mcerlang +pkg_mcerlang_name = mcerlang +pkg_mcerlang_description = The McErlang model checker for Erlang +pkg_mcerlang_homepage = https://github.com/fredlund/McErlang +pkg_mcerlang_fetch = git +pkg_mcerlang_repo = https://github.com/fredlund/McErlang +pkg_mcerlang_commit = master + +PACKAGES += meck +pkg_meck_name = meck +pkg_meck_description = A mocking library for Erlang +pkg_meck_homepage = https://github.com/eproxus/meck +pkg_meck_fetch = git +pkg_meck_repo = https://github.com/eproxus/meck +pkg_meck_commit = master + +PACKAGES += mekao +pkg_mekao_name = mekao +pkg_mekao_description = SQL constructor +pkg_mekao_homepage = https://github.com/ddosia/mekao +pkg_mekao_fetch = git +pkg_mekao_repo = https://github.com/ddosia/mekao +pkg_mekao_commit = master + +PACKAGES += memo +pkg_memo_name = memo +pkg_memo_description = Erlang memoization server +pkg_memo_homepage = https://github.com/tuncer/memo +pkg_memo_fetch = git +pkg_memo_repo = https://github.com/tuncer/memo +pkg_memo_commit = master + +PACKAGES += merge_index +pkg_merge_index_name = merge_index +pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop). +pkg_merge_index_homepage = https://github.com/basho/merge_index +pkg_merge_index_fetch = git +pkg_merge_index_repo = https://github.com/basho/merge_index +pkg_merge_index_commit = master + +PACKAGES += merl +pkg_merl_name = merl +pkg_merl_description = Metaprogramming in Erlang +pkg_merl_homepage = https://github.com/richcarl/merl +pkg_merl_fetch = git +pkg_merl_repo = https://github.com/richcarl/merl +pkg_merl_commit = master + +PACKAGES += mimerl +pkg_mimerl_name = mimerl +pkg_mimerl_description = library to handle mimetypes +pkg_mimerl_homepage = https://github.com/benoitc/mimerl +pkg_mimerl_fetch = git +pkg_mimerl_repo = https://github.com/benoitc/mimerl +pkg_mimerl_commit = master + +PACKAGES += mimetypes +pkg_mimetypes_name = mimetypes +pkg_mimetypes_description = Erlang MIME types library +pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes +pkg_mimetypes_fetch = git +pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes +pkg_mimetypes_commit = master + +PACKAGES += mixer +pkg_mixer_name = mixer +pkg_mixer_description = Mix in functions from other modules +pkg_mixer_homepage = https://github.com/chef/mixer +pkg_mixer_fetch = git +pkg_mixer_repo = https://github.com/chef/mixer +pkg_mixer_commit = master + +PACKAGES += mochiweb +pkg_mochiweb_name = mochiweb +pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers. 
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb +pkg_mochiweb_fetch = git +pkg_mochiweb_repo = https://github.com/mochi/mochiweb +pkg_mochiweb_commit = master + +PACKAGES += mochiweb_xpath +pkg_mochiweb_xpath_name = mochiweb_xpath +pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser +pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_fetch = git +pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_commit = master + +PACKAGES += mockgyver +pkg_mockgyver_name = mockgyver +pkg_mockgyver_description = A mocking library for Erlang +pkg_mockgyver_homepage = https://github.com/klajo/mockgyver +pkg_mockgyver_fetch = git +pkg_mockgyver_repo = https://github.com/klajo/mockgyver +pkg_mockgyver_commit = master + +PACKAGES += modlib +pkg_modlib_name = modlib +pkg_modlib_description = Web framework based on Erlang's inets httpd +pkg_modlib_homepage = https://github.com/gar1t/modlib +pkg_modlib_fetch = git +pkg_modlib_repo = https://github.com/gar1t/modlib +pkg_modlib_commit = master + +PACKAGES += mongodb +pkg_mongodb_name = mongodb +pkg_mongodb_description = MongoDB driver for Erlang +pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_fetch = git +pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_commit = master + +PACKAGES += mongooseim +pkg_mongooseim_name = mongooseim +pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions +pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform +pkg_mongooseim_fetch = git +pkg_mongooseim_repo = https://github.com/esl/MongooseIM +pkg_mongooseim_commit = master + +PACKAGES += moyo +pkg_moyo_name = moyo +pkg_moyo_description = Erlang utility functions library +pkg_moyo_homepage = https://github.com/dwango/moyo +pkg_moyo_fetch = git +pkg_moyo_repo = https://github.com/dwango/moyo +pkg_moyo_commit = master + +PACKAGES += msgpack +pkg_msgpack_name = msgpack +pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang +pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_fetch = git +pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_commit = master + +PACKAGES += mu2 +pkg_mu2_name = mu2 +pkg_mu2_description = Erlang mutation testing tool +pkg_mu2_homepage = https://github.com/ramsay-t/mu2 +pkg_mu2_fetch = git +pkg_mu2_repo = https://github.com/ramsay-t/mu2 +pkg_mu2_commit = master + +PACKAGES += mustache +pkg_mustache_name = mustache +pkg_mustache_description = Mustache template engine for Erlang. 
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl +pkg_mustache_fetch = git +pkg_mustache_repo = https://github.com/mojombo/mustache.erl +pkg_mustache_commit = master + +PACKAGES += myproto +pkg_myproto_name = myproto +pkg_myproto_description = MySQL Server Protocol in Erlang +pkg_myproto_homepage = https://github.com/altenwald/myproto +pkg_myproto_fetch = git +pkg_myproto_repo = https://github.com/altenwald/myproto +pkg_myproto_commit = master + +PACKAGES += mysql +pkg_mysql_name = mysql +pkg_mysql_description = MySQL client library for Erlang/OTP +pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp +pkg_mysql_fetch = git +pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp +pkg_mysql_commit = 1.5.1 + +PACKAGES += n2o +pkg_n2o_name = n2o +pkg_n2o_description = WebSocket Application Server +pkg_n2o_homepage = https://github.com/5HT/n2o +pkg_n2o_fetch = git +pkg_n2o_repo = https://github.com/5HT/n2o +pkg_n2o_commit = master + +PACKAGES += nat_upnp +pkg_nat_upnp_name = nat_upnp +pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD +pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp +pkg_nat_upnp_fetch = git +pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp +pkg_nat_upnp_commit = master + +PACKAGES += neo4j +pkg_neo4j_name = neo4j +pkg_neo4j_description = Erlang client library for Neo4J. +pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_fetch = git +pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_commit = master + +PACKAGES += neotoma +pkg_neotoma_name = neotoma +pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars. +pkg_neotoma_homepage = https://github.com/seancribbs/neotoma +pkg_neotoma_fetch = git +pkg_neotoma_repo = https://github.com/seancribbs/neotoma +pkg_neotoma_commit = master + +PACKAGES += newrelic +pkg_newrelic_name = newrelic +pkg_newrelic_description = Erlang library for sending metrics to New Relic +pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang +pkg_newrelic_fetch = git +pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang +pkg_newrelic_commit = master + +PACKAGES += nifty +pkg_nifty_name = nifty +pkg_nifty_description = Erlang NIF wrapper generator +pkg_nifty_homepage = https://github.com/parapluu/nifty +pkg_nifty_fetch = git +pkg_nifty_repo = https://github.com/parapluu/nifty +pkg_nifty_commit = master + +PACKAGES += nitrogen_core +pkg_nitrogen_core_name = nitrogen_core +pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/ +pkg_nitrogen_core_fetch = git +pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core +pkg_nitrogen_core_commit = master + +PACKAGES += nkbase +pkg_nkbase_name = nkbase +pkg_nkbase_description = NkBASE distributed database +pkg_nkbase_homepage = https://github.com/Nekso/nkbase +pkg_nkbase_fetch = git +pkg_nkbase_repo = https://github.com/Nekso/nkbase +pkg_nkbase_commit = develop + +PACKAGES += nkdocker +pkg_nkdocker_name = nkdocker +pkg_nkdocker_description = Erlang Docker client +pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker +pkg_nkdocker_fetch = git +pkg_nkdocker_repo = https://github.com/Nekso/nkdocker +pkg_nkdocker_commit = master + +PACKAGES += nkpacket +pkg_nkpacket_name = nkpacket +pkg_nkpacket_description = Generic Erlang transport layer +pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket +pkg_nkpacket_fetch = git +pkg_nkpacket_repo = https://github.com/Nekso/nkpacket +pkg_nkpacket_commit = master + +PACKAGES += nksip +pkg_nksip_name = nksip +pkg_nksip_description = Erlang SIP application server +pkg_nksip_homepage = https://github.com/kalta/nksip +pkg_nksip_fetch = git +pkg_nksip_repo = https://github.com/kalta/nksip +pkg_nksip_commit = master + +PACKAGES += nodefinder +pkg_nodefinder_name = nodefinder +pkg_nodefinder_description = automatic node discovery via UDP multicast +pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder +pkg_nodefinder_fetch = git +pkg_nodefinder_repo = https://github.com/okeuday/nodefinder +pkg_nodefinder_commit = master + +PACKAGES += nprocreg +pkg_nprocreg_name = nprocreg +pkg_nprocreg_description = Minimal Distributed Erlang Process Registry +pkg_nprocreg_homepage = http://nitrogenproject.com/ +pkg_nprocreg_fetch = git +pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg +pkg_nprocreg_commit = master + +PACKAGES += oauth +pkg_oauth_name = oauth +pkg_oauth_description = An Erlang OAuth 1.0 implementation +pkg_oauth_homepage = https://github.com/tim/erlang-oauth +pkg_oauth_fetch = git +pkg_oauth_repo = https://github.com/tim/erlang-oauth +pkg_oauth_commit = master + +PACKAGES += oauth2 +pkg_oauth2_name = oauth2 +pkg_oauth2_description = Erlang OAuth2 implementation +pkg_oauth2_homepage = https://github.com/kivra/oauth2 +pkg_oauth2_fetch = git +pkg_oauth2_repo = https://github.com/kivra/oauth2 +pkg_oauth2_commit = master + +PACKAGES += observer_cli +pkg_observer_cli_name = observer_cli +pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line +pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli +pkg_observer_cli_fetch = git +pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli +pkg_observer_cli_commit = master + +PACKAGES += octopus +pkg_octopus_name = octopus +pkg_octopus_description = Small and flexible pool manager written in Erlang +pkg_octopus_homepage = https://github.com/erlangbureau/octopus +pkg_octopus_fetch = git +pkg_octopus_repo = https://github.com/erlangbureau/octopus +pkg_octopus_commit = master + +PACKAGES += of_protocol +pkg_of_protocol_name = of_protocol +pkg_of_protocol_description = OpenFlow Protocol Library for Erlang +pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_fetch = git +pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_commit = master + +PACKAGES += opencouch +pkg_opencouch_name = couch +pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch +pkg_opencouch_fetch = git +pkg_opencouch_repo = https://github.com/benoitc/opencouch +pkg_opencouch_commit = master + +PACKAGES += openflow +pkg_openflow_name = openflow +pkg_openflow_description = An OpenFlow controller written in pure erlang +pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_fetch = git +pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_commit = master + +PACKAGES += openid +pkg_openid_name = openid +pkg_openid_description = Erlang OpenID +pkg_openid_homepage = https://github.com/brendonh/erl_openid +pkg_openid_fetch = git +pkg_openid_repo = https://github.com/brendonh/erl_openid +pkg_openid_commit = master + +PACKAGES += openpoker +pkg_openpoker_name = openpoker +pkg_openpoker_description = Genesis Texas hold'em Game Server +pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker +pkg_openpoker_fetch = git +pkg_openpoker_repo = https://github.com/hpyhacking/openpoker +pkg_openpoker_commit = master + +PACKAGES += otpbp +pkg_otpbp_name = otpbp +pkg_otpbp_description = Parse transformer for use new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19) +pkg_otpbp_homepage = https://github.com/Ledest/otpbp +pkg_otpbp_fetch = git +pkg_otpbp_repo = https://github.com/Ledest/otpbp +pkg_otpbp_commit = master + +PACKAGES += pal +pkg_pal_name = pal +pkg_pal_description = Pragmatic Authentication Library +pkg_pal_homepage = https://github.com/manifest/pal +pkg_pal_fetch = git +pkg_pal_repo = https://github.com/manifest/pal +pkg_pal_commit = master + +PACKAGES += parse_trans +pkg_parse_trans_name = parse_trans +pkg_parse_trans_description = Parse transform utilities for Erlang +pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans +pkg_parse_trans_fetch = git +pkg_parse_trans_repo = https://github.com/uwiger/parse_trans +pkg_parse_trans_commit = master + +PACKAGES += parsexml +pkg_parsexml_name = parsexml +pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API +pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml +pkg_parsexml_fetch = git +pkg_parsexml_repo = https://github.com/maxlapshin/parsexml +pkg_parsexml_commit = master + +PACKAGES += partisan +pkg_partisan_name = partisan +pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir. +pkg_partisan_homepage = http://partisan.cloud +pkg_partisan_fetch = git +pkg_partisan_repo = https://github.com/lasp-lang/partisan +pkg_partisan_commit = master + +PACKAGES += pegjs +pkg_pegjs_name = pegjs +pkg_pegjs_description = An implementation of PEG.js grammar for Erlang. 
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs +pkg_pegjs_fetch = git +pkg_pegjs_repo = https://github.com/dmitriid/pegjs +pkg_pegjs_commit = master + +PACKAGES += percept2 +pkg_percept2_name = percept2 +pkg_percept2_description = Concurrent profiling tool for Erlang +pkg_percept2_homepage = https://github.com/huiqing/percept2 +pkg_percept2_fetch = git +pkg_percept2_repo = https://github.com/huiqing/percept2 +pkg_percept2_commit = master + +PACKAGES += pgo +pkg_pgo_name = pgo +pkg_pgo_description = Erlang Postgres client and connection pool +pkg_pgo_homepage = https://github.com/erleans/pgo.git +pkg_pgo_fetch = git +pkg_pgo_repo = https://github.com/erleans/pgo.git +pkg_pgo_commit = master + +PACKAGES += pgsql +pkg_pgsql_name = pgsql +pkg_pgsql_description = Erlang PostgreSQL driver +pkg_pgsql_homepage = https://github.com/semiocast/pgsql +pkg_pgsql_fetch = git +pkg_pgsql_repo = https://github.com/semiocast/pgsql +pkg_pgsql_commit = master + +PACKAGES += pkgx +pkg_pkgx_name = pkgx +pkg_pkgx_description = Build .deb packages from Erlang releases +pkg_pkgx_homepage = https://github.com/arjan/pkgx +pkg_pkgx_fetch = git +pkg_pkgx_repo = https://github.com/arjan/pkgx +pkg_pkgx_commit = master + +PACKAGES += pkt +pkg_pkt_name = pkt +pkg_pkt_description = Erlang network protocol library +pkg_pkt_homepage = https://github.com/msantos/pkt +pkg_pkt_fetch = git +pkg_pkt_repo = https://github.com/msantos/pkt +pkg_pkt_commit = master + +PACKAGES += plain_fsm +pkg_plain_fsm_name = plain_fsm +pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs. +pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_fetch = git +pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_commit = master + +PACKAGES += plumtree +pkg_plumtree_name = plumtree +pkg_plumtree_description = Epidemic Broadcast Trees +pkg_plumtree_homepage = https://github.com/helium/plumtree +pkg_plumtree_fetch = git +pkg_plumtree_repo = https://github.com/helium/plumtree +pkg_plumtree_commit = master + +PACKAGES += pmod_transform +pkg_pmod_transform_name = pmod_transform +pkg_pmod_transform_description = Parse transform for parameterized modules +pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform +pkg_pmod_transform_fetch = git +pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform +pkg_pmod_transform_commit = master + +PACKAGES += pobox +pkg_pobox_name = pobox +pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang +pkg_pobox_homepage = https://github.com/ferd/pobox +pkg_pobox_fetch = git +pkg_pobox_repo = https://github.com/ferd/pobox +pkg_pobox_commit = master + +PACKAGES += ponos +pkg_ponos_name = ponos +pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang +pkg_ponos_homepage = https://github.com/klarna/ponos +pkg_ponos_fetch = git +pkg_ponos_repo = https://github.com/klarna/ponos +pkg_ponos_commit = master + +PACKAGES += poolboy +pkg_poolboy_name = poolboy +pkg_poolboy_description = A hunky Erlang worker pool factory +pkg_poolboy_homepage = https://github.com/devinus/poolboy +pkg_poolboy_fetch = git +pkg_poolboy_repo = https://github.com/devinus/poolboy +pkg_poolboy_commit = master + +PACKAGES += pooler +pkg_pooler_name = pooler +pkg_pooler_description = An OTP Process Pool Application +pkg_pooler_homepage = https://github.com/seth/pooler +pkg_pooler_fetch = git +pkg_pooler_repo = https://github.com/seth/pooler +pkg_pooler_commit = master + +PACKAGES += 
pqueue +pkg_pqueue_name = pqueue +pkg_pqueue_description = Erlang Priority Queues +pkg_pqueue_homepage = https://github.com/okeuday/pqueue +pkg_pqueue_fetch = git +pkg_pqueue_repo = https://github.com/okeuday/pqueue +pkg_pqueue_commit = master + +PACKAGES += procket +pkg_procket_name = procket +pkg_procket_description = Erlang interface to low level socket operations +pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket +pkg_procket_fetch = git +pkg_procket_repo = https://github.com/msantos/procket +pkg_procket_commit = master + +PACKAGES += prometheus +pkg_prometheus_name = prometheus +pkg_prometheus_description = Prometheus.io client in Erlang +pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl +pkg_prometheus_fetch = git +pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl +pkg_prometheus_commit = master + +PACKAGES += prop +pkg_prop_name = prop +pkg_prop_description = An Erlang code scaffolding and generator system. +pkg_prop_homepage = https://github.com/nuex/prop +pkg_prop_fetch = git +pkg_prop_repo = https://github.com/nuex/prop +pkg_prop_commit = master + +PACKAGES += proper +pkg_proper_name = proper +pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang. +pkg_proper_homepage = http://proper.softlab.ntua.gr +pkg_proper_fetch = git +pkg_proper_repo = https://github.com/manopapad/proper +pkg_proper_commit = master + +PACKAGES += props +pkg_props_name = props +pkg_props_description = Property structure library +pkg_props_homepage = https://github.com/greyarea/props +pkg_props_fetch = git +pkg_props_repo = https://github.com/greyarea/props +pkg_props_commit = master + +PACKAGES += protobuffs +pkg_protobuffs_name = protobuffs +pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs. +pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_fetch = git +pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_commit = master + +PACKAGES += psycho +pkg_psycho_name = psycho +pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware. +pkg_psycho_homepage = https://github.com/gar1t/psycho +pkg_psycho_fetch = git +pkg_psycho_repo = https://github.com/gar1t/psycho +pkg_psycho_commit = master + +PACKAGES += purity +pkg_purity_name = purity +pkg_purity_description = A side-effect analyzer for Erlang +pkg_purity_homepage = https://github.com/mpitid/purity +pkg_purity_fetch = git +pkg_purity_repo = https://github.com/mpitid/purity +pkg_purity_commit = master + +PACKAGES += push_service +pkg_push_service_name = push_service +pkg_push_service_description = Push service +pkg_push_service_homepage = https://github.com/hairyhum/push_service +pkg_push_service_fetch = git +pkg_push_service_repo = https://github.com/hairyhum/push_service +pkg_push_service_commit = master + +PACKAGES += qdate +pkg_qdate_name = qdate +pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang. 
+pkg_qdate_homepage = https://github.com/choptastic/qdate +pkg_qdate_fetch = git +pkg_qdate_repo = https://github.com/choptastic/qdate +pkg_qdate_commit = master + +PACKAGES += qrcode +pkg_qrcode_name = qrcode +pkg_qrcode_description = QR Code encoder in Erlang +pkg_qrcode_homepage = https://github.com/komone/qrcode +pkg_qrcode_fetch = git +pkg_qrcode_repo = https://github.com/komone/qrcode +pkg_qrcode_commit = master + +PACKAGES += quest +pkg_quest_name = quest +pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang. +pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest +pkg_quest_fetch = git +pkg_quest_repo = https://github.com/eriksoe/ErlangQuest +pkg_quest_commit = master + +PACKAGES += quickrand +pkg_quickrand_name = quickrand +pkg_quickrand_description = Quick Erlang Random Number Generation +pkg_quickrand_homepage = https://github.com/okeuday/quickrand +pkg_quickrand_fetch = git +pkg_quickrand_repo = https://github.com/okeuday/quickrand +pkg_quickrand_commit = master + +PACKAGES += rabbit +pkg_rabbit_name = rabbit +pkg_rabbit_description = RabbitMQ Server +pkg_rabbit_homepage = https://www.rabbitmq.com/ +pkg_rabbit_fetch = git +pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git +pkg_rabbit_commit = master + +PACKAGES += rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak +pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_fetch = git +pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_commit = master + +PACKAGES += rack +pkg_rack_name = rack +pkg_rack_description = Rack handler for erlang +pkg_rack_homepage = https://github.com/erlyvideo/rack +pkg_rack_fetch = git +pkg_rack_repo = https://github.com/erlyvideo/rack +pkg_rack_commit = master + +PACKAGES += radierl +pkg_radierl_name = radierl +pkg_radierl_description = RADIUS protocol stack implemented in Erlang. +pkg_radierl_homepage = https://github.com/vances/radierl +pkg_radierl_fetch = git +pkg_radierl_repo = https://github.com/vances/radierl +pkg_radierl_commit = master + +PACKAGES += rafter +pkg_rafter_name = rafter +pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol +pkg_rafter_homepage = https://github.com/andrewjstone/rafter +pkg_rafter_fetch = git +pkg_rafter_repo = https://github.com/andrewjstone/rafter +pkg_rafter_commit = master + +PACKAGES += ranch +pkg_ranch_name = ranch +pkg_ranch_description = Socket acceptor pool for TCP protocols. +pkg_ranch_homepage = http://ninenines.eu +pkg_ranch_fetch = git +pkg_ranch_repo = https://github.com/ninenines/ranch +pkg_ranch_commit = 1.2.1 + +PACKAGES += rbeacon +pkg_rbeacon_name = rbeacon +pkg_rbeacon_description = LAN discovery and presence in Erlang. +pkg_rbeacon_homepage = https://github.com/refuge/rbeacon +pkg_rbeacon_fetch = git +pkg_rbeacon_repo = https://github.com/refuge/rbeacon +pkg_rbeacon_commit = master + +PACKAGES += rebar +pkg_rebar_name = rebar +pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. 
+pkg_rebar_homepage = http://www.rebar3.org +pkg_rebar_fetch = git +pkg_rebar_repo = https://github.com/rebar/rebar3 +pkg_rebar_commit = master + +PACKAGES += rebus +pkg_rebus_name = rebus +pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang. +pkg_rebus_homepage = https://github.com/olle/rebus +pkg_rebus_fetch = git +pkg_rebus_repo = https://github.com/olle/rebus +pkg_rebus_commit = master + +PACKAGES += rec2json +pkg_rec2json_name = rec2json +pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily. +pkg_rec2json_homepage = https://github.com/lordnull/rec2json +pkg_rec2json_fetch = git +pkg_rec2json_repo = https://github.com/lordnull/rec2json +pkg_rec2json_commit = master + +PACKAGES += recon +pkg_recon_name = recon +pkg_recon_description = Collection of functions and scripts to debug Erlang in production. +pkg_recon_homepage = https://github.com/ferd/recon +pkg_recon_fetch = git +pkg_recon_repo = https://github.com/ferd/recon +pkg_recon_commit = master + +PACKAGES += record_info +pkg_record_info_name = record_info +pkg_record_info_description = Convert between record and proplist +pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info +pkg_record_info_fetch = git +pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info +pkg_record_info_commit = master + +PACKAGES += redgrid +pkg_redgrid_name = redgrid +pkg_redgrid_description = automatic Erlang node discovery via redis +pkg_redgrid_homepage = https://github.com/jkvor/redgrid +pkg_redgrid_fetch = git +pkg_redgrid_repo = https://github.com/jkvor/redgrid +pkg_redgrid_commit = master + +PACKAGES += redo +pkg_redo_name = redo +pkg_redo_description = pipelined erlang redis client +pkg_redo_homepage = https://github.com/jkvor/redo +pkg_redo_fetch = git +pkg_redo_repo = https://github.com/jkvor/redo +pkg_redo_commit = master + +PACKAGES += reload_mk +pkg_reload_mk_name = reload_mk +pkg_reload_mk_description = Live reload plugin for erlang.mk. +pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk +pkg_reload_mk_fetch = git +pkg_reload_mk_repo = https://github.com/bullno1/reload.mk +pkg_reload_mk_commit = master + +PACKAGES += reltool_util +pkg_reltool_util_name = reltool_util +pkg_reltool_util_description = Erlang reltool utility functionality application +pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util +pkg_reltool_util_fetch = git +pkg_reltool_util_repo = https://github.com/okeuday/reltool_util +pkg_reltool_util_commit = master + +PACKAGES += relx +pkg_relx_name = relx +pkg_relx_description = Sane, simple release creation for Erlang +pkg_relx_homepage = https://github.com/erlware/relx +pkg_relx_fetch = git +pkg_relx_repo = https://github.com/erlware/relx +pkg_relx_commit = master + +PACKAGES += resource_discovery +pkg_resource_discovery_name = resource_discovery +pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. 
+pkg_resource_discovery_homepage = http://erlware.org/ +pkg_resource_discovery_fetch = git +pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery +pkg_resource_discovery_commit = master + +PACKAGES += restc +pkg_restc_name = restc +pkg_restc_description = Erlang Rest Client +pkg_restc_homepage = https://github.com/kivra/restclient +pkg_restc_fetch = git +pkg_restc_repo = https://github.com/kivra/restclient +pkg_restc_commit = master + +PACKAGES += rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation. +pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_fetch = git +pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_commit = master + +PACKAGES += riak_control +pkg_riak_control_name = riak_control +pkg_riak_control_description = Webmachine-based administration interface for Riak. +pkg_riak_control_homepage = https://github.com/basho/riak_control +pkg_riak_control_fetch = git +pkg_riak_control_repo = https://github.com/basho/riak_control +pkg_riak_control_commit = master + +PACKAGES += riak_core +pkg_riak_core_name = riak_core +pkg_riak_core_description = Distributed systems infrastructure used by Riak. +pkg_riak_core_homepage = https://github.com/basho/riak_core +pkg_riak_core_fetch = git +pkg_riak_core_repo = https://github.com/basho/riak_core +pkg_riak_core_commit = master + +PACKAGES += riak_dt +pkg_riak_dt_name = riak_dt +pkg_riak_dt_description = Convergent replicated datatypes in Erlang +pkg_riak_dt_homepage = https://github.com/basho/riak_dt +pkg_riak_dt_fetch = git +pkg_riak_dt_repo = https://github.com/basho/riak_dt +pkg_riak_dt_commit = master + +PACKAGES += riak_ensemble +pkg_riak_ensemble_name = riak_ensemble +pkg_riak_ensemble_description = Multi-Paxos framework in Erlang +pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_fetch = git +pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_commit = master + +PACKAGES += riak_kv +pkg_riak_kv_name = riak_kv +pkg_riak_kv_description = Riak Key/Value Store +pkg_riak_kv_homepage = https://github.com/basho/riak_kv +pkg_riak_kv_fetch = git +pkg_riak_kv_repo = https://github.com/basho/riak_kv +pkg_riak_kv_commit = master + +PACKAGES += riak_pg +pkg_riak_pg_name = riak_pg +pkg_riak_pg_description = Distributed process groups with riak_core. 
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_fetch = git +pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_commit = master + +PACKAGES += riak_pipe +pkg_riak_pipe_name = riak_pipe +pkg_riak_pipe_description = Riak Pipelines +pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe +pkg_riak_pipe_fetch = git +pkg_riak_pipe_repo = https://github.com/basho/riak_pipe +pkg_riak_pipe_commit = master + +PACKAGES += riak_sysmon +pkg_riak_sysmon_name = riak_sysmon +pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages +pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_fetch = git +pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_commit = master + +PACKAGES += riak_test +pkg_riak_test_name = riak_test +pkg_riak_test_description = I'm in your cluster, testing your riaks +pkg_riak_test_homepage = https://github.com/basho/riak_test +pkg_riak_test_fetch = git +pkg_riak_test_repo = https://github.com/basho/riak_test +pkg_riak_test_commit = master + +PACKAGES += riakc +pkg_riakc_name = riakc +pkg_riakc_description = Erlang clients for Riak. +pkg_riakc_homepage = https://github.com/basho/riak-erlang-client +pkg_riakc_fetch = git +pkg_riakc_repo = https://github.com/basho/riak-erlang-client +pkg_riakc_commit = master + +PACKAGES += riakhttpc +pkg_riakhttpc_name = riakhttpc +pkg_riakhttpc_description = Riak Erlang client using the HTTP interface +pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_fetch = git +pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_commit = master + +PACKAGES += riaknostic +pkg_riaknostic_name = riaknostic +pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap +pkg_riaknostic_homepage = https://github.com/basho/riaknostic +pkg_riaknostic_fetch = git +pkg_riaknostic_repo = https://github.com/basho/riaknostic +pkg_riaknostic_commit = master + +PACKAGES += riakpool +pkg_riakpool_name = riakpool +pkg_riakpool_description = erlang riak client pool +pkg_riakpool_homepage = https://github.com/dweldon/riakpool +pkg_riakpool_fetch = git +pkg_riakpool_repo = https://github.com/dweldon/riakpool +pkg_riakpool_commit = master + +PACKAGES += rivus_cep +pkg_rivus_cep_name = rivus_cep +pkg_rivus_cep_description = Complex event processing in Erlang +pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_fetch = git +pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_commit = master + +PACKAGES += rlimit +pkg_rlimit_name = rlimit +pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent +pkg_rlimit_homepage = https://github.com/jlouis/rlimit +pkg_rlimit_fetch = git +pkg_rlimit_repo = https://github.com/jlouis/rlimit +pkg_rlimit_commit = master + +PACKAGES += rust_mk +pkg_rust_mk_name = rust_mk +pkg_rust_mk_description = Build Rust crates in an Erlang application +pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk +pkg_rust_mk_fetch = git +pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk +pkg_rust_mk_commit = master + +PACKAGES += safetyvalve +pkg_safetyvalve_name = safetyvalve +pkg_safetyvalve_description = A safety valve for your erlang node +pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve +pkg_safetyvalve_fetch = git +pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve +pkg_safetyvalve_commit = master + +PACKAGES += 
seestar +pkg_seestar_name = seestar +pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol +pkg_seestar_homepage = https://github.com/iamaleksey/seestar +pkg_seestar_fetch = git +pkg_seestar_repo = https://github.com/iamaleksey/seestar +pkg_seestar_commit = master + +PACKAGES += service +pkg_service_name = service +pkg_service_description = A minimal Erlang behavior for creating CloudI internal services +pkg_service_homepage = http://cloudi.org/ +pkg_service_fetch = git +pkg_service_repo = https://github.com/CloudI/service +pkg_service_commit = master + +PACKAGES += setup +pkg_setup_name = setup +pkg_setup_description = Generic setup utility for Erlang-based systems +pkg_setup_homepage = https://github.com/uwiger/setup +pkg_setup_fetch = git +pkg_setup_repo = https://github.com/uwiger/setup +pkg_setup_commit = master + +PACKAGES += sext +pkg_sext_name = sext +pkg_sext_description = Sortable Erlang Term Serialization +pkg_sext_homepage = https://github.com/uwiger/sext +pkg_sext_fetch = git +pkg_sext_repo = https://github.com/uwiger/sext +pkg_sext_commit = master + +PACKAGES += sfmt +pkg_sfmt_name = sfmt +pkg_sfmt_description = SFMT pseudo random number generator for Erlang. +pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_fetch = git +pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_commit = master + +PACKAGES += sgte +pkg_sgte_name = sgte +pkg_sgte_description = A simple Erlang Template Engine +pkg_sgte_homepage = https://github.com/filippo/sgte +pkg_sgte_fetch = git +pkg_sgte_repo = https://github.com/filippo/sgte +pkg_sgte_commit = master + +PACKAGES += sheriff +pkg_sheriff_name = sheriff +pkg_sheriff_description = Parse transform for type based validation. +pkg_sheriff_homepage = http://ninenines.eu +pkg_sheriff_fetch = git +pkg_sheriff_repo = https://github.com/extend/sheriff +pkg_sheriff_commit = master + +PACKAGES += shotgun +pkg_shotgun_name = shotgun +pkg_shotgun_description = better than just a gun +pkg_shotgun_homepage = https://github.com/inaka/shotgun +pkg_shotgun_fetch = git +pkg_shotgun_repo = https://github.com/inaka/shotgun +pkg_shotgun_commit = master + +PACKAGES += sidejob +pkg_sidejob_name = sidejob +pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang +pkg_sidejob_homepage = https://github.com/basho/sidejob +pkg_sidejob_fetch = git +pkg_sidejob_repo = https://github.com/basho/sidejob +pkg_sidejob_commit = master + +PACKAGES += sieve +pkg_sieve_name = sieve +pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang +pkg_sieve_homepage = https://github.com/benoitc/sieve +pkg_sieve_fetch = git +pkg_sieve_repo = https://github.com/benoitc/sieve +pkg_sieve_commit = master + +PACKAGES += sighandler +pkg_sighandler_name = sighandler +pkg_sighandler_description = Handle UNIX signals in Erlang +pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler +pkg_sighandler_fetch = git +pkg_sighandler_repo = https://github.com/jkingsbery/sighandler +pkg_sighandler_commit = master + +PACKAGES += simhash +pkg_simhash_name = simhash +pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data. +pkg_simhash_homepage = https://github.com/ferd/simhash +pkg_simhash_fetch = git +pkg_simhash_repo = https://github.com/ferd/simhash +pkg_simhash_commit = master + +PACKAGES += simple_bridge +pkg_simple_bridge_name = simple_bridge +pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_fetch = git +pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_commit = master + +PACKAGES += simple_oauth2 +pkg_simple_oauth2_name = simple_oauth2 +pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured) +pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_fetch = git +pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_commit = master + +PACKAGES += skel +pkg_skel_name = skel +pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang +pkg_skel_homepage = https://github.com/ParaPhrase/skel +pkg_skel_fetch = git +pkg_skel_repo = https://github.com/ParaPhrase/skel +pkg_skel_commit = master + +PACKAGES += slack +pkg_slack_name = slack +pkg_slack_description = Minimal slack notification OTP library. +pkg_slack_homepage = https://github.com/DonBranson/slack +pkg_slack_fetch = git +pkg_slack_repo = https://github.com/DonBranson/slack.git +pkg_slack_commit = master + +PACKAGES += smother +pkg_smother_name = smother +pkg_smother_description = Extended code coverage metrics for Erlang. +pkg_smother_homepage = https://ramsay-t.github.io/Smother/ +pkg_smother_fetch = git +pkg_smother_repo = https://github.com/ramsay-t/Smother +pkg_smother_commit = master + +PACKAGES += snappyer +pkg_snappyer_name = snappyer +pkg_snappyer_description = Snappy as nif for Erlang +pkg_snappyer_homepage = https://github.com/zmstone/snappyer +pkg_snappyer_fetch = git +pkg_snappyer_repo = https://github.com/zmstone/snappyer.git +pkg_snappyer_commit = master + +PACKAGES += social +pkg_social_name = social +pkg_social_description = Cowboy handler for social login via OAuth2 providers +pkg_social_homepage = https://github.com/dvv/social +pkg_social_fetch = git +pkg_social_repo = https://github.com/dvv/social +pkg_social_commit = master + +PACKAGES += spapi_router +pkg_spapi_router_name = spapi_router +pkg_spapi_router_description = Partially-connected Erlang clustering +pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router +pkg_spapi_router_fetch = git +pkg_spapi_router_repo = https://github.com/spilgames/spapi-router +pkg_spapi_router_commit = master + +PACKAGES += sqerl +pkg_sqerl_name = sqerl +pkg_sqerl_description = An Erlang-flavoured SQL DSL +pkg_sqerl_homepage = https://github.com/hairyhum/sqerl +pkg_sqerl_fetch = git +pkg_sqerl_repo = https://github.com/hairyhum/sqerl +pkg_sqerl_commit = master + +PACKAGES += srly +pkg_srly_name = srly +pkg_srly_description = Native Erlang Unix serial interface +pkg_srly_homepage = https://github.com/msantos/srly +pkg_srly_fetch = git +pkg_srly_repo = https://github.com/msantos/srly +pkg_srly_commit = master + +PACKAGES += sshrpc +pkg_sshrpc_name = sshrpc +pkg_sshrpc_description = Erlang SSH RPC module (experimental) +pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_fetch = git +pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_commit = master + +PACKAGES += stable +pkg_stable_name = stable +pkg_stable_description = Library of assorted helpers for Cowboy web server. 
+pkg_stable_homepage = https://github.com/dvv/stable +pkg_stable_fetch = git +pkg_stable_repo = https://github.com/dvv/stable +pkg_stable_commit = master + +PACKAGES += statebox +pkg_statebox_name = statebox +pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak. +pkg_statebox_homepage = https://github.com/mochi/statebox +pkg_statebox_fetch = git +pkg_statebox_repo = https://github.com/mochi/statebox +pkg_statebox_commit = master + +PACKAGES += statebox_riak +pkg_statebox_riak_name = statebox_riak +pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media. +pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak +pkg_statebox_riak_fetch = git +pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak +pkg_statebox_riak_commit = master + +PACKAGES += statman +pkg_statman_name = statman +pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM +pkg_statman_homepage = https://github.com/knutin/statman +pkg_statman_fetch = git +pkg_statman_repo = https://github.com/knutin/statman +pkg_statman_commit = master + +PACKAGES += statsderl +pkg_statsderl_name = statsderl +pkg_statsderl_description = StatsD client (erlang) +pkg_statsderl_homepage = https://github.com/lpgauth/statsderl +pkg_statsderl_fetch = git +pkg_statsderl_repo = https://github.com/lpgauth/statsderl +pkg_statsderl_commit = master + +PACKAGES += stdinout_pool +pkg_stdinout_pool_name = stdinout_pool +pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication. +pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_fetch = git +pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_commit = master + +PACKAGES += stockdb +pkg_stockdb_name = stockdb +pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang +pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb +pkg_stockdb_fetch = git +pkg_stockdb_repo = https://github.com/maxlapshin/stockdb +pkg_stockdb_commit = master + +PACKAGES += stripe +pkg_stripe_name = stripe +pkg_stripe_description = Erlang interface to the stripe.com API +pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang +pkg_stripe_fetch = git +pkg_stripe_repo = https://github.com/mattsta/stripe-erlang +pkg_stripe_commit = v1 + +PACKAGES += subproc +pkg_subproc_name = subproc +pkg_subproc_description = unix subprocess manager with {active,once|false} modes +pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc +pkg_subproc_fetch = git +pkg_subproc_repo = https://github.com/dozzie/subproc +pkg_subproc_commit = v0.1.0 + +PACKAGES += supervisor3 +pkg_supervisor3_name = supervisor3 +pkg_supervisor3_description = OTP supervisor with additional strategies +pkg_supervisor3_homepage = https://github.com/klarna/supervisor3 +pkg_supervisor3_fetch = git +pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git +pkg_supervisor3_commit = master + +PACKAGES += surrogate +pkg_surrogate_name = surrogate +pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes. 
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate +pkg_surrogate_fetch = git +pkg_surrogate_repo = https://github.com/skruger/Surrogate +pkg_surrogate_commit = master + +PACKAGES += swab +pkg_swab_name = swab +pkg_swab_description = General purpose buffer handling module +pkg_swab_homepage = https://github.com/crownedgrouse/swab +pkg_swab_fetch = git +pkg_swab_repo = https://github.com/crownedgrouse/swab +pkg_swab_commit = master + +PACKAGES += swarm +pkg_swarm_name = swarm +pkg_swarm_description = Fast and simple acceptor pool for Erlang +pkg_swarm_homepage = https://github.com/jeremey/swarm +pkg_swarm_fetch = git +pkg_swarm_repo = https://github.com/jeremey/swarm +pkg_swarm_commit = master + +PACKAGES += switchboard +pkg_switchboard_name = switchboard +pkg_switchboard_description = A framework for processing email using worker plugins. +pkg_switchboard_homepage = https://github.com/thusfresh/switchboard +pkg_switchboard_fetch = git +pkg_switchboard_repo = https://github.com/thusfresh/switchboard +pkg_switchboard_commit = master + +PACKAGES += syn +pkg_syn_name = syn +pkg_syn_description = A global Process Registry and Process Group manager for Erlang. +pkg_syn_homepage = https://github.com/ostinelli/syn +pkg_syn_fetch = git +pkg_syn_repo = https://github.com/ostinelli/syn +pkg_syn_commit = master + +PACKAGES += sync +pkg_sync_name = sync +pkg_sync_description = On-the-fly recompiling and reloading in Erlang. +pkg_sync_homepage = https://github.com/rustyio/sync +pkg_sync_fetch = git +pkg_sync_repo = https://github.com/rustyio/sync +pkg_sync_commit = master + +PACKAGES += syntaxerl +pkg_syntaxerl_name = syntaxerl +pkg_syntaxerl_description = Syntax checker for Erlang +pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_fetch = git +pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_commit = master + +PACKAGES += syslog +pkg_syslog_name = syslog +pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3) +pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog +pkg_syslog_fetch = git +pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog +pkg_syslog_commit = master + +PACKAGES += taskforce +pkg_taskforce_name = taskforce +pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks. +pkg_taskforce_homepage = https://github.com/g-andrade/taskforce +pkg_taskforce_fetch = git +pkg_taskforce_repo = https://github.com/g-andrade/taskforce +pkg_taskforce_commit = master + +PACKAGES += tddreloader +pkg_tddreloader_name = tddreloader +pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes +pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader +pkg_tddreloader_fetch = git +pkg_tddreloader_repo = https://github.com/version2beta/tddreloader +pkg_tddreloader_commit = master + +PACKAGES += tempo +pkg_tempo_name = tempo +pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang. +pkg_tempo_homepage = https://github.com/selectel/tempo +pkg_tempo_fetch = git +pkg_tempo_repo = https://github.com/selectel/tempo +pkg_tempo_commit = master + +PACKAGES += ticktick +pkg_ticktick_name = ticktick +pkg_ticktick_description = Ticktick is an id generator for message service. 
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick +pkg_ticktick_fetch = git +pkg_ticktick_repo = https://github.com/ericliang/ticktick +pkg_ticktick_commit = master + +PACKAGES += tinymq +pkg_tinymq_name = tinymq +pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue +pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_fetch = git +pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_commit = master + +PACKAGES += tinymt +pkg_tinymt_name = tinymt +pkg_tinymt_description = TinyMT pseudo random number generator for Erlang. +pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_fetch = git +pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_commit = master + +PACKAGES += tirerl +pkg_tirerl_name = tirerl +pkg_tirerl_description = Erlang interface to Elastic Search +pkg_tirerl_homepage = https://github.com/inaka/tirerl +pkg_tirerl_fetch = git +pkg_tirerl_repo = https://github.com/inaka/tirerl +pkg_tirerl_commit = master + +PACKAGES += toml +pkg_toml_name = toml +pkg_toml_description = TOML (0.4.0) config parser +pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML +pkg_toml_fetch = git +pkg_toml_repo = https://github.com/dozzie/toml +pkg_toml_commit = v0.2.0 + +PACKAGES += traffic_tools +pkg_traffic_tools_name = traffic_tools +pkg_traffic_tools_description = Simple traffic limiting library +pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools +pkg_traffic_tools_fetch = git +pkg_traffic_tools_repo = https://github.com/systra/traffic_tools +pkg_traffic_tools_commit = master + +PACKAGES += trails +pkg_trails_name = trails +pkg_trails_description = A couple of improvements over Cowboy Routes +pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ +pkg_trails_fetch = git +pkg_trails_repo = https://github.com/inaka/cowboy-trails +pkg_trails_commit = master + +PACKAGES += trane +pkg_trane_name = trane +pkg_trane_description = SAX style broken HTML parser in Erlang +pkg_trane_homepage = https://github.com/massemanet/trane +pkg_trane_fetch = git +pkg_trane_repo = https://github.com/massemanet/trane +pkg_trane_commit = master + +PACKAGES += transit +pkg_transit_name = transit +pkg_transit_description = transit format for erlang +pkg_transit_homepage = https://github.com/isaiah/transit-erlang +pkg_transit_fetch = git +pkg_transit_repo = https://github.com/isaiah/transit-erlang +pkg_transit_commit = master + +PACKAGES += trie +pkg_trie_name = trie +pkg_trie_description = Erlang Trie Implementation +pkg_trie_homepage = https://github.com/okeuday/trie +pkg_trie_fetch = git +pkg_trie_repo = https://github.com/okeuday/trie +pkg_trie_commit = master + +PACKAGES += triq +pkg_triq_name = triq +pkg_triq_description = Trifork QuickCheck +pkg_triq_homepage = https://triq.gitlab.io +pkg_triq_fetch = git +pkg_triq_repo = https://gitlab.com/triq/triq.git +pkg_triq_commit = master + +PACKAGES += tunctl +pkg_tunctl_name = tunctl +pkg_tunctl_description = Erlang TUN/TAP interface +pkg_tunctl_homepage = https://github.com/msantos/tunctl +pkg_tunctl_fetch = git +pkg_tunctl_repo = https://github.com/msantos/tunctl +pkg_tunctl_commit = master + +PACKAGES += twerl +pkg_twerl_name = twerl +pkg_twerl_description = Erlang client for the Twitter Streaming API +pkg_twerl_homepage = https://github.com/lucaspiller/twerl +pkg_twerl_fetch = git +pkg_twerl_repo = https://github.com/lucaspiller/twerl +pkg_twerl_commit = oauth + +PACKAGES += twitter_erlang +pkg_twitter_erlang_name = twitter_erlang 
+pkg_twitter_erlang_description = An Erlang twitter client +pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_fetch = git +pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_commit = master + +PACKAGES += ucol_nif +pkg_ucol_nif_name = ucol_nif +pkg_ucol_nif_description = ICU based collation Erlang module +pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif +pkg_ucol_nif_fetch = git +pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif +pkg_ucol_nif_commit = master + +PACKAGES += unicorn +pkg_unicorn_name = unicorn +pkg_unicorn_description = Generic configuration server +pkg_unicorn_homepage = https://github.com/shizzard/unicorn +pkg_unicorn_fetch = git +pkg_unicorn_repo = https://github.com/shizzard/unicorn +pkg_unicorn_commit = master + +PACKAGES += unsplit +pkg_unsplit_name = unsplit +pkg_unsplit_description = Resolves conflicts in Mnesia after network splits +pkg_unsplit_homepage = https://github.com/uwiger/unsplit +pkg_unsplit_fetch = git +pkg_unsplit_repo = https://github.com/uwiger/unsplit +pkg_unsplit_commit = master + +PACKAGES += uuid +pkg_uuid_name = uuid +pkg_uuid_description = Erlang UUID Implementation +pkg_uuid_homepage = https://github.com/okeuday/uuid +pkg_uuid_fetch = git +pkg_uuid_repo = https://github.com/okeuday/uuid +pkg_uuid_commit = master + +PACKAGES += ux +pkg_ux_name = ux +pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation) +pkg_ux_homepage = https://github.com/erlang-unicode/ux +pkg_ux_fetch = git +pkg_ux_repo = https://github.com/erlang-unicode/ux +pkg_ux_commit = master + +PACKAGES += vert +pkg_vert_name = vert +pkg_vert_description = Erlang binding to libvirt virtualization API +pkg_vert_homepage = https://github.com/msantos/erlang-libvirt +pkg_vert_fetch = git +pkg_vert_repo = https://github.com/msantos/erlang-libvirt +pkg_vert_commit = master + +PACKAGES += verx +pkg_verx_name = verx +pkg_verx_description = Erlang implementation of the libvirtd remote protocol +pkg_verx_homepage = https://github.com/msantos/verx +pkg_verx_fetch = git +pkg_verx_repo = https://github.com/msantos/verx +pkg_verx_commit = master + +PACKAGES += vmq_acl +pkg_vmq_acl_name = vmq_acl +pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_acl_homepage = https://verne.mq/ +pkg_vmq_acl_fetch = git +pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl +pkg_vmq_acl_commit = master + +PACKAGES += vmq_bridge +pkg_vmq_bridge_name = vmq_bridge +pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_bridge_homepage = https://verne.mq/ +pkg_vmq_bridge_fetch = git +pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge +pkg_vmq_bridge_commit = master + +PACKAGES += vmq_graphite +pkg_vmq_graphite_name = vmq_graphite +pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_graphite_homepage = https://verne.mq/ +pkg_vmq_graphite_fetch = git +pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite +pkg_vmq_graphite_commit = master + +PACKAGES += vmq_passwd +pkg_vmq_passwd_name = vmq_passwd +pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_passwd_homepage = https://verne.mq/ +pkg_vmq_passwd_fetch = git +pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd +pkg_vmq_passwd_commit = master + +PACKAGES += vmq_server +pkg_vmq_server_name = vmq_server +pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT 
message broker +pkg_vmq_server_homepage = https://verne.mq/ +pkg_vmq_server_fetch = git +pkg_vmq_server_repo = https://github.com/erlio/vmq_server +pkg_vmq_server_commit = master + +PACKAGES += vmq_snmp +pkg_vmq_snmp_name = vmq_snmp +pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_snmp_homepage = https://verne.mq/ +pkg_vmq_snmp_fetch = git +pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp +pkg_vmq_snmp_commit = master + +PACKAGES += vmq_systree +pkg_vmq_systree_name = vmq_systree +pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_systree_homepage = https://verne.mq/ +pkg_vmq_systree_fetch = git +pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree +pkg_vmq_systree_commit = master + +PACKAGES += vmstats +pkg_vmstats_name = vmstats +pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs. +pkg_vmstats_homepage = https://github.com/ferd/vmstats +pkg_vmstats_fetch = git +pkg_vmstats_repo = https://github.com/ferd/vmstats +pkg_vmstats_commit = master + +PACKAGES += walrus +pkg_walrus_name = walrus +pkg_walrus_description = Walrus - Mustache-like Templating +pkg_walrus_homepage = https://github.com/devinus/walrus +pkg_walrus_fetch = git +pkg_walrus_repo = https://github.com/devinus/walrus +pkg_walrus_commit = master + +PACKAGES += webmachine +pkg_webmachine_name = webmachine +pkg_webmachine_description = A REST-based system for building web applications. +pkg_webmachine_homepage = https://github.com/basho/webmachine +pkg_webmachine_fetch = git +pkg_webmachine_repo = https://github.com/basho/webmachine +pkg_webmachine_commit = master + +PACKAGES += websocket_client +pkg_websocket_client_name = websocket_client +pkg_websocket_client_description = Erlang websocket client (ws and wss supported) +pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client +pkg_websocket_client_fetch = git +pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client +pkg_websocket_client_commit = master + +PACKAGES += worker_pool +pkg_worker_pool_name = worker_pool +pkg_worker_pool_description = a simple erlang worker pool +pkg_worker_pool_homepage = https://github.com/inaka/worker_pool +pkg_worker_pool_fetch = git +pkg_worker_pool_repo = https://github.com/inaka/worker_pool +pkg_worker_pool_commit = master + +PACKAGES += wrangler +pkg_wrangler_name = wrangler +pkg_wrangler_description = Import of the Wrangler svn repository. 
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html +pkg_wrangler_fetch = git +pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler +pkg_wrangler_commit = master + +PACKAGES += wsock +pkg_wsock_name = wsock +pkg_wsock_description = Erlang library to build WebSocket clients and servers +pkg_wsock_homepage = https://github.com/madtrick/wsock +pkg_wsock_fetch = git +pkg_wsock_repo = https://github.com/madtrick/wsock +pkg_wsock_commit = master + +PACKAGES += xhttpc +pkg_xhttpc_name = xhttpc +pkg_xhttpc_description = Extensible HTTP Client for Erlang +pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc +pkg_xhttpc_fetch = git +pkg_xhttpc_repo = https://github.com/seriyps/xhttpc +pkg_xhttpc_commit = master + +PACKAGES += xref_runner +pkg_xref_runner_name = xref_runner +pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref) +pkg_xref_runner_homepage = https://github.com/inaka/xref_runner +pkg_xref_runner_fetch = git +pkg_xref_runner_repo = https://github.com/inaka/xref_runner +pkg_xref_runner_commit = master + +PACKAGES += yamerl +pkg_yamerl_name = yamerl +pkg_yamerl_description = YAML 1.2 parser in pure Erlang +pkg_yamerl_homepage = https://github.com/yakaz/yamerl +pkg_yamerl_fetch = git +pkg_yamerl_repo = https://github.com/yakaz/yamerl +pkg_yamerl_commit = master + +PACKAGES += yamler +pkg_yamler_name = yamler +pkg_yamler_description = libyaml-based yaml loader for Erlang +pkg_yamler_homepage = https://github.com/goertzenator/yamler +pkg_yamler_fetch = git +pkg_yamler_repo = https://github.com/goertzenator/yamler +pkg_yamler_commit = master + +PACKAGES += yaws +pkg_yaws_name = yaws +pkg_yaws_description = Yaws webserver +pkg_yaws_homepage = http://yaws.hyber.org +pkg_yaws_fetch = git +pkg_yaws_repo = https://github.com/klacke/yaws +pkg_yaws_commit = master + +PACKAGES += zab_engine +pkg_zab_engine_name = zab_engine +pkg_zab_engine_description = zab propotocol implement by erlang +pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_fetch = git +pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_commit = master + +PACKAGES += zabbix_sender +pkg_zabbix_sender_name = zabbix_sender +pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang +pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender +pkg_zabbix_sender_fetch = git +pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git +pkg_zabbix_sender_commit = master + +PACKAGES += zeta +pkg_zeta_name = zeta +pkg_zeta_description = HTTP access log parser in Erlang +pkg_zeta_homepage = https://github.com/s1n4/zeta +pkg_zeta_fetch = git +pkg_zeta_repo = https://github.com/s1n4/zeta +pkg_zeta_commit = master + +PACKAGES += zippers +pkg_zippers_name = zippers +pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers +pkg_zippers_homepage = https://github.com/ferd/zippers +pkg_zippers_fetch = git +pkg_zippers_repo = https://github.com/ferd/zippers +pkg_zippers_commit = master + +PACKAGES += zlists +pkg_zlists_name = zlists +pkg_zlists_description = Erlang lazy lists library. 
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists +pkg_zlists_fetch = git +pkg_zlists_repo = https://github.com/vjache/erlang-zlists +pkg_zlists_commit = master + +PACKAGES += zraft_lib +pkg_zraft_lib_name = zraft_lib +pkg_zraft_lib_description = Erlang raft consensus protocol implementation +pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_fetch = git +pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_commit = master + +PACKAGES += zucchini +pkg_zucchini_name = zucchini +pkg_zucchini_description = An Erlang INI parser +pkg_zucchini_homepage = https://github.com/devinus/zucchini +pkg_zucchini_fetch = git +pkg_zucchini_repo = https://github.com/devinus/zucchini +pkg_zucchini_commit = master + +# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: search + +define pkg_print + $(verbose) printf "%s\n" \ + $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \ + "App name: $(pkg_$(1)_name)" \ + "Description: $(pkg_$(1)_description)" \ + "Home page: $(pkg_$(1)_homepage)" \ + "Fetch with: $(pkg_$(1)_fetch)" \ + "Repository: $(pkg_$(1)_repo)" \ + "Commit: $(pkg_$(1)_commit)" \ + "" + +endef + +search: +ifdef q + $(foreach p,$(PACKAGES), \ + $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ + $(call pkg_print,$(p)))) +else + $(foreach p,$(PACKAGES),$(call pkg_print,$(p))) +endif + +# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-deps clean-tmp-deps.log + +# Configuration. + +ifdef OTP_DEPS +$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.) +endif + +IGNORE_DEPS ?= +export IGNORE_DEPS + +APPS_DIR ?= $(CURDIR)/apps +export APPS_DIR + +DEPS_DIR ?= $(CURDIR)/deps +export DEPS_DIR + +REBAR_DEPS_DIR = $(DEPS_DIR) +export REBAR_DEPS_DIR + +REBAR_GIT ?= https://github.com/rebar/rebar +REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01 + +# External "early" plugins (see core/plugins.mk for regular plugins). +# They both use the core_dep_plugin macro. + +define core_dep_plugin +ifeq ($(2),$(PROJECT)) +-include $$(patsubst $(PROJECT)/%,%,$(1)) +else +-include $(DEPS_DIR)/$(1) + +$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; +endif +endef + +DEP_EARLY_PLUGINS ?= + +$(foreach p,$(DEP_EARLY_PLUGINS),\ + $(eval $(if $(findstring /,$p),\ + $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ + $(call core_dep_plugin,$p/early-plugins.mk,$p)))) + +# Query functions. 
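As a rough sketch of how the package index above and the query helpers defined next are typically exercised (the dependency name and URL below are illustrative placeholders, not taken from this diff):

    # Filter the package index by name or description, e.g. for the
    # statman entry listed above; "make search" with no q prints all entries:
    make search q=statman

    # For a dependency declared in a project Makefile as
    #   DEPS = cowlib
    #   dep_cowlib = git https://github.com/ninenines/cowlib master
    # the helpers below would resolve to:
    #   $(call query_fetch_method,cowlib) -> git
    #   $(call query_repo,cowlib)         -> https://github.com/ninenines/cowlib
    #   $(call query_version,cowlib)      -> master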
+ +query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1))) +_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail)) +_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail) + +query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) + +query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1))) +_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1))) + +query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo)) +query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1))) +query_repo_git-subfolder = $(call query_repo_git,$(1)) +query_repo_git-submodule = - +query_repo_hg = $(call query_repo_default,$(1)) +query_repo_svn = $(call query_repo_default,$(1)) +query_repo_cp = $(call query_repo_default,$(1)) +query_repo_ln = $(call query_repo_default,$(1)) +query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1)) +query_repo_fail = - +query_repo_legacy = - + +query_version = $(call _qv,$(1),$(call query_fetch_method,$(1))) +_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1))) + +query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) +query_version_git = $(call query_version_default,$(1)) +query_version_git-subfolder = $(call query_version_git,$(1)) +query_version_git-submodule = - +query_version_hg = $(call query_version_default,$(1)) +query_version_svn = - +query_version_cp = - +query_version_ln = - +query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit))) +query_version_fail = - +query_version_legacy = - + +query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1))) +_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-) + +query_extra_git = - +query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-) +query_extra_git-submodule = - +query_extra_hg = - +query_extra_svn = - +query_extra_cp = - +query_extra_ln = - +query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-) +query_extra_fail = - +query_extra_legacy = - + +query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1))) + +# Deprecated legacy query functions. +dep_fetch = $(call query_fetch_method,$(1)) +dep_name = $(call query_name,$(1)) +dep_repo = $(call query_repo_git,$(1)) +dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit))) + +LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a))) +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) + +# When we are calling an app directly we don't want to include it here +# otherwise it'll be treated both as an apps and a top-level project. 
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) +ifdef ROOT_DIR +ifndef IS_APP +ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS)) +endif +endif + +ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) +ifeq ($(ERL_LIBS),) + ERL_LIBS = $(APPS_DIR):$(DEPS_DIR) +else + ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR) +endif +endif +export ERL_LIBS + +export NO_AUTOPATCH + +# Verbosity. + +dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))"; +dep_verbose_2 = set -x; +dep_verbose = $(dep_verbose_$(V)) + +# Optimization: don't recompile deps unless truly necessary. + +ifndef IS_DEP +ifneq ($(MAKELEVEL),0) +$(shell rm -f ebin/dep_built) +endif +endif + +# Core targets. + +ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS)) + +apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP) +# Create ebin directory for all apps to make sure Erlang recognizes them +# as proper OTP applications when using -include_lib. This is a temporary +# fix, a proper fix would be to compile apps/* in the right order. +ifndef IS_APP +ifneq ($(ALL_APPS_DIRS),) + $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \ + mkdir -p $$dep/ebin; \ + done +endif +endif +# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only +# compile that list of apps. Otherwise, compile everything. +# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps. +ifneq ($(ALL_APPS_DIRS_TO_BUILD),) + $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \ + if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \ + :; \ + else \ + echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \ + $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \ + fi \ + done +endif + +clean-tmp-deps.log: +ifeq ($(IS_APP)$(IS_DEP),) + $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log +endif + +# Erlang.mk does not rebuild dependencies after they were compiled +# once. If a developer is working on the top-level project and some +# dependencies at the same time, he may want to change this behavior. +# There are two solutions: +# 1. Set `FULL=1` so that all dependencies are visited and +# recursively recompiled if necessary. +# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that +# should be recompiled (instead of the whole set). + +FORCE_REBUILD ?= + +ifeq ($(origin FULL),undefined) +ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),) +define force_rebuild_dep +echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")" +endef +endif +endif + +ifneq ($(SKIP_DEPS),) +deps:: +else +deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP) +ifneq ($(ALL_DEPS_DIRS),) + $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \ + if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ + :; \ + else \ + echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ + if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \ + :; \ + elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ + $(MAKE) -C $$dep IS_DEP=1; \ + if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \ + else \ + echo "Error: No Makefile to build dependency $$dep." >&2; \ + exit 2; \ + fi \ + fi \ + done +endif +endif + +# Deps related targets. 
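Before the dependency targets below, a short usage sketch of the two rebuild knobs described above (the dependency names are placeholders):

    # Visit and, if needed, recompile every dependency, ignoring the
    # ebin/dep_built markers:
    make FULL=1

    # Recompile only the named dependencies; all others are still skipped
    # once their ebin/dep_built marker exists:
    make FORCE_REBUILD="cowlib ranch"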
+ +# @todo rename GNUmakefile and makefile into Makefile first, if they exist +# While Makefile file could be GNUmakefile or makefile, +# in practice only Makefile is needed so far. +define dep_autopatch + if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + rm -rf $(DEPS_DIR)/$1/ebin/; \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + $(call dep_autopatch_erlang_mk,$(1)); \ + elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \ + $(call dep_autopatch2,$1); \ + elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \ + $(call dep_autopatch2,$(1)); \ + fi \ + else \ + if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ + $(call dep_autopatch_noop,$(1)); \ + else \ + $(call dep_autopatch2,$(1)); \ + fi \ + fi +endef + +define dep_autopatch2 + ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \ + mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \ + rm -f $(DEPS_DIR)/$1/ebin/$1.app; \ + if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \ + $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \ + fi; \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \ + $(call dep_autopatch_fetch_rebar); \ + $(call dep_autopatch_rebar,$(1)); \ + else \ + $(call dep_autopatch_gen,$(1)); \ + fi +endef + +define dep_autopatch_noop + printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile +endef + +# Replace "include erlang.mk" with a line that will load the parent Erlang.mk +# if given. Do it for all 3 possible Makefile file names. +ifeq ($(NO_AUTOPATCH_ERLANG_MK),) +define dep_autopatch_erlang_mk + for f in Makefile makefile GNUmakefile; do \ + if [ -f $(DEPS_DIR)/$1/$$f ]; then \ + sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \ + fi \ + done +endef +else +define dep_autopatch_erlang_mk + : +endef +endif + +define dep_autopatch_gen + printf "%s\n" \ + "ERLC_OPTS = +debug_info" \ + "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile +endef + +# We use flock/lockf when available to avoid concurrency issues. +define dep_autopatch_fetch_rebar + if command -v flock >/dev/null; then \ + flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \ + elif command -v lockf >/dev/null; then \ + lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \ + else \ + $(call dep_autopatch_fetch_rebar2); \ + fi +endef + +define dep_autopatch_fetch_rebar2 + if [ ! 
-d $(ERLANG_MK_TMP)/rebar ]; then \ + git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \ + cd $(ERLANG_MK_TMP)/rebar; \ + git checkout -q $(REBAR_COMMIT); \ + ./bootstrap; \ + cd -; \ + fi +endef + +define dep_autopatch_rebar + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + fi; \ + $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \ + rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app +endef + +define dep_autopatch_rebar.erl + application:load(rebar), + application:set_env(rebar, log_level, debug), + rmemo:start(), + Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of + {ok, Conf0} -> Conf0; + _ -> [] + end, + {Conf, OsEnv} = fun() -> + case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of + false -> {Conf1, []}; + true -> + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1), + Before = os:getenv(), + {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings), + {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)} + end + end(), + Write = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append]) + end, + Escape = fun (Text) -> + re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}]) + end, + Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package " + "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"), + Write("C_SRC_DIR = /path/do/not/exist\n"), + Write("C_SRC_TYPE = rebar\n"), + Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), + Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), + ToList = fun + (V) when is_atom(V) -> atom_to_list(V); + (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'" + end, + fun() -> + Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + case lists:keyfind(erl_opts, 1, Conf) of + false -> ok; + {_, ErlOpts} -> + lists:foreach(fun + ({d, D}) -> + Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n"); + ({d, DKey, DVal}) -> + Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n"); + ({i, I}) -> + Write(["ERLC_OPTS += -I ", I, "\n"]); + ({platform_define, Regex, D}) -> + case rebar_utils:is_arch(Regex) of + true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n"); + false -> ok + end; + ({parse_transform, PT}) -> + Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n"); + (_) -> ok + end, ErlOpts) + end, + Write("\n") + end(), + GetHexVsn = fun(N, NP) -> + case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of + {ok, Lock} -> + io:format("~p~n", [Lock]), + case lists:keyfind("1.1.0", 1, Lock) of + {_, LockPkgs} -> + io:format("~p~n", [LockPkgs]), + case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of + {_, {pkg, _, Vsn}, _} -> + io:format("~p~n", [Vsn]), + {N, {hex, NP, binary_to_list(Vsn)}}; + _ -> + false + end; + _ -> + false + end; + _ -> + false + end + end, + SemVsn = fun + ("~>" ++ S0) -> + S = case S0 of + " " ++ S1 -> S1; + _ -> S0 + end, + case length([ok || $$. 
<- S]) of + 0 -> S ++ ".0.0"; + 1 -> S ++ ".0"; + _ -> S + end; + (S) -> S + end, + fun() -> + File = case lists:keyfind(deps, 1, Conf) of + false -> []; + {_, Deps} -> + [begin case case Dep of + N when is_atom(N) -> GetHexVsn(N, N); + {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}}; + {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP); + {N, S, {pkg, NP}} -> {N, {hex, NP, S}}; + {N, S} when is_tuple(S) -> {N, S}; + {N, _, S} -> {N, S}; + {N, _, S, _} -> {N, S}; + _ -> false + end of + false -> ok; + {Name, Source} -> + {Method, Repo, Commit} = case Source of + {hex, NPV, V} -> {hex, V, NPV}; + {git, R} -> {git, R, master}; + {M, R, {branch, C}} -> {M, R, C}; + {M, R, {ref, C}} -> {M, R, C}; + {M, R, {tag, C}} -> {M, R, C}; + {M, R, C} -> {M, R, C} + end, + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + end end || Dep <- Deps] + end + end(), + fun() -> + case lists:keyfind(erl_first_files, 1, Conf) of + false -> ok; + {_, Files} -> + Names = [[" ", case lists:reverse(F) of + "lre." ++ Elif -> lists:reverse(Elif); + "lrx." ++ Elif -> lists:reverse(Elif); + "lry." ++ Elif -> lists:reverse(Elif); + Elif -> lists:reverse(Elif) + end] || "src/" ++ F <- Files], + Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) + end + end(), + Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"), + Write("\npreprocess::\n"), + Write("\npre-deps::\n"), + Write("\npre-app::\n"), + PatchHook = fun(Cmd) -> + Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]), + case Cmd2 of + "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + _ -> Escape(Cmd) + end + end, + fun() -> + case lists:keyfind(pre_hooks, 1, Conf) of + false -> ok; + {_, Hooks} -> + [case H of + {'get-deps', Cmd} -> + Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); + {compile, Cmd} -> + Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {Regex, compile, Cmd} -> + case rebar_utils:is_arch(Regex) of + true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + false -> ok + end; + _ -> ok + end || H <- Hooks] + end + end(), + ShellToMk = fun(V0) -> + V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]), + V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]), + re:replace(V, "-Werror\\\\b", "", [{return, list}, global]) + end, + PortSpecs = fun() -> + case lists:keyfind(port_specs, 1, Conf) of + false -> + case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of + false -> []; + true -> + [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"), + proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}] + end; + {_, Specs} -> + lists:flatten([case S of + {Output, Input} -> {ShellToMk(Output), Input, []}; + {Regex, Output, Input} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, []}; + false -> [] + end; + {Regex, Output, Input, [{env, Env}]} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, Env}; + false -> [] + end + end || S <- Specs]) + end + end(), + PortSpecWrite = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append]) + end, + case PortSpecs of + [] -> ok; + _ -> + Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"), + 
PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n", + [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), + PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n", + [code:lib_dir(erl_interface, lib)])), + [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv], + FilterEnv = fun(Env) -> + lists:flatten([case E of + {_, _} -> E; + {Regex, K, V} -> + case rebar_utils:is_arch(Regex) of + true -> {K, V}; + false -> [] + end + end || E <- Env]) + end, + MergeEnv = fun(Env) -> + lists:foldl(fun ({K, V}, Acc) -> + case lists:keyfind(K, 1, Acc) of + false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc]; + {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc] + end + end, [], Env) + end, + PortEnv = case lists:keyfind(port_env, 1, Conf) of + false -> []; + {_, PortEnv0} -> FilterEnv(PortEnv0) + end, + PortSpec = fun ({Output, Input0, Env}) -> + filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output), + Input = [[" ", I] || I <- Input0], + PortSpecWrite([ + [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))], + case $(PLATFORM) of + darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; + _ -> "" + end, + "\n\nall:: ", Output, "\n\t@:\n\n", + "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], + Output, ": $$\(foreach ext,.c .C .cc .cpp,", + "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n", + "\t$$\(CC) -o $$\@ $$\? 
$$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)", + case {filename:extension(Output), $(PLATFORM)} of + {[], _} -> "\n"; + {_, darwin} -> "\n"; + _ -> " -shared\n" + end]) + end, + [PortSpec(S) || S <- PortSpecs] + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins0} -> + Plugins = [P || P <- Plugins0, is_tuple(P)], + case lists:keyfind('lfe-compile', 1, Plugins) of + false -> ok; + _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n") + end + end + end(), + Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"), + RunPlugin = fun(Plugin, Step) -> + case erlang:function_exported(Plugin, Step, 2) of + false -> ok; + true -> + c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"), + Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(), + dict:store(base_dir, "", dict:new())}, undefined), + io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret]) + end + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins0} -> + Plugins = [P || P <- Plugins0, is_atom(P)], + [begin + case lists:keyfind(deps, 1, Conf) of + false -> ok; + {_, Deps} -> + case lists:keyfind(P, 1, Deps) of + false -> ok; + _ -> + Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P), + io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]), + io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]), + code:add_patha(Path ++ "/ebin") + end + end + end || P <- Plugins], + [case code:load_file(P) of + {module, P} -> ok; + _ -> + case lists:keyfind(plugin_dir, 1, Conf) of + false -> ok; + {_, PluginsDir} -> + ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl", + {ok, P, Bin} = compile:file(ErlFile, [binary]), + {module, P} = code:load_binary(P, ErlFile, Bin) + end + end || P <- Plugins], + [RunPlugin(P, preprocess) || P <- Plugins], + [RunPlugin(P, pre_compile) || P <- Plugins], + [RunPlugin(P, compile) || P <- Plugins] + end + end(), + halt() +endef + +define dep_autopatch_appsrc_script.erl + AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", + AppSrcScript = AppSrc ++ ".script", + {ok, Conf0} = file:consult(AppSrc), + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1), + Conf = case file:script(AppSrcScript, Bindings) of + {ok, [C]} -> C; + {ok, C} -> C + end, + ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])), + halt() +endef + +define dep_autopatch_appsrc.erl + AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", + AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end, + case filelib:is_regular(AppSrcIn) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + L1 = lists:keystore(modules, 1, L0, {modules, []}), + L2 = case lists:keyfind(vsn, 1, L1) of + {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))}); + {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"}); + _ -> L1 + end, + L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), + case 
AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end + end, + halt() +endef + +define dep_fetch_git + git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); +endef + +define dep_fetch_git-subfolder + mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \ + git clone -q -n -- $(call dep_repo,$1) \ + $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \ + cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \ + && git checkout -q $(call dep_commit,$1); \ + ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \ + $(DEPS_DIR)/$(call dep_name,$1); +endef + +define dep_fetch_git-submodule + git submodule update --init -- $(DEPS_DIR)/$1; +endef + +define dep_fetch_hg + hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1)); +endef + +define dep_fetch_svn + svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_cp + cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_ln + ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +# Hex only has a package version. No need to look in the Erlang.mk packages. +define dep_fetch_hex + mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \ + $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\ + https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \ + tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -; +endef + +define dep_fetch_fail + echo "Error: Unknown or invalid dependency: $(1)." >&2; \ + exit 78; +endef + +# Kept for compatibility purposes with older Erlang.mk configuration. +define dep_fetch_legacy + $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \ + git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master); +endef + +define dep_target +$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP) + $(eval DEP_NAME := $(call dep_name,$1)) + $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) + $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ + echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \ + exit 17; \ + fi + $(verbose) mkdir -p $(DEPS_DIR) + $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1)) + $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \ + && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \ + echo " AUTO " $(DEP_STR); \ + cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \ + fi + - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ + echo " CONF " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ + fi +ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME) +endif + +.PHONY: autopatch-$(call dep_name,$1) + +autopatch-$(call dep_name,$1):: + $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ + if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi; \ + if [ ! 
-d $(DEPS_DIR)/rabbitmq-server ]; then \ + echo " PATCH Downloading rabbitmq-server"; \ + git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \ + fi; \ + ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \ + elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \ + if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi \ + elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \ + ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \ + else \ + $$(call dep_autopatch,$(call dep_name,$1)) \ + fi +endef + +$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) + +ifndef IS_APP +clean:: clean-apps + +clean-apps: + $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep clean IS_APP=1; \ + done + +distclean:: distclean-apps + +distclean-apps: + $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep distclean IS_APP=1; \ + done +endif + +ifndef SKIP_DEPS +distclean:: distclean-deps + +distclean-deps: + $(gen_verbose) rm -rf $(DEPS_DIR) +endif + +# Forward-declare variables used in core/deps-tools.mk. This is required +# in case plugins use them. + +ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log +ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log +ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log +ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log +ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log + +ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log +ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log +ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log +ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log +ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log + +# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-app + +# Configuration. + +ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \ + +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec +COMPILE_FIRST ?= +COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST))) +ERLC_EXCLUDE ?= +ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) + +ERLC_ASN1_OPTS ?= + +ERLC_MIB_OPTS ?= +COMPILE_MIB_FIRST ?= +COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) + +# Verbosity. 
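A minimal sketch of the V switch that drives the *_verbose variables defined next, assuming the usual erlang.mk default of V=0 set earlier in this file:

    make        # V=0: terse "  APP  " / "  ERLC  "-style progress lines
    make V=2    # the *_verbose_2 variants run "set -x", tracing every command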
+ +app_verbose_0 = @echo " APP " $(PROJECT); +app_verbose_2 = set -x; +app_verbose = $(app_verbose_$(V)) + +appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; +appsrc_verbose_2 = set -x; +appsrc_verbose = $(appsrc_verbose_$(V)) + +makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d; +makedep_verbose_2 = set -x; +makedep_verbose = $(makedep_verbose_$(V)) + +erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(?F))); +erlc_verbose_2 = set -x; +erlc_verbose = $(erlc_verbose_$(V)) + +xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); +xyrl_verbose_2 = set -x; +xyrl_verbose = $(xyrl_verbose_$(V)) + +asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F)); +asn1_verbose_2 = set -x; +asn1_verbose = $(asn1_verbose_$(V)) + +mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F)); +mib_verbose_2 = set -x; +mib_verbose = $(mib_verbose_$(V)) + +ifneq ($(wildcard src/),) + +# Targets. + +app:: $(if $(wildcard ebin/test),clean) deps + $(verbose) $(MAKE) --no-print-directory $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build + +ifeq ($(wildcard src/$(PROJECT_MOD).erl),) +define app_file +{application, '$(PROJECT)', [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, []}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]}, + {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),) +]}. +endef +else +define app_file +{application, '$(PROJECT)', [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]}, + {mod, {$(PROJECT_MOD), []}}, + {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),) +]}. +endef +endif + +app-build: ebin/$(PROJECT).app + $(verbose) : + +# Source files. + +ALL_SRC_FILES := $(sort $(call core_find,src/,*)) + +ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES)) +CORE_FILES := $(filter %.core,$(ALL_SRC_FILES)) + +# ASN.1 files. + +ifneq ($(wildcard asn1/),) +ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1)) +ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +define compile_asn1 + $(verbose) mkdir -p include/ + $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1) + $(verbose) mv asn1/*.erl src/ + -$(verbose) mv asn1/*.hrl include/ + $(verbose) mv asn1/*.asn1db include/ +endef + +$(PROJECT).d:: $(ASN1_FILES) + $(if $(strip $?),$(call compile_asn1,$?)) +endif + +# SNMP MIB files. + +ifneq ($(wildcard mibs/),) +MIB_FILES = $(sort $(call core_find,mibs/,*.mib)) + +$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES) + $(verbose) mkdir -p include/ priv/mibs/ + $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $? + $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?))) +endif + +# Leex and Yecc files. 
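A small assumed example of what the Leex/Yecc rules below amount to (the file name is hypothetical): a grammar at src/my_parser.yrl is picked up via YRL_FILES and regenerated into src/my_parser.erl whenever it changes, roughly the same as running:

    erlc -v -o src/ src/my_parser.yrl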
+ +XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES)) +XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES)))) +ERL_FILES += $(XRL_ERL_FILES) + +YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES)) +YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES)))) +ERL_FILES += $(YRL_ERL_FILES) + +$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES) + $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?) + +# Erlang and Core Erlang files. + +define makedep.erl + E = ets:new(makedep, [bag]), + G = digraph:new([acyclic]), + ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), + DepsDir = "$(call core_native_path,$(DEPS_DIR))", + AppsDir = "$(call core_native_path,$(APPS_DIR))", + DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))", + DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))", + AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))", + AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))", + DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")), + AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")), + Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles], + Add = fun (Mod, Dep) -> + case lists:keyfind(Dep, 1, Modules) of + false -> ok; + {_, DepFile} -> + {_, ModFile} = lists:keyfind(Mod, 1, Modules), + ets:insert(E, {ModFile, DepFile}), + digraph:add_vertex(G, Mod), + digraph:add_vertex(G, Dep), + digraph:add_edge(G, Mod, Dep) + end + end, + AddHd = fun (F, Mod, DepFile) -> + case file:open(DepFile, [read]) of + {error, enoent} -> + ok; + {ok, Fd} -> + {_, ModFile} = lists:keyfind(Mod, 1, Modules), + case ets:match(E, {ModFile, DepFile}) of + [] -> + ets:insert(E, {ModFile, DepFile}), + F(F, Fd, Mod,0); + _ -> ok + end + end + end, + SearchHrl = fun + F(_Hrl, []) -> {error,enoent}; + F(Hrl, [Dir|Dirs]) -> + HrlF = filename:join([Dir,Hrl]), + case filelib:is_file(HrlF) of + true -> + {ok, HrlF}; + false -> F(Hrl,Dirs) + end + end, + Attr = fun + (_F, Mod, behavior, Dep) -> + Add(Mod, Dep); + (_F, Mod, behaviour, Dep) -> + Add(Mod, Dep); + (_F, Mod, compile, {parse_transform, Dep}) -> + Add(Mod, Dep); + (_F, Mod, compile, Opts) when is_list(Opts) -> + case proplists:get_value(parse_transform, Opts) of + undefined -> ok; + Dep -> Add(Mod, Dep) + end; + (F, Mod, include, Hrl) -> + case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of + {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl); + {error, _} -> false + end; + (F, Mod, include_lib, Hrl) -> + case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of + {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl); + {error, _} -> false + end; + (F, Mod, import, {Imp, _}) -> + IsFile = + case lists:keyfind(Imp, 1, Modules) of + false -> false; + {_, FilePath} -> filelib:is_file(FilePath) + end, + case IsFile of + false -> ok; + true -> Add(Mod, Imp) + end; + (_, _, _, _) -> ok + end, + MakeDepend = fun + (F, Fd, Mod, StartLocation) -> + {ok, Filename} = file:pid2name(Fd), + case io:parse_erl_form(Fd, undefined, StartLocation) of + {ok, AbsData, EndLocation} -> + case AbsData of + {attribute, _, Key, Value} -> + Attr(F, Mod, Key, Value), + F(F, Fd, Mod, EndLocation); + _ -> F(F, Fd, Mod, EndLocation) + end; + {eof, _ } -> file:close(Fd); + {error, ErrorDescription } -> + file:close(Fd); + 
{error, ErrorInfo, ErrorLocation} -> + F(F, Fd, Mod, ErrorLocation) + end, + ok + end, + [begin + Mod = list_to_atom(filename:basename(F, ".erl")), + case file:open(F, [read]) of + {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0); + {error, enoent} -> ok + end + end || F <- ErlFiles], + Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))), + CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)], + TargetPath = fun(Target) -> + case lists:keyfind(Target, 1, Modules) of + false -> ""; + {_, DepFile} -> + DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")), + string:join(DirSubname ++ [atom_to_list(Target)], "/") + end + end, + Output0 = [ + "# Generated by Erlang.mk. Edit at your own risk!\n\n", + [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend], + "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n" + ], + Output = case "é" of + [233] -> unicode:characters_to_binary(Output0); + _ -> Output0 + end, + ok = file:write_file("$(1)", Output), + halt() +endef + +ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),) +$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST) + $(makedep_verbose) $(call erlang,$(call makedep.erl,$@)) +endif + +ifeq ($(IS_APP)$(IS_DEP),) +ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0) +# Rebuild everything when the Makefile changes. +$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP) + $(verbose) if test -f $@; then \ + touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \ + touch -c $(PROJECT).d; \ + fi + $(verbose) touch $@ + +$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change +endif +endif + +$(PROJECT).d:: + $(verbose) : + +include $(wildcard $(PROJECT).d) + +ebin/$(PROJECT).app:: ebin/ + +ebin/: + $(verbose) mkdir -p ebin/ + +define compile_erl + $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \ + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) +endef + +define validate_app_file + case file:consult("ebin/$(PROJECT).app") of + {ok, _} -> halt(); + _ -> halt(1) + end +endef + +ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src) + $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?)) + $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE))) +# Older git versions do not have the --first-parent flag. Do without in that case. + $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \ + || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true)) + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES))))))) +ifeq ($(wildcard src/$(PROJECT).app.src),) + $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \ + > ebin/$(PROJECT).app + $(verbose) if ! $(call erlang,$(call validate_app_file)); then \ + echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." 
>&2; \ + exit 1; \ + fi +else + $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \ + exit 1; \ + fi + $(appsrc_verbose) cat src/$(PROJECT).app.src \ + | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \ + | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \ + > ebin/$(PROJECT).app +endif +ifneq ($(wildcard src/$(PROJECT).appup),) + $(verbose) cp src/$(PROJECT).appup ebin/ +endif + +clean:: clean-app + +clean-app: + $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \ + $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \ + $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +endif + +# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu> +# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: docs-deps + +# Configuration. + +ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS)) + +# Targets. + +$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +doc-deps: +else +doc-deps: $(ALL_DOC_DEPS_DIRS) + $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done +endif + +# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rel-deps + +# Configuration. + +ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS)) + +# Targets. + +$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +rel-deps: +else +rel-deps: $(ALL_REL_DEPS_DIRS) + $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: test-deps test-dir test-build clean-test-dir + +# Configuration. + +TEST_DIR ?= $(CURDIR)/test + +ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) + +TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard +TEST_ERLC_OPTS += -DTEST=1 + +# Targets. + +$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +test-deps: +else +test-deps: $(ALL_TEST_DEPS_DIRS) + $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \ + if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \ + :; \ + else \ + $(MAKE) -C $$dep IS_DEP=1; \ + if [ ! 
-L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \ + fi \ + done +endif + +ifneq ($(wildcard $(TEST_DIR)),) +test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build + @: + +test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE)))); +test_erlc_verbose_2 = set -x; +test_erlc_verbose = $(test_erlc_verbose_$(V)) + +define compile_test_erl + $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \ + -pa ebin/ -I include/ $(1) +endef + +ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl) +$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST) + $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?)) + $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@) +endif + +test-build:: IS_TEST=1 +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps) +# We already compiled everything when IS_APP=1. +ifndef IS_APP +ifneq ($(wildcard src),) + $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" + $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" + $(gen_verbose) touch ebin/test +endif +ifneq ($(wildcard $(TEST_DIR)),) + $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" +endif +endif + +# Roughly the same as test-build, but when IS_APP=1. +# We only care about compiling the current application. +ifdef IS_APP +test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build-app:: deps test-deps +ifneq ($(wildcard src),) + $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" + $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" + $(gen_verbose) touch ebin/test +endif +ifneq ($(wildcard $(TEST_DIR)),) + $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" +endif +endif + +clean:: clean-test-dir + +clean-test-dir: +ifneq ($(wildcard $(TEST_DIR)/*.beam),) + $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build +endif + +# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rebar.config + +# We strip out -Werror because we don't want to fail due to +# warnings when used as a dependency. + +compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g') + +define compat_convert_erlc_opts +$(if $(filter-out -Werror,$1),\ + $(if $(findstring +,$1),\ + $(shell echo $1 | cut -b 2-))) +endef + +define compat_erlc_opts_to_list +[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))] +endef + +define compat_rebar_config +{deps, [ +$(call comma_list,$(foreach d,$(DEPS),\ + $(if $(filter hex,$(call dep_fetch,$d)),\ + {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\ + {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}}))) +]}. +{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}. 
+endef + +rebar.config: + $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config) + +# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck) + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual + +# Core targets. + +docs:: asciidoc + +distclean:: distclean-asciidoc-guide distclean-asciidoc-manual + +# Plugin-specific targets. + +asciidoc: asciidoc-guide asciidoc-manual + +# User guide. + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: distclean-asciidoc-guide doc-deps + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ + +distclean-asciidoc-guide: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf +endif + +# Man pages. + +ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc) + +ifeq ($(ASCIIDOC_MANUAL_FILES),) +asciidoc-manual: +else + +# Configuration. + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 +MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/') +MAN_VERSION ?= $(PROJECT_VERSION) + +# Plugin-specific targets. + +define asciidoc2man.erl +try + [begin + io:format(" ADOC ~s~n", [F]), + ok = asciideck:to_manpage(asciideck:parse_file(F), #{ + compress => gzip, + outdir => filename:dirname(F), + extra2 => "$(MAN_PROJECT) $(MAN_VERSION)", + extra3 => "$(MAN_PROJECT) Function Reference" + }) + end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]], + halt(0) +catch C:E -> + io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]), + halt(1) +end. +endef + +asciidoc-manual:: doc-deps + +asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES) + $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?)) + $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;) + +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + $(foreach s,$(MAN_SECTIONS),\ + mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \ + install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;) + +distclean-asciidoc-manual: + $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS)) +endif +endif + +# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new-app in=NAME Create a new local OTP application NAME" \ + " new-lib in=NAME Create a new local OTP library NAME" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ + " list-templates List available templates" + +# Bootstrap templates. + +define bs_appsrc +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]}, + {mod, {$p_app, []}}, + {env, []} +]}. 
+endef + +define bs_appsrc_lib +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. +endef + +# To prevent autocompletion issues with ZSH, we add "include erlang.mk" +# separately during the actual bootstrap. +define bs_Makefile +PROJECT = $p +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.1.0 +$(if $(SP), +# Whitespace to be used when creating files from templates. +SP = $(SP) +) +endef + +define bs_apps_Makefile +PROJECT = $p +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.1.0 +$(if $(SP), +# Whitespace to be used when creating files from templates. +SP = $(SP) +) +# Make sure we know where the applications are located. +ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app) +APPS_DIR ?= .. +DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app) + +include $$(ROOT_DIR)/erlang.mk +endef + +define bs_app +-module($p_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +start(_Type, _Args) -> + $p_sup:start_link(). + +stop(_State) -> + ok. +endef + +define bs_relx_config +{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}. +{extended_start_script, true}. +{sys_config, "config/sys.config"}. +{vm_args, "config/vm.args"}. +endef + +define bs_sys_config +[ +]. +endef + +define bs_vm_args +-name $p@127.0.0.1 +-setcookie $p +-heart +endef + +# Normal templates. + +define tpl_supervisor +-module($(n)). +-behaviour(supervisor). + +-export([start_link/0]). +-export([init/1]). + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef + +define tpl_gen_server +-module($(n)). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. +endef + +define tpl_module +-module($(n)). +-export([]). +endef + +define tpl_cowboy_http +-module($(n)). +-behaviour(cowboy_http_handler). + +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {ok, Req, #state{}}. + +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_gen_fsm +-module($(n)). +-behaviour(gen_fsm). + +%% API. +-export([start_link/0]). + +%% gen_fsm. +-export([init/1]). +-export([state_name/2]). +-export([handle_event/3]). +-export([state_name/3]). +-export([handle_sync_event/4]). +-export([handle_info/3]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_fsm:start_link(?MODULE, [], []). + +%% gen_fsm. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_Event, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_Event, StateName, StateData) -> + {next_state, StateName, StateData}. 
+ +state_name(_Event, _From, StateData) -> + {reply, ignored, state_name, StateData}. + +handle_sync_event(_Event, _From, StateName, StateData) -> + {reply, ignored, StateName, StateData}. + +handle_info(_Info, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_gen_statem +-module($(n)). +-behaviour(gen_statem). + +%% API. +-export([start_link/0]). + +%% gen_statem. +-export([callback_mode/0]). +-export([init/1]). +-export([state_name/3]). +-export([handle_event/4]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_statem:start_link(?MODULE, [], []). + +%% gen_statem. + +callback_mode() -> + state_functions. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_EventType, _EventData, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_EventType, _EventData, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_cowboy_loop +-module($(n)). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. + +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_cowboy_rest +-module($(n)). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). + +init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. + +get_html(Req, State) -> + {<<"<html><body>This is REST!</body></html>">>, Req, State}. +endef + +define tpl_cowboy_ws +-module($(n)). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +-record(state, { +}). + +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_ranch_protocol +-module($(n)). +-behaviour(ranch_protocol). + +-export([start_link/4]). +-export([init/4]). + +-type opts() :: []. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module() +}). + +start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, _Opts) -> + ok = ranch:accept_ack(Ref), + loop(#state{socket=Socket, transport=Transport}). + +loop(State) -> + loop(State). +endef + +# Plugin-specific targets. 
+ +ifndef WS +ifdef SP +WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a)) +else +WS = $(tab) +endif +endif + +bootstrap: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\ + $(error Error: Invalid characters in the application name)) + $(eval n := $(PROJECT)_sup) + $(verbose) $(call core_render,bs_Makefile,Makefile) + $(verbose) echo "include erlang.mk" >> Makefile + $(verbose) mkdir src/ +ifdef LEGACY + $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src) +endif + $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl) + $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl) + +bootstrap-lib: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\ + $(error Error: Invalid characters in the application name)) + $(verbose) $(call core_render,bs_Makefile,Makefile) + $(verbose) echo "include erlang.mk" >> Makefile + $(verbose) mkdir src/ +ifdef LEGACY + $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src) +endif + +bootstrap-rel: +ifneq ($(wildcard relx.config),) + $(error Error: relx.config already exists) +endif +ifneq ($(wildcard config/),) + $(error Error: config/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(verbose) $(call core_render,bs_relx_config,relx.config) + $(verbose) mkdir config/ + $(verbose) $(call core_render,bs_sys_config,config/sys.config) + $(verbose) $(call core_render,bs_vm_args,config/vm.args) + +new-app: +ifndef in + $(error Usage: $(MAKE) new-app in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\ + $(error Error: Invalid characters in the application name)) + $(eval n := $(in)_sup) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) +endif + $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) + $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + +new-lib: +ifndef in + $(error Usage: $(MAKE) new-lib in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\ + $(error Error: Invalid characters in the application name)) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) +endif + +new: +ifeq ($(wildcard src/)$(in),) + $(error Error: src/ directory does not exist) +endif +ifndef t + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifndef n + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifdef in + $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl) +else + $(verbose) $(call core_render,tpl_$(t),src/$(n).erl) +endif + +list-templates: + $(verbose) @echo Available templates: + $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + +# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +.PHONY: clean-c_src distclean-c_src-env + +# Configuration. + +C_SRC_DIR ?= $(CURDIR)/c_src +C_SRC_ENV ?= $(C_SRC_DIR)/env.mk +C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT) +C_SRC_TYPE ?= shared + +# System type and C compiler/flags. + +ifeq ($(PLATFORM),msys2) + C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe + C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll +else + C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= + C_SRC_OUTPUT_SHARED_EXTENSION ?= .so +endif + +ifeq ($(C_SRC_TYPE),shared) + C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION) +else + C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION) +endif + +ifeq ($(PLATFORM),msys2) +# We hardcode the compiler used on MSYS2. The default CC=cc does +# not produce working code. The "gcc" MSYS2 package also doesn't. + CC = /mingw64/bin/gcc + export CC + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),darwin) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -Wall + LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress +else ifeq ($(PLATFORM),freebsd) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),linux) + CC ?= gcc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +endif + +ifneq ($(PLATFORM),msys2) + CFLAGS += -fPIC + CXXFLAGS += -fPIC +endif + +CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)" +CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)" + +LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei + +# Verbosity. + +c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F)); +c_verbose = $(c_verbose_$(V)) + +cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F)); +cpp_verbose = $(cpp_verbose_$(V)) + +link_verbose_0 = @echo " LD " $(@F); +link_verbose = $(link_verbose_$(V)) + +# Targets. + +ifeq ($(wildcard $(C_SRC_DIR)),) +else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),) +app:: app-c_src + +test-build:: app-c_src + +app-c_src: + $(MAKE) -C $(C_SRC_DIR) + +clean:: + $(MAKE) -C $(C_SRC_DIR) clean + +else + +ifeq ($(SOURCES),) +SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat)))) +endif +OBJECTS = $(addsuffix .o, $(basename $(SOURCES))) + +COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c +COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c + +app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE) + +test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE) + +$(C_SRC_OUTPUT_FILE): $(OBJECTS) + $(verbose) mkdir -p $(dir $@) + $(link_verbose) $(CC) $(OBJECTS) \ + $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ + -o $(C_SRC_OUTPUT_FILE) + +$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV) + +%.o: %.c + $(COMPILE_C) $(OUTPUT_OPTION) $< + +%.o: %.cc + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.C + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.cpp + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +clean:: clean-c_src + +clean-c_src: + $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS) + +endif + +ifneq ($(wildcard $(C_SRC_DIR)),) +ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().') + +$(C_SRC_ENV): + $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \ + io_lib:format( \ + \"# Generated by Erlang.mk. 
Edit at your own risk!~n~n\" \ + \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ + \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \ + \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \ + [code:root_dir(), erlang:system_info(version), \ + code:lib_dir(erl_interface, include), \ + code:lib_dir(erl_interface, lib)])), \ + halt()." + +distclean:: distclean-c_src-env + +distclean-c_src-env: + $(gen_verbose) rm -f $(C_SRC_ENV) + +-include $(C_SRC_ENV) + +ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR)) +$(shell rm -f $(C_SRC_ENV)) +endif +endif + +# Templates. + +define bs_c_nif +#include "erl_nif.h" + +static int loads = 0; + +static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info) +{ + /* Initialize private data. */ + *priv_data = NULL; + + loads++; + + return 0; +} + +static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info) +{ + /* Convert the private data to the new version. */ + *priv_data = *old_priv_data; + + loads++; + + return 0; +} + +static void unload(ErlNifEnv* env, void* priv_data) +{ + if (loads == 1) { + /* Destroy the private data. */ + } + + loads--; +} + +static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{ + if (enif_is_atom(env, argv[0])) { + return enif_make_tuple2(env, + enif_make_atom(env, "hello"), + argv[0]); + } + + return enif_make_tuple2(env, + enif_make_atom(env, "error"), + enif_make_atom(env, "badarg")); +} + +static ErlNifFunc nif_funcs[] = { + {"hello", 1, hello} +}; + +ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload) +endef + +define bs_erl_nif +-module($n). + +-export([hello/1]). + +-on_load(on_load/0). +on_load() -> + PrivDir = case code:priv_dir(?MODULE) of + {error, _} -> + AppPath = filename:dirname(filename:dirname(code:which(?MODULE))), + filename:join(AppPath, "priv"); + Path -> + Path + end, + erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0). + +hello(_) -> + erlang:nif_error({not_loaded, ?MODULE}). +endef + +new-nif: +ifneq ($(wildcard $(C_SRC_DIR)/$n.c),) + $(error Error: $(C_SRC_DIR)/$n.c already exists) +endif +ifneq ($(wildcard src/$n.erl),) + $(error Error: src/$n.erl already exists) +endif +ifndef n + $(error Usage: $(MAKE) new-nif n=NAME [in=APP]) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in= +else + $(verbose) mkdir -p $(C_SRC_DIR) src/ + $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c) + $(verbose) $(call core_render,bs_erl_nif,src/$n.erl) +endif + +# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +.PHONY: ci ci-prepare ci-setup + +CI_OTP ?= +CI_HIPE ?= +CI_ERLLVM ?= + +ifeq ($(CI_VM),native) +ERLC_OPTS += +native +TEST_ERLC_OPTS += +native +else ifeq ($(CI_VM),erllvm) +ERLC_OPTS += +native +'{hipe, [to_llvm]}' +TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}' +endif + +ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),) +ci:: +else + +ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM))) + +ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE))) + +ci-setup:: + $(verbose) : + +ci-extra:: + $(verbose) : + +ci_verbose_0 = @echo " CI " $(1); +ci_verbose = $(ci_verbose_$(V)) + +define ci_target +ci-$1: $(KERL_INSTALL_DIR)/$2 + $(verbose) $(MAKE) --no-print-directory clean + $(ci_verbose) \ + PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \ + CI_OTP_RELEASE="$1" \ + CT_OPTS="-label $1" \ + CI_VM="$3" \ + $(MAKE) ci-setup tests + $(verbose) $(MAKE) --no-print-directory ci-extra +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp))) +$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native))) +$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm))) + +$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp)))) +$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLLVM))),$(eval $(call kerl_hipe_target,$(otp)))) + +help:: + $(verbose) printf "%s\n" "" \ + "Continuous Integration targets:" \ + " ci Run '$(MAKE) tests' on all configured Erlang versions." \ + "" \ + "The CI_OTP variable must be defined with the Erlang versions" \ + "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" + +endif + +# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifdef CONCUERROR_TESTS + +.PHONY: concuerror distclean-concuerror + +# Configuration + +CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs +CONCUERROR_OPTS ?= + +# Core targets. + +check:: concuerror + +ifndef KEEP_LOGS +distclean:: distclean-concuerror +endif + +# Plugin-specific targets. 
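# Illustrative note (the module and test names are examples, not taken
# from this file): with CONCUERROR_TESTS = my_mod:my_test set in the
# project Makefile, the target definitions below generate a
# concuerror-my_mod-my_test target; `make concuerror` then runs Concuerror
# with `-m my_mod -t my_test`, writes its output to
# $(CONCUERROR_LOGS_DIR)/concuerror-my_mod-my_test.txt and links that file
# from the generated concuerror.html report.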
+ +$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP) + $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror + $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror + +$(CONCUERROR_LOGS_DIR): + $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR) + +define concuerror_html_report +<!DOCTYPE html> +<html lang="en"> +<head> +<meta charset="utf-8"> +<title>Concuerror HTML report</title> +</head> +<body> +<h1>Concuerror HTML report</h1> +<p>Generated on $(concuerror_date)</p> +<ul> +$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>) +</ul> +</body> +</html> +endef + +concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS))) + $(eval concuerror_date := $(shell date)) + $(eval concuerror_targets := $^) + $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html) + +define concuerror_target +.PHONY: concuerror-$1-$2 + +concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR) + $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \ + --pa $(CURDIR)/ebin --pa $(TEST_DIR) \ + -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \ + $$(CONCUERROR_OPTS) -m $1 -t $2 +endef + +$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test)))))) + +distclean-concuerror: + $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR) + +endif + +# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: ct apps-ct distclean-ct + +# Configuration. + +CT_OPTS ?= + +ifneq ($(wildcard $(TEST_DIR)),) +ifndef CT_SUITES +CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl)))) +endif +endif +CT_SUITES ?= +CT_LOGS_DIR ?= $(CURDIR)/logs + +# Core targets. + +tests:: ct + +ifndef KEEP_LOGS +distclean:: distclean-ct +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Common_test targets:" \ + " ct Run all the common_test suites for this project" \ + "" \ + "All your common_test suites have their associated targets." \ + "A suite named http_SUITE can be ran using the ct-http target." + +# Plugin-specific targets. + +CT_RUN = ct_run \ + -no_auto_compile \ + -noinput \ + -pa $(CURDIR)/ebin $(TEST_DIR) \ + -dir $(TEST_DIR) \ + -logdir $(CT_LOGS_DIR) + +ifeq ($(CT_SUITES),) +ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct) +else +# We do not run tests if we are in an apps/* with no test directory. 
+ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1) +ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct) + $(verbose) mkdir -p $(CT_LOGS_DIR) + $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) +endif +endif + +ifneq ($(ALL_APPS_DIRS),) +define ct_app_target +apps-ct-$1: test-build + $$(MAKE) -C $1 ct IS_APP=1 +endef + +$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app)))) + +apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS)) +endif + +ifdef t +ifeq (,$(findstring :,$t)) +CT_EXTRA = -group $t +else +t_words = $(subst :, ,$t) +CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words)) +endif +else +ifdef c +CT_EXTRA = -case $c +else +CT_EXTRA = +endif +endif + +define ct_suite_target +ct-$(1): test-build + $(verbose) mkdir -p $(CT_LOGS_DIR) + $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS) +endef + +$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) + +distclean-ct: + $(gen_verbose) rm -rf $(CT_LOGS_DIR) + +# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: plt distclean-plt dialyze + +# Configuration. + +DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt +export DIALYZER_PLT + +PLT_APPS ?= +DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS) +DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs +DIALYZER_PLT_OPTS ?= + +# Core targets. + +check:: dialyze + +distclean:: distclean-plt + +help:: + $(verbose) printf "%s\n" "" \ + "Dialyzer targets:" \ + " plt Build a PLT file for this project" \ + " dialyze Analyze the project using Dialyzer" + +# Plugin-specific targets. + +define filter_opts.erl + Opts = init:get_plain_arguments(), + {Filtered, _} = lists:foldl(fun + (O, {Os, true}) -> {[O|Os], false}; + (O = "-D", {Os, _}) -> {[O|Os], true}; + (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false}; + (O = "-I", {Os, _}) -> {[O|Os], true}; + (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false}; + (O = "-pa", {Os, _}) -> {[O|Os], true}; + (_, Acc) -> Acc + end, {[], false}, Opts), + io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]), + halt(). +endef + +# DIALYZER_PLT is a variable understood directly by Dialyzer. +# +# We append the path to erts at the end of the PLT. This works +# because the PLT file is in the external term format and the +# function binary_to_term/1 ignores any trailing data. +$(DIALYZER_PLT): deps app + $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \ + while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log)) + $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \ + erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2 + $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@ + +plt: $(DIALYZER_PLT) + +distclean-plt: + $(gen_verbose) rm -f $(DIALYZER_PLT) + +ifneq ($(wildcard $(DIALYZER_PLT)),) +dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app) + $(verbose) if ! 
tail -n1 $(DIALYZER_PLT) | \ + grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \ + rm $(DIALYZER_PLT); \ + $(MAKE) plt; \ + fi +else +dialyze: $(DIALYZER_PLT) +endif + $(verbose) dialyzer --no_native `$(ERL) \ + -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \ + -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/) + +# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-edoc edoc + +# Configuration. + +EDOC_OPTS ?= +EDOC_SRC_DIRS ?= +EDOC_OUTPUT ?= doc + +define edoc.erl + SrcPaths = lists:foldl(fun(P, Acc) -> + filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc + end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]), + DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}], + edoc:application($(1), ".", [$(2)] ++ DefaultOpts), + halt(0). +endef + +# Core targets. + +ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),) +docs:: edoc +endif + +distclean:: distclean-edoc + +# Plugin-specific targets. + +edoc: distclean-edoc doc-deps + $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS))) + +distclean-edoc: + $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info + +# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Configuration. + +DTL_FULL_PATH ?= +DTL_PATH ?= templates/ +DTL_PREFIX ?= +DTL_SUFFIX ?= _dtl +DTL_OPTS ?= + +# Verbosity. + +dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); +dtl_verbose = $(dtl_verbose_$(V)) + +# Core targets. + +DTL_PATH := $(abspath $(DTL_PATH)) +DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl)) + +ifneq ($(DTL_FILES),) + +DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%))) +DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES))) +BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES))) + +ifneq ($(words $(DTL_FILES)),0) +# Rebuild templates when the Makefile changes. +$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP) + $(verbose) if test -f $@; then \ + touch $(DTL_FILES); \ + fi + $(verbose) touch $@ + +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl +endif + +define erlydtl_compile.erl + [begin + Module0 = case "$(strip $(DTL_FULL_PATH))" of + "" -> + filename:basename(F, ".dtl"); + _ -> + "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"), + re:replace(F2, "/", "_", [{return, list}, global]) + end, + Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), + case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of + ok -> ok; + {ok, _} -> ok + end + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/ + $(if $(strip $?),\ + $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\ + -pa ebin/)) + +endif + +# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu> +# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-escript escript escript-zip + +# Configuration. 
+ +ESCRIPT_NAME ?= $(PROJECT) +ESCRIPT_FILE ?= $(ESCRIPT_NAME) + +ESCRIPT_SHEBANG ?= /usr/bin/env escript +ESCRIPT_COMMENT ?= This is an -*- erlang -*- file +ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME) + +ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null) +ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip + +# Core targets. + +distclean:: distclean-escript + +help:: + $(verbose) printf "%s\n" "" \ + "Escript targets:" \ + " escript Build an executable escript archive" \ + +# Plugin-specific targets. + +escript-zip:: FULL=1 +escript-zip:: deps app + $(verbose) mkdir -p $(dir $(ESCRIPT_ZIP)) + $(verbose) rm -f $(ESCRIPT_ZIP_FILE) + $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/* +ifneq ($(DEPS),) + $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \ + $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \ + $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log))))) +endif + +escript:: escript-zip + $(gen_verbose) printf "%s\n" \ + "#!$(ESCRIPT_SHEBANG)" \ + "%% $(ESCRIPT_COMMENT)" \ + "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE) + $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE) + $(verbose) chmod +x $(ESCRIPT_FILE) + +distclean-escript: + $(gen_verbose) rm -f $(ESCRIPT_FILE) + +# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> +# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com> +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: eunit apps-eunit + +# Configuration + +EUNIT_OPTS ?= +EUNIT_ERL_OPTS ?= + +# Core targets. + +tests:: eunit + +help:: + $(verbose) printf "%s\n" "" \ + "EUnit targets:" \ + " eunit Run all the EUnit tests for this project" + +# Plugin-specific targets. + +define eunit.erl + $(call cover.erl) + CoverSetup(), + case eunit:test($1, [$(EUNIT_OPTS)]) of + ok -> ok; + error -> halt(2) + end, + CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"), + halt() +endef + +EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin + +ifdef t +ifeq (,$(findstring :,$(t))) +eunit: test-build cover-data-dir + $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS)) +else +eunit: test-build cover-data-dir + $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS)) +endif +else +EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES))) +EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl))) + +EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \ + $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)') + +eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir +ifneq ($(wildcard src/ $(TEST_DIR)),) + $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS)) +endif + +ifneq ($(ALL_APPS_DIRS),) +apps-eunit: test-build + $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \ + [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \ + exit $$eunit_retcode +endif +endif + +# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper) +.PHONY: proper + +# Targets. 
+ +tests:: proper + +define proper_check.erl + $(call cover.erl) + code:add_pathsa([ + "$(call core_native_path,$(CURDIR)/ebin)", + "$(call core_native_path,$(DEPS_DIR)/*/ebin)", + "$(call core_native_path,$(TEST_DIR))"]), + Module = fun(M) -> + [true] =:= lists:usort([ + case atom_to_list(F) of + "prop_" ++ _ -> + io:format("Testing ~p:~p/0~n", [M, F]), + proper:quickcheck(M:F(), nocolors); + _ -> + true + end + || {F, 0} <- M:module_info(exports)]) + end, + try begin + CoverSetup(), + Res = case $(1) of + all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]); + module -> Module($(2)); + function -> proper:quickcheck($(2), nocolors) + end, + CoverExport("$(COVER_DATA_DIR)/proper.coverdata"), + Res + end of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]), + halt(0) + end. +endef + +ifdef t +ifeq (,$(findstring :,$(t))) +proper: test-build cover-data-dir + $(verbose) $(call erlang,$(call proper_check.erl,module,$(t))) +else +proper: test-build cover-data-dir + $(verbose) echo Testing $(t)/0 + $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)())) +endif +else +proper: test-build cover-data-dir + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam)))))) + $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES))) +endif +endif + +# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. + +ifneq ($(wildcard src/),) +ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),) +PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES)) +ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES)))) + +ifeq ($(PROTO_FILES),) +$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: + $(verbose) : +else +# Rebuild proto files when the Makefile changes. +# We exclude $(PROJECT).d to avoid a circular dependency. +$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP) + $(verbose) if test -f $@; then \ + touch $(PROTO_FILES); \ + fi + $(verbose) touch $@ + +$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs +endif + +ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),) +define compile_proto.erl + [begin + protobuffs_compile:generate_source(F, [ + {output_include_dir, "./include"}, + {output_src_dir, "./src"}]) + end || F <- string:tokens("$1", " ")], + halt(). +endef +else +define compile_proto.erl + [begin + gpb_compile:file(F, [ + {include_as_lib, true}, + {module_name_suffix, "_pb"}, + {o_hrl, "./include"}, + {o_erl, "./src"}]) + end || F <- string:tokens("$1", " ")], + halt(). +endef +endif + +ifneq ($(PROTO_FILES),) +$(PROJECT).d:: $(PROTO_FILES) + $(verbose) mkdir -p ebin/ include/ + $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?))) +endif +endif +endif + +# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: relx-rel relx-relup distclean-relx-rel run + +# Configuration. 
+ +RELX ?= $(ERLANG_MK_TMP)/relx +RELX_CONFIG ?= $(CURDIR)/relx.config + +RELX_URL ?= https://erlang.mk/res/relx-v3.27.0 +RELX_OPTS ?= +RELX_OUTPUT_DIR ?= _rel +RELX_REL_EXT ?= +RELX_TAR ?= 1 + +ifdef SFX + RELX_TAR = 1 +endif + +ifeq ($(firstword $(RELX_OPTS)),-o) + RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) +else + RELX_OPTS += -o $(RELX_OUTPUT_DIR) +endif + +# Core targets. + +ifeq ($(IS_DEP),) +ifneq ($(wildcard $(RELX_CONFIG)),) +rel:: relx-rel + +relup:: relx-relup +endif +endif + +distclean:: distclean-relx-rel + +# Plugin-specific targets. + +$(RELX): | $(ERLANG_MK_TMP) + $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) + $(verbose) chmod +x $(RELX) + +relx-rel: $(RELX) rel-deps app + $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release + $(verbose) $(MAKE) relx-post-rel +ifeq ($(RELX_TAR),1) + $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar +endif + +relx-relup: $(RELX) rel-deps app + $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release + $(MAKE) relx-post-rel + $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar) + +distclean-relx-rel: + $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) + +# Default hooks. +relx-post-rel:: + $(verbose) : + +# Run target. + +ifeq ($(wildcard $(RELX_CONFIG)),) +run:: +else + +define get_relx_release.erl + {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"), + {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config), + Vsn = case Vsn0 of + {cmd, Cmd} -> os:cmd(Cmd); + semver -> ""; + {semver, _} -> ""; + VsnStr -> Vsn0 + end, + Extended = case lists:keyfind(extended_start_script, 1, Config) of + {_, true} -> "1"; + _ -> "" + end, + io:format("~s ~s ~s", [Name, Vsn, Extended]), + halt(0). +endef + +RELX_REL := $(shell $(call erlang,$(get_relx_release.erl))) +RELX_REL_NAME := $(word 1,$(RELX_REL)) +RELX_REL_VSN := $(word 2,$(RELX_REL)) +RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console) + +ifeq ($(PLATFORM),msys2) +RELX_REL_EXT := .cmd +endif + +run:: all + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD) + +ifdef RELOAD +rel:: + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \ + eval "io:format(\"~p~n\", [c:lm()])" +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Relx targets:" \ + " run Compile the project, build the release and run it" + +endif + +# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> +# Copyright (c) 2014, M Robert Martin <rob@version2beta.com> +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: shell + +# Configuration. + +SHELL_ERL ?= erl +SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR) +SHELL_OPTS ?= + +ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) + +# Core targets + +help:: + $(verbose) printf "%s\n" "" \ + "Shell targets:" \ + " shell Run an erlang shell with SHELL_OPTS or reasonable default" + +# Plugin-specific targets. + +$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +build-shell-deps: +else +build-shell-deps: $(ALL_SHELL_DEPS_DIRS) + $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \ + if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \ + :; \ + else \ + $(MAKE) -C $$dep IS_DEP=1; \ + if [ ! 
-L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \ + fi \ + done +endif + +shell:: build-shell-deps + $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) + +# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-sphinx sphinx + +# Configuration. + +SPHINX_BUILD ?= sphinx-build +SPHINX_SOURCE ?= doc +SPHINX_CONFDIR ?= +SPHINX_FORMATS ?= html +SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees +SPHINX_OPTS ?= + +#sphinx_html_opts = +#sphinx_html_output = html +#sphinx_man_opts = +#sphinx_man_output = man +#sphinx_latex_opts = +#sphinx_latex_output = latex + +# Helpers. + +sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q +sphinx_build_1 = $(SPHINX_BUILD) -N +sphinx_build_2 = set -x; $(SPHINX_BUILD) +sphinx_build = $(sphinx_build_$(V)) + +define sphinx.build +$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1) + +endef + +define sphinx.output +$(if $(sphinx_$1_output),$(sphinx_$1_output),$1) +endef + +# Targets. + +ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),) +docs:: sphinx +distclean:: distclean-sphinx +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Sphinx targets:" \ + " sphinx Generate Sphinx documentation." \ + "" \ + "ReST sources and 'conf.py' file are expected in directory pointed by" \ + "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \ + "'html' format is generated by default); target directory can be specified by" \ + 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \ + "Additional Sphinx options can be set in SPHINX_OPTS." + +# Plugin-specific targets. + +sphinx: + $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F)) + +distclean-sphinx: + $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F))) + +# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com> +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS + +show-ERL_LIBS: + @echo $(ERL_LIBS) + +show-ERLC_OPTS: + @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";) + +show-TEST_ERLC_OPTS: + @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";) + +# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq) +.PHONY: triq + +# Targets. + +tests:: triq + +define triq_check.erl + $(call cover.erl) + code:add_pathsa([ + "$(call core_native_path,$(CURDIR)/ebin)", + "$(call core_native_path,$(DEPS_DIR)/*/ebin)", + "$(call core_native_path,$(TEST_DIR))"]), + try begin + CoverSetup(), + Res = case $(1) of + all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); + module -> triq:check($(2)); + function -> triq:check($(2)) + end, + CoverExport("$(COVER_DATA_DIR)/triq.coverdata"), + Res + end of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]), + halt(0) + end. 
+endef + +ifdef t +ifeq (,$(findstring :,$(t))) +triq: test-build cover-data-dir + $(verbose) $(call erlang,$(call triq_check.erl,module,$(t))) +else +triq: test-build cover-data-dir + $(verbose) echo Testing $(t)/0 + $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)())) +endif +else +triq: test-build cover-data-dir + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam)))))) + $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) +endif +endif + +# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu> +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: xref distclean-xref + +# Configuration. + +ifeq ($(XREF_CONFIG),) + XREFR_ARGS := +else + XREFR_ARGS := -c $(XREF_CONFIG) +endif + +XREFR ?= $(CURDIR)/xrefr +export XREFR + +XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr + +# Core targets. + +help:: + $(verbose) printf '%s\n' '' \ + 'Xref targets:' \ + ' xref Run Xrefr using $$XREF_CONFIG as config file if defined' + +distclean:: distclean-xref + +# Plugin-specific targets. + +$(XREFR): + $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL)) + $(verbose) chmod +x $(XREFR) + +xref: deps app $(XREFR) + $(gen_verbose) $(XREFR) $(XREFR_ARGS) + +distclean-xref: + $(gen_verbose) rm -rf $(XREFR) + +# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu> +# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +COVER_REPORT_DIR ?= cover +COVER_DATA_DIR ?= $(COVER_REPORT_DIR) + +ifdef COVER +COVER_APPS ?= $(notdir $(ALL_APPS_DIRS)) +COVER_DEPS ?= +endif + +# Code coverage for Common Test. + +ifdef COVER +ifdef CT_RUN +ifneq ($(wildcard $(TEST_DIR)),) +test-build:: $(TEST_DIR)/ct.cover.spec + +$(TEST_DIR)/ct.cover.spec: cover-data-dir + $(gen_verbose) printf "%s\n" \ + "{incl_app, '$(PROJECT)', details}." \ + "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \ + $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \ + $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \ + '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@ + +CT_RUN += -cover $(TEST_DIR)/ct.cover.spec +endif +endif +endif + +# Code coverage for other tools. 
+ +ifdef COVER +define cover.erl + CoverSetup = fun() -> + Dirs = ["$(call core_native_path,$(CURDIR)/ebin)" + $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)") + $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")], + [begin + case filelib:is_dir(Dir) of + false -> false; + true -> + case cover:compile_beam_directory(Dir) of + {error, _} -> halt(1); + _ -> true + end + end + end || Dir <- Dirs] + end, + CoverExport = fun(Filename) -> cover:export(Filename) end, +endef +else +define cover.erl + CoverSetup = fun() -> ok end, + CoverExport = fun(_) -> ok end, +endef +endif + +# Core targets + +ifdef COVER +ifneq ($(COVER_REPORT_DIR),) +tests:: + $(verbose) $(MAKE) --no-print-directory cover-report +endif + +cover-data-dir: | $(COVER_DATA_DIR) + +$(COVER_DATA_DIR): + $(verbose) mkdir -p $(COVER_DATA_DIR) +else +cover-data-dir: +endif + +clean:: coverdata-clean + +ifneq ($(COVER_REPORT_DIR),) +distclean:: cover-report-clean +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Cover targets:" \ + " cover-report Generate a HTML coverage report from previously collected" \ + " cover data." \ + " all.coverdata Merge all coverdata files into all.coverdata." \ + "" \ + "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ + "target tests additionally generates a HTML coverage report from the combined" \ + "coverdata files from each of these testing tools. HTML reports can be disabled" \ + "by setting COVER_REPORT_DIR to empty." + +# Plugin specific targets + +COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata)) + +.PHONY: coverdata-clean +coverdata-clean: + $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec + +# Merge all coverdata files into one. +define cover_export.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + cover:export("$(COVER_DATA_DIR)/$@"), halt(0). +endef + +all.coverdata: $(COVERDATA) cover-data-dir + $(gen_verbose) $(call erlang,$(cover_export.erl)) + +# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to +# empty if you want the coverdata files but not the HTML report. +ifneq ($(COVER_REPORT_DIR),) + +.PHONY: cover-report-clean cover-report + +cover-report-clean: + $(gen_verbose) rm -rf $(COVER_REPORT_DIR) +ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR)) + $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR)) +endif + +ifeq ($(COVERDATA),) +cover-report: +else + +# Modules which include eunit.hrl always contain one line without coverage +# because eunit defines test/0 which is never called. We compensate for this. 
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ + grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ + | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) + +define cover_report.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + Ms = cover:imported_modules(), + [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M) + ++ ".COVER.html", [html]) || M <- Ms], + Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms], + EunitHrlMods = [$(EUNIT_HRL_MODS)], + Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of + true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report], + TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]), + TotalN = lists:sum([N || {_, {_, N}} <- Report1]), + Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end, + TotalPerc = Perc(TotalY, TotalN), + {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]), + io:format(F, "<!DOCTYPE html><html>~n" + "<head><meta charset=\"UTF-8\">~n" + "<title>Coverage report</title></head>~n" + "<body>~n", []), + io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]), + io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []), + [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>" + "<td>~p%</td></tr>~n", + [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1], + How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))", + Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")", + io:format(F, "</table>~n" + "<p>Generated using ~s and erlang.mk on ~s.</p>~n" + "</body></html>", [How, Date]), + halt(). +endef + +cover-report: + $(verbose) mkdir -p $(COVER_REPORT_DIR) + $(gen_verbose) $(call erlang,$(cover_report.erl)) + +endif +endif # ifneq ($(COVER_REPORT_DIR),) + +# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: sfx + +ifdef RELX_REL +ifdef SFX + +# Configuration. + +SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz +SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run + +# Core targets. + +rel:: sfx + +# Plugin-specific targets. + +define sfx_stub +#!/bin/sh + +TMPDIR=`mktemp -d` +ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0` +FILENAME=$$(basename $$0) +REL=$${FILENAME%.*} + +tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR + +$$TMPDIR/bin/$$REL console +RET=$$? + +rm -rf $$TMPDIR + +exit $$RET + +__ARCHIVE_BELOW__ +endef + +sfx: + $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE)) + $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE) + $(verbose) chmod +x $(SFX_OUTPUT_FILE) + +endif +endif + +# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# External plugins. + +DEP_PLUGINS ?= + +$(foreach p,$(DEP_PLUGINS),\ + $(eval $(if $(findstring /,$p),\ + $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ + $(call core_dep_plugin,$p/plugins.mk,$p)))) + +help:: help-plugins + +help-plugins:: + $(verbose) : + +# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu> +# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com> +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Fetch dependencies recursively (without building them). 
+ +.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \ + fetch-shell-deps + +.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \ + $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ + $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ + $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ + $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST) +fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +ifneq ($(SKIP_DEPS),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): + $(verbose) :> $@ +else +# By default, we fetch "normal" dependencies. They are also included no +# matter the type of requested dependencies. +# +# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS). + +$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. +ifneq ($(filter doc,$(DEP_TYPES)),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS) +endif + +ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log) + +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP) +ifeq ($(IS_APP)$(IS_DEP),) + $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST) +endif + $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST) + $(verbose) set -e; for dep in $^ ; do \ + if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \ + echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C $$dep fetch-deps \ + IS_DEP=1 \ + ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \ + fi \ + fi \ + done +ifeq ($(IS_APP)$(IS_DEP),) + $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \ + uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted + $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \ + || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ + $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted + $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST) +endif +endif # ifneq ($(SKIP_DEPS),) + +# List dependencies recursively. 
+ +.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \ + list-shell-deps + +list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST) +list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + $(verbose) cat $^ + +# Query dependencies recursively. + +.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \ + query-shell-deps + +QUERY ?= name fetch_method repo version + +define query_target +$(1): $(2) clean-tmp-query.log +ifeq ($(IS_APP)$(IS_DEP),) + $(verbose) rm -f $(4) +endif + $(verbose) $(foreach dep,$(3),\ + echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;) + $(if $(filter-out query-deps,$(1)),,\ + $(verbose) set -e; for dep in $(3) ; do \ + if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \ + :; \ + else \ + echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \ + $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \ + fi \ + done) +ifeq ($(IS_APP)$(IS_DEP),) + $(verbose) touch $(4) + $(verbose) cat $(4) +endif +endef + +clean-tmp-query.log: +ifeq ($(IS_DEP),) + $(verbose) rm -f $(ERLANG_MK_TMP)/query.log +endif + +$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE))) +$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE))) +$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE))) +$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE))) +$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE))) diff --git a/deps/rabbitmq_mqtt/include/mqtt_machine.hrl b/deps/rabbitmq_mqtt/include/mqtt_machine.hrl new file mode 100644 index 0000000000..b670c7b32e --- /dev/null +++ b/deps/rabbitmq_mqtt/include/mqtt_machine.hrl @@ -0,0 +1,8 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-record(machine_state, {client_ids = #{}}). diff --git a/deps/rabbitmq_mqtt/include/rabbit_mqtt.hrl b/deps/rabbitmq_mqtt/include/rabbit_mqtt.hrl new file mode 100644 index 0000000000..912f5ad46f --- /dev/null +++ b/deps/rabbitmq_mqtt/include/rabbit_mqtt.hrl @@ -0,0 +1,92 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-define(CLIENT_ID_MAXLEN, 23). + +%% reader state +-record(state, { socket, + conn_name, + await_recv, + deferred_recv, + received_connect_frame, + connection_state, + keepalive, + keepalive_sup, + conserve, + parse_state, + proc_state, + connection, + stats_timer }). 
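%% Broadly, the reader state above tracks socket- and connection-level
%% concerns (receive flow control, keepalive, frame parsing), while the
%% per-session MQTT concerns (subscriptions, unacknowledged publishes,
%% the will message, channels) live in the #proc_state{} record below;
%% the reader keeps that record in its proc_state field.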
+ +%% processor state +-record(proc_state, { socket, + subscriptions, + consumer_tags, + unacked_pubs, + awaiting_ack, + awaiting_seqno, + message_id, + client_id, + clean_sess, + will_msg, + channels, + connection, + exchange, + adapter_info, + ssl_login_name, + %% Retained messages handler. See rabbit_mqtt_retainer_sup + %% and rabbit_mqtt_retainer. + retainer_pid, + auth_state, + send_fun, + peer_addr, + mqtt2amqp_fun, + amqp2mqtt_fun, + register_state }). + +-record(auth_state, {username, + user, + vhost}). + +%% does not include vhost: it is used in +%% the table name +-record(retained_message, {topic, + mqtt_msg}). + +-define(INFO_ITEMS, + [host, + port, + peer_host, + peer_port, + protocol, + channels, + channel_max, + frame_max, + client_properties, + ssl, + ssl_protocol, + ssl_key_exchange, + ssl_cipher, + ssl_hash, + conn_name, + connection_state, + connection, + consumer_tags, + unacked_pubs, + awaiting_ack, + awaiting_seqno, + message_id, + client_id, + clean_sess, + will_msg, + exchange, + ssl_login_name, + retainer_pid, + user, + vhost]). + +-define(MQTT_GUIDE_URL, <<"https://rabbitmq.com/mqtt.html">>). diff --git a/deps/rabbitmq_mqtt/include/rabbit_mqtt_frame.hrl b/deps/rabbitmq_mqtt/include/rabbit_mqtt_frame.hrl new file mode 100644 index 0000000000..2b06da502b --- /dev/null +++ b/deps/rabbitmq_mqtt/include/rabbit_mqtt_frame.hrl @@ -0,0 +1,90 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-define(PROTOCOL_NAMES, [{3, "MQIsdp"}, {4, "MQTT"}]). + +%% frame types + +-define(CONNECT, 1). +-define(CONNACK, 2). +-define(PUBLISH, 3). +-define(PUBACK, 4). +-define(PUBREC, 5). +-define(PUBREL, 6). +-define(PUBCOMP, 7). +-define(SUBSCRIBE, 8). +-define(SUBACK, 9). +-define(UNSUBSCRIBE, 10). +-define(UNSUBACK, 11). +-define(PINGREQ, 12). +-define(PINGRESP, 13). +-define(DISCONNECT, 14). + +%% connect return codes + +-define(CONNACK_ACCEPT, 0). +-define(CONNACK_PROTO_VER, 1). %% unacceptable protocol version +-define(CONNACK_INVALID_ID, 2). %% identifier rejected +-define(CONNACK_SERVER, 3). %% server unavailable +-define(CONNACK_CREDENTIALS, 4). %% bad user name or password +-define(CONNACK_AUTH, 5). %% not authorized + +%% qos levels + +-define(QOS_0, 0). +-define(QOS_1, 1). +-define(QOS_2, 2). + +%% TODO +-type message_id() :: any(). + +-record(mqtt_frame, {fixed, + variable, + payload}). + +-record(mqtt_frame_fixed, {type = 0, + dup = 0, + qos = 0, + retain = 0}). + +-record(mqtt_frame_connect, {proto_ver, + will_retain, + will_qos, + will_flag, + clean_sess, + keep_alive, + client_id, + will_topic, + will_msg, + username, + password}). + +-record(mqtt_frame_connack, {session_present, + return_code}). + +-record(mqtt_frame_publish, {topic_name, + message_id}). + +-record(mqtt_frame_subscribe,{message_id, + topic_table}). + +-record(mqtt_frame_suback, {message_id, + qos_table = []}). + +-record(mqtt_topic, {name, + qos}). + +-record(mqtt_frame_other, {other}). + +-record(mqtt_msg, {retain :: boolean(), + qos :: ?QOS_0 | ?QOS_1 | ?QOS_2, + topic :: string(), + dup :: boolean(), + message_id :: message_id(), + payload :: binary()}). + +-type mqtt_msg() :: #mqtt_msg{}. 
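For illustration (not part of the original files): the records and constants above are enough to build and round-trip a simple frame with the rabbit_mqtt_frame module added later in this diff, from any module that includes this header. The topic name and payload below are made-up values.

    %% Build a QoS 0 PUBLISH frame, serialise it, and parse it back.
    Frame = #mqtt_frame{fixed    = #mqtt_frame_fixed{type = ?PUBLISH, qos = ?QOS_0},
                        variable = #mqtt_frame_publish{topic_name = "sensors/temp",
                                                       message_id = undefined},
                        payload  = <<"21.5">>},
    Bin = rabbit_mqtt_frame:serialise(Frame),
    {ok, #mqtt_frame{payload = <<"21.5">>}, <<>>} =
        rabbit_mqtt_frame:parse(Bin, rabbit_mqtt_frame:initial_state()).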
diff --git a/deps/rabbitmq_mqtt/include/rabbit_mqtt_retained_msg_store.hrl b/deps/rabbitmq_mqtt/include/rabbit_mqtt_retained_msg_store.hrl new file mode 100644 index 0000000000..52b61b5924 --- /dev/null +++ b/deps/rabbitmq_mqtt/include/rabbit_mqtt_retained_msg_store.hrl @@ -0,0 +1,6 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% diff --git a/deps/rabbitmq_mqtt/priv/schema/rabbitmq_mqtt.schema b/deps/rabbitmq_mqtt/priv/schema/rabbitmq_mqtt.schema new file mode 100644 index 0000000000..317f5bb04f --- /dev/null +++ b/deps/rabbitmq_mqtt/priv/schema/rabbitmq_mqtt.schema @@ -0,0 +1,259 @@ +%% ---------------------------------------------------------------------------- +%% RabbitMQ MQTT Adapter +%% +%% See https://github.com/rabbitmq/rabbitmq-mqtt/blob/stable/README.md +%% for details +%% ---------------------------------------------------------------------------- + +% {rabbitmq_mqtt, +% [%% Set the default user name and password. Will be used as the default login +%% if a connecting client provides no other login details. +%% +%% Please note that setting this will allow clients to connect without +%% authenticating! +%% +%% {default_user, <<"guest">>}, +%% {default_pass, <<"guest">>}, + +{mapping, "mqtt.default_user", "rabbitmq_mqtt.default_user", [ + {datatype, string} +]}. + +{mapping, "mqtt.default_pass", "rabbitmq_mqtt.default_pass", [ + {datatype, string} +]}. + +{translation, "rabbitmq_mqtt.default_user", +fun(Conf) -> + list_to_binary(cuttlefish:conf_get("mqtt.default_user", Conf)) +end}. + +{translation, "rabbitmq_mqtt.default_pass", +fun(Conf) -> + list_to_binary(cuttlefish:conf_get("mqtt.default_pass", Conf)) +end}. + +%% Enable anonymous access. If this is set to false, clients MUST provide +%% login information in order to connect. See the default_user/default_pass +%% configuration elements for managing logins without authentication. +%% +%% {allow_anonymous, true}, + +{mapping, "mqtt.allow_anonymous", "rabbitmq_mqtt.allow_anonymous", + [{datatype, {enum, [true, false]}}]}. + +%% If you have multiple chosts, specify the one to which the +%% adapter connects. +%% +%% {vhost, <<"/">>}, + +{mapping, "mqtt.vhost", "rabbitmq_mqtt.vhost", [{datatype, string}]}. + +{translation, "rabbitmq_mqtt.vhost", +fun(Conf) -> + list_to_binary(cuttlefish:conf_get("mqtt.vhost", Conf)) +end}. + +%% Specify the exchange to which messages from MQTT clients are published. +%% +%% {exchange, <<"amq.topic">>}, + +{mapping, "mqtt.exchange", "rabbitmq_mqtt.exchange", [{datatype, string}]}. + +{translation, "rabbitmq_mqtt.exchange", +fun(Conf) -> + list_to_binary(cuttlefish:conf_get("mqtt.exchange", Conf)) +end}. + +%% Specify TTL (time to live) to control the lifetime of non-clean sessions. +%% +%% {subscription_ttl, 1800000}, +{mapping, "mqtt.subscription_ttl", "rabbitmq_mqtt.subscription_ttl", [ + {datatype, [{enum, [undefined, infinity]}, integer]} +]}. + +{translation, "rabbitmq_mqtt.subscription_ttl", +fun(Conf) -> + case cuttlefish:conf_get("mqtt.subscription_ttl", Conf, undefined) of + undefined -> undefined; + infinity -> undefined; + Ms -> Ms + end +end}. + +%% Set the prefetch count (governing the maximum number of unacknowledged +%% messages that will be delivered). 
+%% +%% {prefetch, 10}, +{mapping, "mqtt.prefetch", "rabbitmq_mqtt.prefetch", + [{datatype, integer}]}. + +%% Enable "Sparkplug B" namespace recognition so that the dot in the +%% namespace is not translated to a slash +%% +%% {sparkplug, true}, +{mapping, "mqtt.sparkplug", "rabbitmq_mqtt.sparkplug", + [{datatype, {enum, [true, false]}}]}. + +{mapping, "mqtt.retained_message_store", "rabbitmq_mqtt.retained_message_store", + [{datatype, atom}]}. + +{mapping, "mqtt.retained_message_store_dets_sync_interval", "rabbitmq_mqtt.retained_message_store_dets_sync_interval", + [{datatype, integer}]}. + +%% Whether or not to enable proxy protocol support. +%% +%% {proxy_protocol, false} + +{mapping, "mqtt.proxy_protocol", "rabbitmq_mqtt.proxy_protocol", + [{datatype, {enum, [true, false]}}]}. + +%% TCP/SSL Configuration (as per the broker configuration). +%% +%% {tcp_listeners, [1883]}, +%% {ssl_listeners, []}, + +{mapping, "mqtt.listeners.tcp", "rabbitmq_mqtt.tcp_listeners",[ + {datatype, {enum, [none]}} +]}. + +{mapping, "mqtt.listeners.tcp.$name", "rabbitmq_mqtt.tcp_listeners",[ + {datatype, [integer, ip]} +]}. + +{translation, "rabbitmq_mqtt.tcp_listeners", +fun(Conf) -> + case cuttlefish:conf_get("mqtt.listeners.tcp", Conf, undefined) of + none -> []; + _ -> + Settings = cuttlefish_variable:filter_by_prefix("mqtt.listeners.tcp", Conf), + [ V || {_, V} <- Settings ] + end +end}. + +{mapping, "mqtt.listeners.ssl", "rabbitmq_mqtt.ssl_listeners",[ + {datatype, {enum, [none]}} +]}. + +{mapping, "mqtt.listeners.ssl.$name", "rabbitmq_mqtt.ssl_listeners",[ + {datatype, [integer, ip]} +]}. + +{translation, "rabbitmq_mqtt.ssl_listeners", +fun(Conf) -> + case cuttlefish:conf_get("mqtt.listeners.ssl", Conf, undefined) of + none -> []; + _ -> + Settings = cuttlefish_variable:filter_by_prefix("mqtt.listeners.ssl", Conf), + [ V || {_, V} <- Settings ] + end +end}. + +%% Number of Erlang processes that will accept connections for the TCP +%% and SSL listeners. +%% +%% {num_tcp_acceptors, 10}, +%% {num_ssl_acceptors, 10}, + +{mapping, "mqtt.num_acceptors.ssl", "rabbitmq_mqtt.num_ssl_acceptors", [ + {datatype, integer} +]}. + +{mapping, "mqtt.num_acceptors.tcp", "rabbitmq_mqtt.num_tcp_acceptors", [ + {datatype, integer} +]}. + +{mapping, "mqtt.ssl_cert_login", "rabbitmq_mqtt.ssl_cert_login", [ + {datatype, {enum, [true, false]}}]}. + + +%% TCP/Socket options (as per the broker configuration). +%% +%% {tcp_listen_options, [{backlog, 128}, +%% {nodelay, true}]} +% ]}, + +%% TCP listener section ====================================================== + +{mapping, "mqtt.tcp_listen_options", "rabbitmq_mqtt.rabbit.tcp_listen_options", [ + {datatype, {enum, [none]}}]}. + +{translation, "rabbitmq_mqtt.rabbit.tcp_listen_options", +fun(Conf) -> + case cuttlefish:conf_get("mqtt.tcp_listen_options") of + none -> []; + _ -> cuttlefish:invalid("Invalid mqtt.tcp_listen_options") + end +end}. + +{mapping, "mqtt.tcp_listen_options.backlog", "rabbitmq_mqtt.tcp_listen_options.backlog", [ + {datatype, integer} +]}. + +{mapping, "mqtt.tcp_listen_options.nodelay", "rabbitmq_mqtt.tcp_listen_options.nodelay", [ + {datatype, {enum, [true, false]}} +]}. + +{mapping, "mqtt.tcp_listen_options.buffer", "rabbitmq_mqtt.tcp_listen_options.buffer", + [{datatype, integer}]}. + +{mapping, "mqtt.tcp_listen_options.delay_send", "rabbitmq_mqtt.tcp_listen_options.delay_send", + [{datatype, {enum, [true, false]}}]}. + +{mapping, "mqtt.tcp_listen_options.dontroute", "rabbitmq_mqtt.tcp_listen_options.dontroute", + [{datatype, {enum, [true, false]}}]}. 
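%% For illustration only (not part of the schema): given the listener and
%% acceptor mappings above, a rabbitmq.conf snippet such as
%%
%%   mqtt.listeners.tcp.default = 1883
%%   mqtt.num_acceptors.tcp     = 10
%%   mqtt.allow_anonymous       = true
%%
%% is expected to translate into the classic-format application environment
%%
%%   {rabbitmq_mqtt, [{tcp_listeners,     [1883]},
%%                    {num_tcp_acceptors, 10},
%%                    {allow_anonymous,   true}]}.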
+ +{mapping, "mqtt.tcp_listen_options.exit_on_close", "rabbitmq_mqtt.tcp_listen_options.exit_on_close", + [{datatype, {enum, [true, false]}}]}. + +{mapping, "mqtt.tcp_listen_options.fd", "rabbitmq_mqtt.tcp_listen_options.fd", + [{datatype, integer}]}. + +{mapping, "mqtt.tcp_listen_options.high_msgq_watermark", "rabbitmq_mqtt.tcp_listen_options.high_msgq_watermark", + [{datatype, integer}]}. + +{mapping, "mqtt.tcp_listen_options.high_watermark", "rabbitmq_mqtt.tcp_listen_options.high_watermark", + [{datatype, integer}]}. + +{mapping, "mqtt.tcp_listen_options.keepalive", "rabbitmq_mqtt.tcp_listen_options.keepalive", + [{datatype, {enum, [true, false]}}]}. + +{mapping, "mqtt.tcp_listen_options.low_msgq_watermark", "rabbitmq_mqtt.tcp_listen_options.low_msgq_watermark", + [{datatype, integer}]}. + +{mapping, "mqtt.tcp_listen_options.low_watermark", "rabbitmq_mqtt.tcp_listen_options.low_watermark", + [{datatype, integer}]}. + +{mapping, "mqtt.tcp_listen_options.port", "rabbitmq_mqtt.tcp_listen_options.port", + [{datatype, integer}, {validators, ["port"]}]}. + +{mapping, "mqtt.tcp_listen_options.priority", "rabbitmq_mqtt.tcp_listen_options.priority", + [{datatype, integer}]}. + +{mapping, "mqtt.tcp_listen_options.recbuf", "rabbitmq_mqtt.tcp_listen_options.recbuf", + [{datatype, integer}]}. + +{mapping, "mqtt.tcp_listen_options.send_timeout", "rabbitmq_mqtt.tcp_listen_options.send_timeout", + [{datatype, integer}]}. + +{mapping, "mqtt.tcp_listen_options.send_timeout_close", "rabbitmq_mqtt.tcp_listen_options.send_timeout_close", + [{datatype, {enum, [true, false]}}]}. + +{mapping, "mqtt.tcp_listen_options.sndbuf", "rabbitmq_mqtt.tcp_listen_options.sndbuf", + [{datatype, integer}]}. + +{mapping, "mqtt.tcp_listen_options.tos", "rabbitmq_mqtt.tcp_listen_options.tos", + [{datatype, integer}]}. + +{mapping, "mqtt.tcp_listen_options.linger.on", "rabbitmq_mqtt.tcp_listen_options.linger", + [{datatype, {enum, [true, false]}}]}. + +{mapping, "mqtt.tcp_listen_options.linger.timeout", "rabbitmq_mqtt.tcp_listen_options.linger", + [{datatype, integer}, {validators, ["non_negative_integer"]}]}. + +{translation, "rabbitmq_mqtt.tcp_listen_options.linger", +fun(Conf) -> + LingerOn = cuttlefish:conf_get("mqtt.tcp_listen_options.linger.on", Conf, false), + LingerTimeout = cuttlefish:conf_get("mqtt.tcp_listen_options.linger.timeout", Conf, 0), + {LingerOn, LingerTimeout} +end}. diff --git a/deps/rabbitmq_mqtt/rabbitmq-components.mk b/deps/rabbitmq_mqtt/rabbitmq-components.mk new file mode 100644 index 0000000000..b2a3be8b35 --- /dev/null +++ b/deps/rabbitmq_mqtt/rabbitmq-components.mk @@ -0,0 +1,359 @@ +ifeq ($(.DEFAULT_GOAL),) +# Define default goal to `all` because this file defines some targets +# before the inclusion of erlang.mk leading to the wrong target becoming +# the default. +.DEFAULT_GOAL = all +endif + +# PROJECT_VERSION defaults to: +# 1. the version exported by rabbitmq-server-release; +# 2. the version stored in `git-revisions.txt`, if it exists; +# 3. a version based on git-describe(1), if it is a Git clone; +# 4. 
0.0.0 + +PROJECT_VERSION := $(RABBITMQ_VERSION) + +ifeq ($(PROJECT_VERSION),) +PROJECT_VERSION := $(shell \ +if test -f git-revisions.txt; then \ + head -n1 git-revisions.txt | \ + awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \ +else \ + (git describe --dirty --abbrev=7 --tags --always --first-parent \ + 2>/dev/null || echo rabbitmq_v0_0_0) | \ + sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \ + -e 's/-/./g'; \ +fi) +endif + +# -------------------------------------------------------------------- +# RabbitMQ components. +# -------------------------------------------------------------------- + +# For RabbitMQ repositories, we want to checkout branches which match +# the parent project. For instance, if the parent project is on a +# release tag, dependencies must be on the same release tag. If the +# parent project is on a topic branch, dependencies must be on the same +# topic branch or fallback to `stable` or `master` whichever was the +# base of the topic branch. + +dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master +dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master +dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_java_client = git_rmq rabbitmq-java-client 
$(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp = git_rmq 
rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master +dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master + +dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master + +# Third-party dependencies version pinning. +# +# We do that in this file, which is copied in all projects, to ensure +# all projects use the same versions. It avoids conflicts and makes it +# possible to work with rabbitmq-public-umbrella. + +dep_accept = hex 0.3.5 +dep_cowboy = hex 2.8.0 +dep_cowlib = hex 2.9.1 +dep_jsx = hex 2.11.0 +dep_lager = hex 3.8.0 +dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master +dep_ra = git https://github.com/rabbitmq/ra.git master +dep_ranch = hex 1.7.1 +dep_recon = hex 2.5.1 +dep_observer_cli = hex 1.5.4 +dep_stdout_formatter = hex 0.2.4 +dep_sysmon_handler = hex 1.3.0 + +RABBITMQ_COMPONENTS = amqp_client \ + amqp10_common \ + amqp10_client \ + rabbit \ + rabbit_common \ + rabbitmq_amqp1_0 \ + rabbitmq_auth_backend_amqp \ + rabbitmq_auth_backend_cache \ + rabbitmq_auth_backend_http \ + rabbitmq_auth_backend_ldap \ + rabbitmq_auth_backend_oauth2 \ + rabbitmq_auth_mechanism_ssl \ + rabbitmq_aws \ + rabbitmq_boot_steps_visualiser \ + rabbitmq_cli \ + rabbitmq_codegen \ + rabbitmq_consistent_hash_exchange \ + rabbitmq_ct_client_helpers \ + rabbitmq_ct_helpers \ + rabbitmq_delayed_message_exchange \ + rabbitmq_dotnet_client \ + rabbitmq_event_exchange \ + rabbitmq_federation \ + rabbitmq_federation_management \ + rabbitmq_java_client \ + rabbitmq_jms_client \ + rabbitmq_jms_cts \ + rabbitmq_jms_topic_exchange \ + rabbitmq_lvc_exchange \ + rabbitmq_management \ + rabbitmq_management_agent \ + rabbitmq_management_exchange \ + rabbitmq_management_themes \ + rabbitmq_message_timestamp \ + rabbitmq_metronome \ + rabbitmq_mqtt \ + rabbitmq_objc_client \ + rabbitmq_peer_discovery_aws \ + rabbitmq_peer_discovery_common \ + rabbitmq_peer_discovery_consul \ + rabbitmq_peer_discovery_etcd \ + rabbitmq_peer_discovery_k8s \ + rabbitmq_prometheus \ + rabbitmq_random_exchange \ + rabbitmq_recent_history_exchange \ + rabbitmq_routing_node_stamp \ + rabbitmq_rtopic_exchange \ + rabbitmq_server_release \ + rabbitmq_sharding \ + rabbitmq_shovel \ + rabbitmq_shovel_management \ + rabbitmq_stomp \ + rabbitmq_stream \ + rabbitmq_toke \ + rabbitmq_top \ + rabbitmq_tracing \ + rabbitmq_trust_store \ + rabbitmq_web_dispatch \ + rabbitmq_web_mqtt \ + rabbitmq_web_mqtt_examples \ + rabbitmq_web_stomp \ + rabbitmq_web_stomp_examples \ + rabbitmq_website + +# Erlang.mk does not rebuild dependencies by default, once they were +# compiled once, except for those listed in the `$(FORCE_REBUILD)` +# variable. +# +# We want all RabbitMQ components to always be rebuilt: this eases +# the work on several components at the same time. + +FORCE_REBUILD = $(RABBITMQ_COMPONENTS) + +# Several components have a custom erlang.mk/build.config, mainly +# to disable eunit. Therefore, we can't use the top-level project's +# erlang.mk copy. 
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS) + +ifeq ($(origin current_rmq_ref),undefined) +ifneq ($(wildcard .git),) +current_rmq_ref := $(shell (\ + ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\ + if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi)) +else +current_rmq_ref := master +endif +endif +export current_rmq_ref + +ifeq ($(origin base_rmq_ref),undefined) +ifneq ($(wildcard .git),) +possible_base_rmq_ref := master +ifeq ($(possible_base_rmq_ref),$(current_rmq_ref)) +base_rmq_ref := $(current_rmq_ref) +else +base_rmq_ref := $(shell \ + (git rev-parse --verify -q master >/dev/null && \ + git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \ + git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \ + echo $(possible_base_rmq_ref)) || \ + echo master) +endif +else +base_rmq_ref := master +endif +endif +export base_rmq_ref + +# Repository URL selection. +# +# First, we infer other components' location from the current project +# repository URL, if it's a Git repository: +# - We take the "origin" remote URL as the base +# - The current project name and repository name is replaced by the +# target's properties: +# eg. rabbitmq-common is replaced by rabbitmq-codegen +# eg. rabbit_common is replaced by rabbitmq_codegen +# +# If cloning from this computed location fails, we fallback to RabbitMQ +# upstream which is GitHub. + +# Macro to transform eg. "rabbit_common" to "rabbitmq-common". +rmq_cmp_repo_name = $(word 2,$(dep_$(1))) + +# Upstream URL for the current project. +RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT)) +RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git +RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git + +# Current URL for the current project. If this is not a Git clone, +# default to the upstream Git repository. +ifneq ($(wildcard .git),) +git_origin_fetch_url := $(shell git config remote.origin.url) +git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url) +RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url) +RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url) +else +RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL) +RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL) +endif + +# Macro to replace the following pattern: +# 1. /foo.git -> /bar.git +# 2. /foo -> /bar +# 3. /foo/ -> /bar/ +subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3)))) + +# Macro to replace both the project's name (eg. "rabbit_common") and +# repository name (eg. "rabbitmq-common") by the target's equivalent. +# +# This macro is kept on one line because we don't want whitespaces in +# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell +# single-quoted string. 
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo)) + +dep_rmq_commits = $(if $(dep_$(1)), \ + $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \ + $(pkg_$(1)_commit)) + +define dep_fetch_git_rmq + fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \ + fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \ + if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \ + git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url1"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \ + elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url2"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \ + fi; \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \ + $(foreach ref,$(call dep_rmq_commits,$(1)), \ + git checkout -q $(ref) >/dev/null 2>&1 || \ + ) \ + (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \ + 1>&2 && false) ) && \ + (test "$$$$fetch_url" = "$$$$push_url" || \ + git remote set-url --push origin "$$$$push_url") +endef + +# -------------------------------------------------------------------- +# Component distribution. +# -------------------------------------------------------------------- + +list-dist-deps:: + @: + +prepare-dist:: + @: + +# -------------------------------------------------------------------- +# Umbrella-specific settings. +# -------------------------------------------------------------------- + +# If the top-level project is a RabbitMQ component, we override +# $(DEPS_DIR) for this project to point to the top-level's one. +# +# We also verify that the guessed DEPS_DIR is actually named `deps`, +# to rule out any situation where it is a coincidence that we found a +# `rabbitmq-components.mk` up upper directories. + +possible_deps_dir_1 = $(abspath ..) +possible_deps_dir_2 = $(abspath ../../..) + +ifeq ($(notdir $(possible_deps_dir_1)),deps) +ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),) +deps_dir_overriden = 1 +DEPS_DIR ?= $(possible_deps_dir_1) +DISABLE_DISTCLEAN = 1 +endif +endif + +ifeq ($(deps_dir_overriden),) +ifeq ($(notdir $(possible_deps_dir_2)),deps) +ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),) +deps_dir_overriden = 1 +DEPS_DIR ?= $(possible_deps_dir_2) +DISABLE_DISTCLEAN = 1 +endif +endif +endif + +ifneq ($(wildcard UMBRELLA.md),) +DISABLE_DISTCLEAN = 1 +endif + +# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed. + +ifeq ($(DISABLE_DISTCLEAN),1) +ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),) +SKIP_DEPS = 1 +endif +endif diff --git a/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DecommissionMqttNodeCommand.erl b/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DecommissionMqttNodeCommand.erl new file mode 100644 index 0000000000..f0aefb526b --- /dev/null +++ b/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.DecommissionMqttNodeCommand.erl @@ -0,0 +1,68 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. + +-module('Elixir.RabbitMQ.CLI.Ctl.Commands.DecommissionMqttNodeCommand'). 
+ +-include("rabbit_mqtt.hrl"). + +-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour'). + +-export([scopes/0, + switches/0, + aliases/0, + usage/0, + usage_doc_guides/0, + banner/2, + validate/2, + merge_defaults/2, + run/2, + output/2, + description/0, + help_section/0]). + +scopes() -> [ctl]. +switches() -> []. +aliases() -> []. + +description() -> <<"Removes cluster member and permanently deletes its cluster-wide MQTT state">>. + +help_section() -> + {plugin, mqtt}. + +validate([], _Opts) -> + {validation_failure, not_enough_args}; +validate([_, _ | _], _Opts) -> + {validation_failure, too_many_args}; +validate([_], _) -> + ok. + +merge_defaults(Args, Opts) -> + {Args, Opts}. + +usage() -> + <<"decommission_mqtt_node <node>">>. + +usage_doc_guides() -> + [?MQTT_GUIDE_URL]. + +run([Node], #{node := NodeName, + timeout := Timeout}) -> + case rabbit_misc:rpc_call(NodeName, rabbit_mqtt_collector, leave, [Node], Timeout) of + {badrpc, _} = Error -> + Error; + nodedown -> + {ok, list_to_binary(io_lib:format("Node ~s is down but has been successfully removed" + " from the cluster", [Node]))}; + Result -> + %% 'ok' or 'timeout' + %% TODO: Ra will timeout if the node is not a cluster member - should this be fixed?? + Result + end. + +banner([Node], _) -> list_to_binary(io_lib:format("Removing node ~s from the list of MQTT nodes...", [Node])). + +output(Result, _Opts) -> + 'Elixir.RabbitMQ.CLI.DefaultOutput':output(Result). diff --git a/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListMqttConnectionsCommand.erl b/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListMqttConnectionsCommand.erl new file mode 100644 index 0000000000..a5745a7f58 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListMqttConnectionsCommand.erl @@ -0,0 +1,87 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. + +-module('Elixir.RabbitMQ.CLI.Ctl.Commands.ListMqttConnectionsCommand'). + +-include("rabbit_mqtt.hrl"). + +-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour'). + +-export([formatter/0, + scopes/0, + switches/0, + aliases/0, + usage/0, + usage_additional/0, + usage_doc_guides/0, + banner/2, + validate/2, + merge_defaults/2, + run/2, + output/2, + description/0, + help_section/0]). + +formatter() -> 'Elixir.RabbitMQ.CLI.Formatters.Table'. +scopes() -> [ctl, diagnostics]. +switches() -> [{verbose, boolean}]. +aliases() -> [{'V', verbose}]. + +description() -> <<"Lists MQTT connections on the target node">>. + +help_section() -> + {plugin, mqtt}. + +validate(Args, _) -> + case 'Elixir.RabbitMQ.CLI.Ctl.InfoKeys':validate_info_keys(Args, + ?INFO_ITEMS) of + {ok, _} -> ok; + Error -> Error + end. + +merge_defaults([], Opts) -> + merge_defaults([<<"client_id">>, <<"conn_name">>], Opts); +merge_defaults(Args, Opts) -> + {Args, maps:merge(#{verbose => false}, Opts)}. + +usage() -> + <<"list_mqtt_connections [<column> ...]">>. + +usage_additional() -> + Prefix = <<" must be one of ">>, + InfoItems = 'Elixir.Enum':join(lists:usort(?INFO_ITEMS), <<", ">>), + [ + {<<"<column>">>, <<Prefix/binary, InfoItems/binary>>} + ]. + +usage_doc_guides() -> + [?MQTT_GUIDE_URL]. 
+ +run(Args, #{node := NodeName, + timeout := Timeout, + verbose := Verbose}) -> + InfoKeys = case Verbose of + true -> ?INFO_ITEMS; + false -> 'Elixir.RabbitMQ.CLI.Ctl.InfoKeys':prepare_info_keys(Args) + end, + + %% a node uses the Raft-based collector to list connections, which knows about all connections in the cluster + %% so no need to reach out to all the nodes + Nodes = [NodeName], + + 'Elixir.RabbitMQ.CLI.Ctl.RpcStream':receive_list_items( + NodeName, + rabbit_mqtt, + emit_connection_info_all, + [Nodes, InfoKeys], + Timeout, + InfoKeys, + length(Nodes)). + +banner(_, _) -> <<"Listing MQTT connections ...">>. + +output(Result, _Opts) -> + 'Elixir.RabbitMQ.CLI.DefaultOutput':output(Result). diff --git a/deps/rabbitmq_mqtt/src/mqtt_machine.erl b/deps/rabbitmq_mqtt/src/mqtt_machine.erl new file mode 100644 index 0000000000..334aa9e32c --- /dev/null +++ b/deps/rabbitmq_mqtt/src/mqtt_machine.erl @@ -0,0 +1,134 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% +-module(mqtt_machine). +-behaviour(ra_machine). + +-include("mqtt_machine.hrl"). + +-export([init/1, + apply/3, + state_enter/2, + notify_connection/2]). + +-type state() :: #machine_state{}. + +-type config() :: map(). + +-type reply() :: {ok, term()} | {error, term()}. +-type client_id() :: term(). + +-type command() :: {register, client_id(), pid()} | + {unregister, client_id(), pid()} | + list. + +-spec init(config()) -> state(). +init(_Conf) -> + #machine_state{}. + +-spec apply(map(), command(), state()) -> + {state(), reply(), ra_machine:effects()}. 
+apply(_Meta, {register, ClientId, Pid}, #machine_state{client_ids = Ids} = State0) -> + {Effects, Ids1} = + case maps:find(ClientId, Ids) of + {ok, OldPid} when Pid =/= OldPid -> + Effects0 = [{demonitor, process, OldPid}, + {monitor, process, Pid}, + {mod_call, ?MODULE, notify_connection, [OldPid, duplicate_id]}], + {Effects0, maps:remove(ClientId, Ids)}; + _ -> + Effects0 = [{monitor, process, Pid}], + {Effects0, Ids} + end, + State = State0#machine_state{client_ids = maps:put(ClientId, Pid, Ids1)}, + {State, ok, Effects}; + +apply(Meta, {unregister, ClientId, Pid}, #machine_state{client_ids = Ids} = State0) -> + State = case maps:find(ClientId, Ids) of + {ok, Pid} -> State0#machine_state{client_ids = maps:remove(ClientId, Ids)}; + %% don't delete client id that might belong to a newer connection + %% that kicked the one with Pid out + {ok, _AnotherPid} -> State0; + error -> State0 + end, + Effects0 = [{demonitor, process, Pid}], + %% snapshot only when the map has changed + Effects = case State of + State0 -> Effects0; + _ -> Effects0 ++ snapshot_effects(Meta, State) + end, + {State, ok, Effects}; + +apply(_Meta, {down, DownPid, noconnection}, State) -> + %% Monitor the node the pid is on (see {nodeup, Node} below) + %% so that we can detect when the node is re-connected and discover the + %% actual fate of the connection processes on it + Effect = {monitor, node, node(DownPid)}, + {State, ok, Effect}; + +apply(Meta, {down, DownPid, _}, #machine_state{client_ids = Ids} = State0) -> + Ids1 = maps:filter(fun (_ClientId, Pid) when Pid =:= DownPid -> + false; + (_, _) -> + true + end, Ids), + State = State0#machine_state{client_ids = Ids1}, + Delta = maps:keys(Ids) -- maps:keys(Ids1), + Effects = lists:map(fun(Id) -> + [{mod_call, rabbit_log, debug, + ["MQTT connection with client id '~s' failed", [Id]]}] end, Delta), + {State, ok, Effects ++ snapshot_effects(Meta, State)}; + +apply(_Meta, {nodeup, Node}, State) -> + %% Work out if any pids that were disconnected are still + %% alive. + %% Re-request the monitor for the pids on the now-back node. + Effects = [{monitor, process, Pid} || Pid <- all_pids(State), node(Pid) == Node], + {State, ok, Effects}; +apply(_Meta, {nodedown, _Node}, State) -> + {State, ok}; + +apply(Meta, {leave, Node}, #machine_state{client_ids = Ids} = State0) -> + Ids1 = maps:filter(fun (_ClientId, Pid) -> node(Pid) =/= Node end, Ids), + Delta = maps:keys(Ids) -- maps:keys(Ids1), + + Effects = lists:foldl(fun (ClientId, Acc) -> + Pid = maps:get(ClientId, Ids), + [ + {demonitor, process, Pid}, + {mod_call, ?MODULE, notify_connection, [Pid, decommission_node]}, + {mod_call, rabbit_log, debug, + ["MQTT will remove client ID '~s' from known " + "as its node has been decommissioned", [ClientId]]} + ] ++ Acc + end, [], Delta), + + State = State0#machine_state{client_ids = Ids1}, + {State, ok, Effects ++ snapshot_effects(Meta, State)}; + +apply(_Meta, Unknown, State) -> + error_logger:error_msg("MQTT Raft state machine received unknown command ~p~n", [Unknown]), + {State, {error, {unknown_command, Unknown}}, []}. + +state_enter(leader, State) -> + %% re-request monitors for all known pids, this would clean up + %% records for all connections are no longer around, e.g. right after node restart + [{monitor, process, Pid} || Pid <- all_pids(State)]; +state_enter(_, _) -> + []. + +%% ========================== + +%% Avoids blocking the Raft leader. +notify_connection(Pid, Reason) -> + spawn(fun() -> gen_server2:cast(Pid, Reason) end). 
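%% For illustration only (added comment; the client id and metadata values
%% are made up): a fresh machine registers a client id and asks Ra to
%% monitor the owning process:
%%
%%   Pid = self(),
%%   State0 = mqtt_machine:init(#{}),
%%   {_State1, ok, [{monitor, process, Pid}]} =
%%       mqtt_machine:apply(#{index => 1}, {register, <<"client-1">>, Pid}, State0).
%%
%% A later {unregister, <<"client-1">>, Pid} command removes the entry and
%% emits the matching {demonitor, process, Pid} effect, as implemented above.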
+ +-spec snapshot_effects(map(), state()) -> ra_machine:effects(). +snapshot_effects(#{index := RaftIdx}, State) -> + [{release_cursor, RaftIdx, State}]. + +all_pids(#machine_state{client_ids = Ids}) -> + maps:values(Ids). diff --git a/deps/rabbitmq_mqtt/src/mqtt_node.erl b/deps/rabbitmq_mqtt/src/mqtt_node.erl new file mode 100644 index 0000000000..84dcd9b3a4 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/mqtt_node.erl @@ -0,0 +1,132 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% +-module(mqtt_node). + +-export([start/0, node_id/0, server_id/0, all_node_ids/0, leave/1, trigger_election/0]). + +-define(ID_NAME, mqtt_node). +-define(START_TIMEOUT, 100000). +-define(RETRY_INTERVAL, 5000). +-define(RA_OPERATION_TIMEOUT, 60000). + +node_id() -> + server_id(node()). + +server_id() -> + server_id(node()). + +server_id(Node) -> + {?ID_NAME, Node}. + +all_node_ids() -> + [server_id(N) || N <- rabbit_mnesia:cluster_nodes(all), + can_participate_in_clientid_tracking(N)]. + +start() -> + %% 3s to 6s randomized + Repetitions = rand:uniform(10) + 10, + start(300, Repetitions). + +start(_Delay, AttemptsLeft) when AttemptsLeft =< 0 -> + start_server(), + trigger_election(); +start(Delay, AttemptsLeft) -> + NodeId = server_id(), + Nodes = compatible_peer_servers(), + case ra_directory:uid_of(?ID_NAME) of + undefined -> + case Nodes of + [] -> + %% Since cluster members are not known ahead of time and initial boot can be happening in parallel, + %% we wait and check a few times (up to a few seconds) to see if we can discover any peers to + %% join before forming a cluster. This reduces the probability of N independent clusters being + %% formed in the common scenario of N nodes booting in parallel e.g. because they were started + %% at the same time by a deployment tool. + %% + %% This scenario does not guarantee single cluster formation but without knowing the list of members + %% ahead of time, this is a best effort workaround. Multi-node consensus is apparently hard + %% to achieve without having consensus around expected cluster members. + rabbit_log:info("MQTT: will wait for ~p more ms for cluster members to join before triggering a Raft leader election", [Delay]), + timer:sleep(Delay), + start(Delay, AttemptsLeft - 1); + Peers -> + %% Trigger an election. + %% This is required when we start a node for the first time. + %% Using default timeout because it supposed to reply fast. + rabbit_log:info("MQTT: discovered ~p cluster peers that support client ID tracking", [length(Peers)]), + start_server(), + join_peers(NodeId, Peers), + ra:trigger_election(NodeId, ?RA_OPERATION_TIMEOUT) + end; + _ -> + join_peers(NodeId, Nodes), + ra:restart_server(NodeId), + ra:trigger_election(NodeId) + end, + ok. + +compatible_peer_servers() -> + all_node_ids() -- [(node_id())]. + +start_server() -> + NodeId = node_id(), + Nodes = compatible_peer_servers(), + UId = ra:new_uid(ra_lib:to_binary(?ID_NAME)), + Timeout = application:get_env(kernel, net_ticktime, 60) + 5, + Conf = #{cluster_name => ?ID_NAME, + id => NodeId, + uid => UId, + friendly_name => ?ID_NAME, + initial_members => Nodes, + log_init_args => #{uid => UId}, + tick_timeout => Timeout, + machine => {module, mqtt_machine, #{}} + }, + ra:start_server(Conf). + +trigger_election() -> + ra:trigger_election(server_id()). 
+ +join_peers(_NodeId, []) -> + ok; +join_peers(NodeId, Nodes) -> + join_peers(NodeId, Nodes, 100). +join_peers(_NodeId, [], _RetriesLeft) -> + ok; +join_peers(_NodeId, _Nodes, RetriesLeft) when RetriesLeft =:= 0 -> + rabbit_log:error("MQTT: exhausted all attempts while trying to rejoin cluster peers"); +join_peers(NodeId, Nodes, RetriesLeft) -> + case ra:members(Nodes, ?START_TIMEOUT) of + {ok, Members, _} -> + case lists:member(NodeId, Members) of + true -> ok; + false -> ra:add_member(Members, NodeId) + end; + {timeout, _} -> + rabbit_log:debug("MQTT: timed out contacting cluster peers, %s retries left", [RetriesLeft]), + timer:sleep(?RETRY_INTERVAL), + join_peers(NodeId, Nodes, RetriesLeft - 1); + Err -> + Err + end. + +-spec leave(node()) -> 'ok' | 'timeout' | 'nodedown'. +leave(Node) -> + NodeId = server_id(), + ToLeave = server_id(Node), + try + ra:leave_and_delete_server(NodeId, ToLeave) + catch + exit:{{nodedown, Node}, _} -> + nodedown + end. + +can_participate_in_clientid_tracking(Node) -> + case rpc:call(Node, mqtt_machine, module_info, []) of + {badrpc, _} -> false; + _ -> true + end. diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt.erl new file mode 100644 index 0000000000..192f8a7fee --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt.erl @@ -0,0 +1,55 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt). + +-behaviour(application). +-export([start/2, stop/1]). +-export([connection_info_local/1, + emit_connection_info_local/3, + emit_connection_info_all/4, + close_all_client_connections/1]). + +start(normal, []) -> + {ok, Listeners} = application:get_env(tcp_listeners), + {ok, SslListeners} = application:get_env(ssl_listeners), + ok = mqtt_node:start(), + Result = rabbit_mqtt_sup:start_link({Listeners, SslListeners}, []), + EMPid = case rabbit_event:start_link() of + {ok, Pid} -> Pid; + {error, {already_started, Pid}} -> Pid + end, + gen_event:add_handler(EMPid, rabbit_mqtt_internal_event_handler, []), + Result. + +stop(_) -> + rabbit_mqtt_sup:stop_listeners(). + +-spec close_all_client_connections(string() | binary()) -> {'ok', non_neg_integer()}. +close_all_client_connections(Reason) -> + Connections = rabbit_mqtt_collector:list(), + [rabbit_mqtt_reader:close_connection(Pid, Reason) || {_, Pid} <- Connections], + {ok, length(Connections)}. + +emit_connection_info_all(Nodes, Items, Ref, AggregatorPid) -> + Pids = [spawn_link(Node, rabbit_mqtt, emit_connection_info_local, + [Items, Ref, AggregatorPid]) + || Node <- Nodes], + rabbit_control_misc:await_emitters_termination(Pids), + ok. + +emit_connection_info_local(Items, Ref, AggregatorPid) -> + rabbit_control_misc:emitting_map_with_exit_handler( + AggregatorPid, Ref, fun({_, Pid}) -> + rabbit_mqtt_reader:info(Pid, Items) + end, + rabbit_mqtt_collector:list()). + +connection_info_local(Items) -> + Connections = rabbit_mqtt_collector:list(), + [rabbit_mqtt_reader:info(Pid, Items) + || {_, Pid} <- Connections]. 
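As a usage sketch (illustrative only; the reason string and info items below are made-up values, and the items must be members of ?INFO_ITEMS from rabbit_mqtt.hrl), the functions above can be called directly from a node's Erlang shell:

    %% List the client id and connection name of every tracked MQTT connection.
    Infos = rabbit_mqtt:connection_info_local([client_id, conn_name]),
    %% Disconnect every MQTT client known to the cluster-wide collector.
    {ok, NumClosed} = rabbit_mqtt:close_all_client_connections(<<"maintenance">>).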
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_collector.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_collector.erl new file mode 100644 index 0000000000..341ee46850 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_collector.erl @@ -0,0 +1,88 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_collector). + +-include("mqtt_machine.hrl"). + +-export([register/2, register/3, unregister/2, list/0, leave/1]). + +%%---------------------------------------------------------------------------- +-spec register(term(), pid()) -> {ok, reference()} | {error, term()}. +register(ClientId, Pid) -> + {ClusterName, _} = NodeId = mqtt_node:server_id(), + case ra_leaderboard:lookup_leader(ClusterName) of + undefined -> + case ra:members(NodeId) of + {ok, _, Leader} -> + register(Leader, ClientId, Pid); + _ = Error -> + Error + end; + Leader -> + register(Leader, ClientId, Pid) + end. + +-spec register(ra:server_id(), term(), pid()) -> + {ok, reference()} | {error, term()}. +register(ServerId, ClientId, Pid) -> + Corr = make_ref(), + send_ra_command(ServerId, {register, ClientId, Pid}, Corr), + erlang:send_after(5000, self(), {ra_event, undefined, register_timeout}), + {ok, Corr}. + +unregister(ClientId, Pid) -> + {ClusterName, _} = mqtt_node:server_id(), + case ra_leaderboard:lookup_leader(ClusterName) of + undefined -> + ok; + Leader -> + send_ra_command(Leader, {unregister, ClientId, Pid}, no_correlation) + end. + +list() -> + {ClusterName, _} = mqtt_node:server_id(), + QF = fun (#machine_state{client_ids = Ids}) -> maps:to_list(Ids) end, + case ra_leaderboard:lookup_leader(ClusterName) of + undefined -> + NodeIds = mqtt_node:all_node_ids(), + case ra:leader_query(NodeIds, QF) of + {ok, {_, Ids}, _} -> Ids; + {timeout, _} -> + rabbit_log:debug("~s:list/0 leader query timed out", + [?MODULE]), + [] + end; + Leader -> + case ra:leader_query(Leader, QF) of + {ok, {_, Ids}, _} -> Ids; + {error, _} -> + []; + {timeout, _} -> + rabbit_log:debug("~s:list/0 leader query timed out", + [?MODULE]), + [] + end + end. + +leave(NodeBin) -> + Node = binary_to_atom(NodeBin, utf8), + ServerId = mqtt_node:server_id(), + run_ra_command(ServerId, {leave, Node}), + mqtt_node:leave(Node). + +%%---------------------------------------------------------------------------- +-spec run_ra_command(term(), term()) -> term() | {error, term()}. +run_ra_command(ServerId, RaCommand) -> + case ra:process_command(ServerId, RaCommand) of + {ok, Result, _} -> Result; + _ = Error -> Error + end. + +-spec send_ra_command(term(), term(), term()) -> ok. +send_ra_command(ServerId, RaCommand, Correlation) -> + ok = ra:pipeline_command(ServerId, RaCommand, Correlation, normal). diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_info.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_info.erl new file mode 100644 index 0000000000..4e73a19253 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_info.erl @@ -0,0 +1,25 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2017-2020 VMware, Inc. or its affiliates. All rights reserved. +%% +-module(rabbit_mqtt_connection_info). 
+ +%% Module to add the MQTT client ID to authentication properties + +%% API +-export([additional_authn_params/4]). + +additional_authn_params(_Creds, _VHost, _Pid, Infos) -> + case proplists:get_value(variable_map, Infos, undefined) of + VariableMap when is_map(VariableMap) -> + case maps:get(<<"client_id">>, VariableMap, []) of + ClientId when is_binary(ClientId)-> + [{client_id, ClientId}]; + [] -> + [] + end; + _ -> + [] + end. diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_sup.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_sup.erl new file mode 100644 index 0000000000..0a150caa38 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_connection_sup.erl @@ -0,0 +1,43 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_connection_sup). + +-behaviour(supervisor2). +-behaviour(ranch_protocol). + +-include_lib("rabbit_common/include/rabbit.hrl"). + +-export([start_link/4, start_keepalive_link/0]). + +-export([init/1]). + +%%---------------------------------------------------------------------------- + +start_link(Ref, _Sock, _Transport, []) -> + {ok, SupPid} = supervisor2:start_link(?MODULE, []), + {ok, KeepaliveSup} = supervisor2:start_child( + SupPid, + {rabbit_mqtt_keepalive_sup, + {rabbit_mqtt_connection_sup, start_keepalive_link, []}, + intrinsic, infinity, supervisor, [rabbit_keepalive_sup]}), + {ok, ReaderPid} = supervisor2:start_child( + SupPid, + {rabbit_mqtt_reader, + {rabbit_mqtt_reader, start_link, [KeepaliveSup, Ref]}, + intrinsic, ?WORKER_WAIT, worker, [rabbit_mqtt_reader]}), + {ok, SupPid, ReaderPid}. + +start_keepalive_link() -> + supervisor2:start_link(?MODULE, []). + +%%---------------------------------------------------------------------------- + +init([]) -> + {ok, {{one_for_all, 0, 1}, []}}. + + diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_frame.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_frame.erl new file mode 100644 index 0000000000..950c5bd6c4 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_frame.erl @@ -0,0 +1,224 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_frame). + +-export([parse/2, initial_state/0]). +-export([serialise/1]). + +-include("rabbit_mqtt_frame.hrl"). + +-define(RESERVED, 0). +-define(MAX_LEN, 16#fffffff). +-define(HIGHBIT, 2#10000000). +-define(LOWBITS, 2#01111111). + +initial_state() -> none. + +parse(<<>>, none) -> + {more, fun(Bin) -> parse(Bin, none) end}; +parse(<<MessageType:4, Dup:1, QoS:2, Retain:1, Rest/binary>>, none) -> + parse_remaining_len(Rest, #mqtt_frame_fixed{ type = MessageType, + dup = bool(Dup), + qos = QoS, + retain = bool(Retain) }); +parse(Bin, Cont) -> Cont(Bin). + +parse_remaining_len(<<>>, Fixed) -> + {more, fun(Bin) -> parse_remaining_len(Bin, Fixed) end}; +parse_remaining_len(Rest, Fixed) -> + parse_remaining_len(Rest, Fixed, 1, 0). 
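%% Comment added for illustration: the MQTT "remaining length" field is a
%% base-128 varint, least significant group first, where a set top bit means
%% another length byte follows. For example, a remaining length of 321
%% (= 65 + 2 * 128) is encoded as <<2#11000001, 2#00000010>> = <<193, 2>>;
%% the clauses below accumulate 65 * 1 + 2 * 128 = 321 and then hand the
%% rest of the buffer to parse_frame/3.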
+ +parse_remaining_len(_Bin, _Fixed, _Multiplier, Length) + when Length > ?MAX_LEN -> + {error, invalid_mqtt_frame_len}; +parse_remaining_len(<<>>, Fixed, Multiplier, Length) -> + {more, fun(Bin) -> parse_remaining_len(Bin, Fixed, Multiplier, Length) end}; +parse_remaining_len(<<1:1, Len:7, Rest/binary>>, Fixed, Multiplier, Value) -> + parse_remaining_len(Rest, Fixed, Multiplier * ?HIGHBIT, Value + Len * Multiplier); +parse_remaining_len(<<0:1, Len:7, Rest/binary>>, Fixed, Multiplier, Value) -> + parse_frame(Rest, Fixed, Value + Len * Multiplier). + +parse_frame(Bin, #mqtt_frame_fixed{ type = Type, + qos = Qos } = Fixed, Length) -> + case {Type, Bin} of + {?CONNECT, <<FrameBin:Length/binary, Rest/binary>>} -> + {ProtoName, Rest1} = parse_utf(FrameBin), + <<ProtoVersion : 8, Rest2/binary>> = Rest1, + <<UsernameFlag : 1, + PasswordFlag : 1, + WillRetain : 1, + WillQos : 2, + WillFlag : 1, + CleanSession : 1, + _Reserved : 1, + KeepAlive : 16/big, + Rest3/binary>> = Rest2, + {ClientId, Rest4} = parse_utf(Rest3), + {WillTopic, Rest5} = parse_utf(Rest4, WillFlag), + {WillMsg, Rest6} = parse_msg(Rest5, WillFlag), + {UserName, Rest7} = parse_utf(Rest6, UsernameFlag), + {PasssWord, <<>>} = parse_utf(Rest7, PasswordFlag), + case protocol_name_approved(ProtoVersion, ProtoName) of + true -> + wrap(Fixed, + #mqtt_frame_connect{ + proto_ver = ProtoVersion, + will_retain = bool(WillRetain), + will_qos = WillQos, + will_flag = bool(WillFlag), + clean_sess = bool(CleanSession), + keep_alive = KeepAlive, + client_id = ClientId, + will_topic = WillTopic, + will_msg = WillMsg, + username = UserName, + password = PasssWord}, Rest); + false -> + {error, protocol_header_corrupt} + end; + {?PUBLISH, <<FrameBin:Length/binary, Rest/binary>>} -> + {TopicName, Rest1} = parse_utf(FrameBin), + {MessageId, Payload} = case Qos of + 0 -> {undefined, Rest1}; + _ -> <<M:16/big, R/binary>> = Rest1, + {M, R} + end, + wrap(Fixed, #mqtt_frame_publish { topic_name = TopicName, + message_id = MessageId }, + Payload, Rest); + {?PUBACK, <<FrameBin:Length/binary, Rest/binary>>} -> + <<MessageId:16/big>> = FrameBin, + wrap(Fixed, #mqtt_frame_publish { message_id = MessageId }, Rest); + {Subs, <<FrameBin:Length/binary, Rest/binary>>} + when Subs =:= ?SUBSCRIBE orelse Subs =:= ?UNSUBSCRIBE -> + 1 = Qos, + <<MessageId:16/big, Rest1/binary>> = FrameBin, + Topics = parse_topics(Subs, Rest1, []), + wrap(Fixed, #mqtt_frame_subscribe { message_id = MessageId, + topic_table = Topics }, Rest); + {Minimal, Rest} + when Minimal =:= ?DISCONNECT orelse Minimal =:= ?PINGREQ -> + Length = 0, + wrap(Fixed, Rest); + {_, TooShortBin} -> + {more, fun(BinMore) -> + parse_frame(<<TooShortBin/binary, BinMore/binary>>, + Fixed, Length) + end} + end. + +parse_topics(_, <<>>, Topics) -> + Topics; +parse_topics(?SUBSCRIBE = Sub, Bin, Topics) -> + {Name, <<_:6, QoS:2, Rest/binary>>} = parse_utf(Bin), + parse_topics(Sub, Rest, [#mqtt_topic { name = Name, qos = QoS } | Topics]); +parse_topics(?UNSUBSCRIBE = Sub, Bin, Topics) -> + {Name, <<Rest/binary>>} = parse_utf(Bin), + parse_topics(Sub, Rest, [#mqtt_topic { name = Name } | Topics]). + +wrap(Fixed, Variable, Payload, Rest) -> + {ok, #mqtt_frame { variable = Variable, fixed = Fixed, payload = Payload }, Rest}. +wrap(Fixed, Variable, Rest) -> + {ok, #mqtt_frame { variable = Variable, fixed = Fixed }, Rest}. +wrap(Fixed, Rest) -> + {ok, #mqtt_frame { fixed = Fixed }, Rest}. + +parse_utf(Bin, 0) -> + {undefined, Bin}; +parse_utf(Bin, _) -> + parse_utf(Bin). 
+ +parse_utf(<<Len:16/big, Str:Len/binary, Rest/binary>>) -> + {binary_to_list(Str), Rest}. + +parse_msg(Bin, 0) -> + {undefined, Bin}; +parse_msg(<<Len:16/big, Msg:Len/binary, Rest/binary>>, _) -> + {Msg, Rest}. + +bool(0) -> false; +bool(1) -> true. + +%% serialisation + +serialise(#mqtt_frame{ fixed = Fixed, + variable = Variable, + payload = Payload }) -> + serialise_variable(Fixed, Variable, serialise_payload(Payload)). + +serialise_payload(undefined) -> <<>>; +serialise_payload(B) when is_binary(B) -> B. + +serialise_variable(#mqtt_frame_fixed { type = ?CONNACK } = Fixed, + #mqtt_frame_connack { session_present = SessionPresent, + return_code = ReturnCode }, + <<>> = PayloadBin) -> + VariableBin = <<?RESERVED:7, (opt(SessionPresent)):1, ReturnCode:8>>, + serialise_fixed(Fixed, VariableBin, PayloadBin); + +serialise_variable(#mqtt_frame_fixed { type = SubAck } = Fixed, + #mqtt_frame_suback { message_id = MessageId, + qos_table = Qos }, + <<>> = _PayloadBin) + when SubAck =:= ?SUBACK orelse SubAck =:= ?UNSUBACK -> + VariableBin = <<MessageId:16/big>>, + QosBin = << <<?RESERVED:6, Q:2>> || Q <- Qos >>, + serialise_fixed(Fixed, VariableBin, QosBin); + +serialise_variable(#mqtt_frame_fixed { type = ?PUBLISH, + qos = Qos } = Fixed, + #mqtt_frame_publish { topic_name = TopicName, + message_id = MessageId }, + PayloadBin) -> + TopicBin = serialise_utf(TopicName), + MessageIdBin = case Qos of + 0 -> <<>>; + 1 -> <<MessageId:16/big>> + end, + serialise_fixed(Fixed, <<TopicBin/binary, MessageIdBin/binary>>, PayloadBin); + +serialise_variable(#mqtt_frame_fixed { type = ?PUBACK } = Fixed, + #mqtt_frame_publish { message_id = MessageId }, + PayloadBin) -> + MessageIdBin = <<MessageId:16/big>>, + serialise_fixed(Fixed, MessageIdBin, PayloadBin); + +serialise_variable(#mqtt_frame_fixed {} = Fixed, + undefined, + <<>> = _PayloadBin) -> + serialise_fixed(Fixed, <<>>, <<>>). + +serialise_fixed(#mqtt_frame_fixed{ type = Type, + dup = Dup, + qos = Qos, + retain = Retain }, VariableBin, PayloadBin) + when is_integer(Type) andalso ?CONNECT =< Type andalso Type =< ?DISCONNECT -> + Len = size(VariableBin) + size(PayloadBin), + true = (Len =< ?MAX_LEN), + LenBin = serialise_len(Len), + <<Type:4, (opt(Dup)):1, (opt(Qos)):2, (opt(Retain)):1, + LenBin/binary, VariableBin/binary, PayloadBin/binary>>. + +serialise_utf(String) -> + StringBin = unicode:characters_to_binary(String), + Len = size(StringBin), + true = (Len =< 16#ffff), + <<Len:16/big, StringBin/binary>>. + +serialise_len(N) when N =< ?LOWBITS -> + <<0:1, N:7>>; +serialise_len(N) -> + <<1:1, (N rem ?HIGHBIT):7, (serialise_len(N div ?HIGHBIT))/binary>>. + +opt(undefined) -> ?RESERVED; +opt(false) -> 0; +opt(true) -> 1; +opt(X) when is_integer(X) -> X. + +protocol_name_approved(Ver, Name) -> + lists:member({Ver, Name}, ?PROTOCOL_NAMES). diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_internal_event_handler.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_internal_event_handler.erl new file mode 100644 index 0000000000..2a371b4142 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_internal_event_handler.erl @@ -0,0 +1,45 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_internal_event_handler). + +-behaviour(gen_event). 
+ +-export([init/1, handle_event/2, handle_call/2, handle_info/2, terminate/2, code_change/3]). + +-import(rabbit_misc, [pget/2]). + +init([]) -> + {ok, []}. + +handle_event({event, vhost_created, Info, _, _}, State) -> + Name = pget(name, Info), + rabbit_mqtt_retainer_sup:child_for_vhost(Name), + {ok, State}; +handle_event({event, vhost_deleted, Info, _, _}, State) -> + Name = pget(name, Info), + rabbit_mqtt_retainer_sup:delete_child(Name), + {ok, State}; +handle_event({event, maintenance_connections_closed, _Info, _, _}, State) -> + %% we should close our connections + {ok, NConnections} = rabbit_mqtt:close_all_client_connections("node is being put into maintenance mode"), + rabbit_log:alert("Closed ~b local MQTT client connections", [NConnections]), + {ok, State}; +handle_event(_Event, State) -> + {ok, State}. + +handle_call(_Request, State) -> + {ok, State}. + +handle_info(_Info, State) -> + {ok, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_processor.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_processor.erl new file mode 100644 index 0000000000..c3a25096e6 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_processor.erl @@ -0,0 +1,1054 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_processor). + +-export([info/2, initial_state/2, initial_state/5, + process_frame/2, amqp_pub/2, amqp_callback/2, send_will/1, + close_connection/1, handle_pre_hibernate/0, + handle_ra_event/2]). + +%% for testing purposes +-export([get_vhost_username/1, get_vhost/3, get_vhost_from_user_mapping/2, + add_client_id_to_adapter_info/2]). + +-include_lib("amqp_client/include/amqp_client.hrl"). +-include("rabbit_mqtt_frame.hrl"). +-include("rabbit_mqtt.hrl"). + +-define(APP, rabbitmq_mqtt). +-define(FRAME_TYPE(Frame, Type), + Frame = #mqtt_frame{ fixed = #mqtt_frame_fixed{ type = Type }}). +-define(MAX_TOPIC_PERMISSION_CACHE_SIZE, 12). + +initial_state(Socket, SSLLoginName) -> + RealSocket = rabbit_net:unwrap_socket(Socket), + {ok, {PeerAddr, _PeerPort}} = rabbit_net:peername(RealSocket), + initial_state(RealSocket, SSLLoginName, + adapter_info(Socket, 'MQTT'), + fun serialise_and_send_to_client/2, PeerAddr). + +initial_state(Socket, SSLLoginName, + AdapterInfo0 = #amqp_adapter_info{additional_info = Extra}, + SendFun, PeerAddr) -> + {ok, {mqtt2amqp_fun, M2A}, {amqp2mqtt_fun, A2M}} = + rabbit_mqtt_util:get_topic_translation_funs(), + %% MQTT connections use exactly one channel. The frame max is not + %% applicable and there is no way to know what client is used. + AdapterInfo = AdapterInfo0#amqp_adapter_info{additional_info = [ + {channels, 1}, + {channel_max, 1}, + {frame_max, 0}, + {client_properties, + [{<<"product">>, longstr, <<"MQTT client">>}]} | Extra]}, + #proc_state{ unacked_pubs = gb_trees:empty(), + awaiting_ack = gb_trees:empty(), + message_id = 1, + subscriptions = #{}, + consumer_tags = {undefined, undefined}, + channels = {undefined, undefined}, + exchange = rabbit_mqtt_util:env(exchange), + socket = Socket, + adapter_info = AdapterInfo, + ssl_login_name = SSLLoginName, + send_fun = SendFun, + peer_addr = PeerAddr, + mqtt2amqp_fun = M2A, + amqp2mqtt_fun = A2M}. 
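initial_state/5 above also captures a pair of topic translation funs from rabbit_mqtt_util, used throughout this module to convert between MQTT topic filters and AMQP 0-9-1 routing/binding keys. A much-simplified sketch of the MQTT-to-AMQP direction (the real funs live in rabbit_mqtt_util, also handle the reverse mapping, and are not shown in this diff):

    %% Hypothetical helper, for illustration only: '/' becomes '.', '+' becomes '*',
    %% and '#' stays the multi-level wildcard on both sides.
    mqtt_to_amqp_topic(Topic) when is_list(Topic) ->
        list_to_binary([case C of $/ -> $.; $+ -> $*; _ -> C end || C <- Topic]).

    %% mqtt_to_amqp_topic("sensors/+/temperature") =:= <<"sensors.*.temperature">>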
+ +process_frame(#mqtt_frame{ fixed = #mqtt_frame_fixed{ type = Type }}, + PState = #proc_state{ connection = undefined } ) + when Type =/= ?CONNECT -> + {error, connect_expected, PState}; +process_frame(Frame = #mqtt_frame{ fixed = #mqtt_frame_fixed{ type = Type }}, + PState) -> + case process_request(Type, Frame, PState) of + {ok, PState1} -> {ok, PState1, PState1#proc_state.connection}; + Ret -> Ret + end. + +add_client_id_to_adapter_info(ClientId, #amqp_adapter_info{additional_info = AdditionalInfo0} = AdapterInfo) -> + AdditionalInfo1 = [{variable_map, #{<<"client_id">> => ClientId}} + | AdditionalInfo0], + ClientProperties = proplists:get_value(client_properties, AdditionalInfo1, []) + ++ [{client_id, longstr, ClientId}], + AdditionalInfo2 = case lists:keysearch(client_properties, 1, AdditionalInfo1) of + {value, _} -> + lists:keyreplace(client_properties, + 1, + AdditionalInfo1, + {client_properties, ClientProperties}); + false -> + [{client_properties, ClientProperties} | AdditionalInfo1] + end, + AdapterInfo#amqp_adapter_info{additional_info = AdditionalInfo2}. + +process_request(?CONNECT, + #mqtt_frame{ variable = #mqtt_frame_connect{ + username = Username, + password = Password, + proto_ver = ProtoVersion, + clean_sess = CleanSess, + client_id = ClientId0, + keep_alive = Keepalive} = Var}, + PState0 = #proc_state{ ssl_login_name = SSLLoginName, + send_fun = SendFun, + adapter_info = AdapterInfo, + peer_addr = Addr}) -> + ClientId = case ClientId0 of + [] -> rabbit_mqtt_util:gen_client_id(); + [_|_] -> ClientId0 + end, + rabbit_log_connection:debug("Received a CONNECT, client ID: ~p (expanded to ~p), username: ~p, " + "clean session: ~p, protocol version: ~p, keepalive: ~p", + [ClientId0, ClientId, Username, CleanSess, ProtoVersion, Keepalive]), + AdapterInfo1 = add_client_id_to_adapter_info(rabbit_data_coercion:to_binary(ClientId), AdapterInfo), + PState1 = PState0#proc_state{adapter_info = AdapterInfo1}, + Ip = list_to_binary(inet:ntoa(Addr)), + {Return, PState5} = + case {lists:member(ProtoVersion, proplists:get_keys(?PROTOCOL_NAMES)), + ClientId0 =:= [] andalso CleanSess =:= false} of + {false, _} -> + {?CONNACK_PROTO_VER, PState1}; + {_, true} -> + {?CONNACK_INVALID_ID, PState1}; + _ -> + case creds(Username, Password, SSLLoginName) of + nocreds -> + rabbit_core_metrics:auth_attempt_failed(Ip, <<>>, mqtt), + rabbit_log_connection:error("MQTT login failed: no credentials provided~n"), + {?CONNACK_CREDENTIALS, PState1}; + {invalid_creds, {undefined, Pass}} when is_list(Pass) -> + rabbit_core_metrics:auth_attempt_failed(Ip, <<>>, mqtt), + rabbit_log_connection:error("MQTT login failed: no username is provided"), + {?CONNACK_CREDENTIALS, PState1}; + {invalid_creds, {User, undefined}} when is_list(User) -> + rabbit_core_metrics:auth_attempt_failed(Ip, User, mqtt), + rabbit_log_connection:error("MQTT login failed for user '~p': no password provided", [User]), + {?CONNACK_CREDENTIALS, PState1}; + {UserBin, PassBin} -> + case process_login(UserBin, PassBin, ProtoVersion, PState1) of + connack_dup_auth -> + {SessionPresent0, PState2} = maybe_clean_sess(PState1), + {{?CONNACK_ACCEPT, SessionPresent0}, PState2}; + {?CONNACK_ACCEPT, Conn, VHost, AState} -> + case rabbit_mqtt_collector:register(ClientId, self()) of + {ok, Corr} -> + RetainerPid = rabbit_mqtt_retainer_sup:child_for_vhost(VHost), + link(Conn), + {ok, Ch} = amqp_connection:open_channel(Conn), + link(Ch), + amqp_channel:enable_delivery_flow_control(Ch), + Prefetch = rabbit_mqtt_util:env(prefetch), + #'basic.qos_ok'{} = 
amqp_channel:call(Ch, + #'basic.qos'{prefetch_count = Prefetch}), + rabbit_mqtt_reader:start_keepalive(self(), Keepalive), + PState3 = PState1#proc_state{ + will_msg = make_will_msg(Var), + clean_sess = CleanSess, + channels = {Ch, undefined}, + connection = Conn, + client_id = ClientId, + retainer_pid = RetainerPid, + auth_state = AState, + register_state = {pending, Corr}}, + {SessionPresent1, PState4} = maybe_clean_sess(PState3), + {{?CONNACK_ACCEPT, SessionPresent1}, PState4}; + %% e.g. this node was removed from the MQTT cluster members + {error, _} = Err -> + rabbit_log_connection:error("MQTT cannot accept a connection: " + "client ID tracker is unavailable: ~p", [Err]), + %% ignore all exceptions, we are shutting down + catch amqp_connection:close(Conn), + {?CONNACK_SERVER, PState1}; + {timeout, _} -> + rabbit_log_connection:error("MQTT cannot accept a connection: " + "client ID registration timed out"), + %% ignore all exceptions, we are shutting down + catch amqp_connection:close(Conn), + {?CONNACK_SERVER, PState1} + end; + ConnAck -> {ConnAck, PState1} + end + end + end, + {ReturnCode, SessionPresent} = case Return of + {?CONNACK_ACCEPT, Bool} -> {?CONNACK_ACCEPT, Bool}; + Other -> {Other, false} + end, + SendFun(#mqtt_frame{fixed = #mqtt_frame_fixed{type = ?CONNACK}, + variable = #mqtt_frame_connack{ + session_present = SessionPresent, + return_code = ReturnCode}}, + PState5), + case ReturnCode of + ?CONNACK_ACCEPT -> {ok, PState5}; + ?CONNACK_CREDENTIALS -> {error, unauthenticated, PState5}; + ?CONNACK_AUTH -> {error, unauthorized, PState5}; + ?CONNACK_SERVER -> {error, unavailable, PState5}; + ?CONNACK_INVALID_ID -> {error, invalid_client_id, PState5}; + ?CONNACK_PROTO_VER -> {error, unsupported_protocol_version, PState5} + end; + +process_request(?PUBACK, + #mqtt_frame{ + variable = #mqtt_frame_publish{ message_id = MessageId }}, + #proc_state{ channels = {Channel, _}, + awaiting_ack = Awaiting } = PState) -> + %% tag can be missing because of bogus clients and QoS downgrades + case gb_trees:is_defined(MessageId, Awaiting) of + false -> + {ok, PState}; + true -> + Tag = gb_trees:get(MessageId, Awaiting), + amqp_channel:cast(Channel, #'basic.ack'{ delivery_tag = Tag }), + {ok, PState#proc_state{ awaiting_ack = gb_trees:delete(MessageId, Awaiting) }} + end; + +process_request(?PUBLISH, + Frame = #mqtt_frame{ + fixed = Fixed = #mqtt_frame_fixed{ qos = ?QOS_2 }}, + PState) -> + % Downgrade QOS_2 to QOS_1 + process_request(?PUBLISH, + Frame#mqtt_frame{ + fixed = Fixed#mqtt_frame_fixed{ qos = ?QOS_1 }}, + PState); +process_request(?PUBLISH, + #mqtt_frame{ + fixed = #mqtt_frame_fixed{ qos = Qos, + retain = Retain, + dup = Dup }, + variable = #mqtt_frame_publish{ topic_name = Topic, + message_id = MessageId }, + payload = Payload }, + PState = #proc_state{retainer_pid = RPid, + amqp2mqtt_fun = Amqp2MqttFun}) -> + check_publish(Topic, fun() -> + Msg = #mqtt_msg{retain = Retain, + qos = Qos, + topic = Topic, + dup = Dup, + message_id = MessageId, + payload = Payload}, + Result = amqp_pub(Msg, PState), + case Retain of + false -> ok; + true -> hand_off_to_retainer(RPid, Amqp2MqttFun, Topic, Msg) + end, + {ok, Result} + end, PState); + +process_request(?SUBSCRIBE, + #mqtt_frame{ + variable = #mqtt_frame_subscribe{ + message_id = SubscribeMsgId, + topic_table = Topics}, + payload = undefined}, + #proc_state{channels = {Channel, _}, + exchange = Exchange, + retainer_pid = RPid, + send_fun = SendFun, + message_id = StateMsgId, + mqtt2amqp_fun = Mqtt2AmqpFun} = PState0) -> + 
rabbit_log_connection:debug("Received a SUBSCRIBE for topic(s) ~p", [Topics]), + check_subscribe(Topics, fun() -> + {QosResponse, PState1} = + lists:foldl(fun (#mqtt_topic{name = TopicName, + qos = Qos}, {QosList, PState}) -> + SupportedQos = supported_subs_qos(Qos), + {Queue, #proc_state{subscriptions = Subs} = PState1} = + ensure_queue(SupportedQos, PState), + RoutingKey = Mqtt2AmqpFun(TopicName), + Binding = #'queue.bind'{ + queue = Queue, + exchange = Exchange, + routing_key = RoutingKey}, + #'queue.bind_ok'{} = amqp_channel:call(Channel, Binding), + SupportedQosList = case maps:find(TopicName, Subs) of + {ok, L} -> [SupportedQos|L]; + error -> [SupportedQos] + end, + {[SupportedQos | QosList], + PState1 #proc_state{ + subscriptions = + maps:put(TopicName, SupportedQosList, Subs)}} + end, {[], PState0}, Topics), + SendFun(#mqtt_frame{fixed = #mqtt_frame_fixed{type = ?SUBACK}, + variable = #mqtt_frame_suback{ + message_id = SubscribeMsgId, + qos_table = QosResponse}}, PState1), + %% we may need to send up to length(Topics) messages. + %% if QoS is > 0 then we need to generate a message id, + %% and increment the counter. + StartMsgId = safe_max_id(SubscribeMsgId, StateMsgId), + N = lists:foldl(fun (Topic, Acc) -> + case maybe_send_retained_message(RPid, Topic, Acc, PState1) of + {true, X} -> Acc + X; + false -> Acc + end + end, StartMsgId, Topics), + {ok, PState1#proc_state{message_id = N}} + end, PState0); + +process_request(?UNSUBSCRIBE, + #mqtt_frame{ + variable = #mqtt_frame_subscribe{ message_id = MessageId, + topic_table = Topics }, + payload = undefined }, #proc_state{ channels = {Channel, _}, + exchange = Exchange, + client_id = ClientId, + subscriptions = Subs0, + send_fun = SendFun, + mqtt2amqp_fun = Mqtt2AmqpFun } = PState) -> + rabbit_log_connection:debug("Received an UNSUBSCRIBE for topic(s) ~p", [Topics]), + Queues = rabbit_mqtt_util:subcription_queue_name(ClientId), + Subs1 = + lists:foldl( + fun (#mqtt_topic{ name = TopicName }, Subs) -> + QosSubs = case maps:find(TopicName, Subs) of + {ok, Val} when is_list(Val) -> lists:usort(Val); + error -> [] + end, + RoutingKey = Mqtt2AmqpFun(TopicName), + lists:foreach( + fun (QosSub) -> + Queue = element(QosSub + 1, Queues), + Binding = #'queue.unbind'{ + queue = Queue, + exchange = Exchange, + routing_key = RoutingKey}, + #'queue.unbind_ok'{} = amqp_channel:call(Channel, Binding) + end, QosSubs), + maps:remove(TopicName, Subs) + end, Subs0, Topics), + SendFun(#mqtt_frame{ fixed = #mqtt_frame_fixed { type = ?UNSUBACK }, + variable = #mqtt_frame_suback{ message_id = MessageId }}, + PState), + {ok, PState #proc_state{ subscriptions = Subs1 }}; + +process_request(?PINGREQ, #mqtt_frame{}, #proc_state{ send_fun = SendFun } = PState) -> + rabbit_log_connection:debug("Received a PINGREQ"), + SendFun(#mqtt_frame{ fixed = #mqtt_frame_fixed{ type = ?PINGRESP }}, + PState), + rabbit_log_connection:debug("Sent a PINGRESP"), + {ok, PState}; + +process_request(?DISCONNECT, #mqtt_frame{}, PState) -> + rabbit_log_connection:debug("Received a DISCONNECT"), + {stop, PState}. + +hand_off_to_retainer(RetainerPid, Amqp2MqttFun, Topic0, #mqtt_msg{payload = <<"">>}) -> + Topic1 = Amqp2MqttFun(Topic0), + rabbit_mqtt_retainer:clear(RetainerPid, Topic1), + ok; +hand_off_to_retainer(RetainerPid, Amqp2MqttFun, Topic0, Msg) -> + Topic1 = Amqp2MqttFun(Topic0), + rabbit_mqtt_retainer:retain(RetainerPid, Topic1, Msg), + ok. 
+ +maybe_send_retained_message(RPid, #mqtt_topic{name = Topic0, qos = SubscribeQos}, MsgId, + #proc_state{ send_fun = SendFun, + amqp2mqtt_fun = Amqp2MqttFun } = PState) -> + Topic1 = Amqp2MqttFun(Topic0), + case rabbit_mqtt_retainer:fetch(RPid, Topic1) of + undefined -> false; + Msg -> + %% calculate effective QoS as the lower value of SUBSCRIBE frame QoS + %% and retained message QoS. The spec isn't super clear on this, we + %% do what Mosquitto does, per user feedback. + Qos = erlang:min(SubscribeQos, Msg#mqtt_msg.qos), + Id = case Qos of + ?QOS_0 -> undefined; + ?QOS_1 -> MsgId + end, + SendFun(#mqtt_frame{fixed = #mqtt_frame_fixed{ + type = ?PUBLISH, + qos = Qos, + dup = false, + retain = Msg#mqtt_msg.retain + }, variable = #mqtt_frame_publish{ + message_id = Id, + topic_name = Topic1 + }, + payload = Msg#mqtt_msg.payload}, PState), + case Qos of + ?QOS_0 -> false; + ?QOS_1 -> {true, 1} + end + end. + +amqp_callback({#'basic.deliver'{ consumer_tag = ConsumerTag, + delivery_tag = DeliveryTag, + routing_key = RoutingKey }, + #amqp_msg{ props = #'P_basic'{ headers = Headers }, + payload = Payload }, + DeliveryCtx} = Delivery, + #proc_state{ channels = {Channel, _}, + awaiting_ack = Awaiting, + message_id = MsgId, + send_fun = SendFun, + amqp2mqtt_fun = Amqp2MqttFun } = PState) -> + amqp_channel:notify_received(DeliveryCtx), + case {delivery_dup(Delivery), delivery_qos(ConsumerTag, Headers, PState)} of + {true, {?QOS_0, ?QOS_1}} -> + amqp_channel:cast( + Channel, #'basic.ack'{ delivery_tag = DeliveryTag }), + {ok, PState}; + {true, {?QOS_0, ?QOS_0}} -> + {ok, PState}; + {Dup, {DeliveryQos, _SubQos} = Qos} -> + TopicName = Amqp2MqttFun(RoutingKey), + SendFun( + #mqtt_frame{ fixed = #mqtt_frame_fixed{ + type = ?PUBLISH, + qos = DeliveryQos, + dup = Dup }, + variable = #mqtt_frame_publish{ + message_id = + case DeliveryQos of + ?QOS_0 -> undefined; + ?QOS_1 -> MsgId + end, + topic_name = TopicName }, + payload = Payload}, PState), + case Qos of + {?QOS_0, ?QOS_0} -> + {ok, PState}; + {?QOS_1, ?QOS_1} -> + Awaiting1 = gb_trees:insert(MsgId, DeliveryTag, Awaiting), + PState1 = PState#proc_state{ awaiting_ack = Awaiting1 }, + PState2 = next_msg_id(PState1), + {ok, PState2}; + {?QOS_0, ?QOS_1} -> + amqp_channel:cast( + Channel, #'basic.ack'{ delivery_tag = DeliveryTag }), + {ok, PState} + end + end; + +amqp_callback(#'basic.ack'{ multiple = true, delivery_tag = Tag } = Ack, + PState = #proc_state{ unacked_pubs = UnackedPubs, + send_fun = SendFun }) -> + case gb_trees:size(UnackedPubs) > 0 andalso + gb_trees:take_smallest(UnackedPubs) of + {TagSmall, MsgId, UnackedPubs1} when TagSmall =< Tag -> + SendFun( + #mqtt_frame{ fixed = #mqtt_frame_fixed{ type = ?PUBACK }, + variable = #mqtt_frame_publish{ message_id = MsgId }}, + PState), + amqp_callback(Ack, PState #proc_state{ unacked_pubs = UnackedPubs1 }); + _ -> + {ok, PState} + end; + +amqp_callback(#'basic.ack'{ multiple = false, delivery_tag = Tag }, + PState = #proc_state{ unacked_pubs = UnackedPubs, + send_fun = SendFun }) -> + SendFun( + #mqtt_frame{ fixed = #mqtt_frame_fixed{ type = ?PUBACK }, + variable = #mqtt_frame_publish{ + message_id = gb_trees:get( + Tag, UnackedPubs) }}, PState), + {ok, PState #proc_state{ unacked_pubs = gb_trees:delete(Tag, UnackedPubs) }}. + +delivery_dup({#'basic.deliver'{ redelivered = Redelivered }, + #amqp_msg{ props = #'P_basic'{ headers = Headers }}, + _DeliveryCtx}) -> + case rabbit_mqtt_util:table_lookup(Headers, <<"x-mqtt-dup">>) of + undefined -> Redelivered; + {bool, Dup} -> Redelivered orelse Dup + end. 
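maybe_send_retained_message/4 above delivers a retained message at the lower of the subscription QoS and the QoS the message was retained with (mirroring Mosquitto, as the comment notes), and only consumes a message id when the effective QoS is 1. Worked out for the three interesting cases:

    %% Qos = erlang:min(SubscribeQos, RetainedQos)
    %%   subscribe QoS 0, retained QoS 1 -> delivered at QoS 0, message_id = undefined, returns false
    %%   subscribe QoS 1, retained QoS 0 -> delivered at QoS 0, message_id = undefined, returns false
    %%   subscribe QoS 1, retained QoS 1 -> delivered at QoS 1, uses MsgId, returns {true, 1}
    %% so the SUBSCRIBE handler only advances its message id counter for QoS 1 deliveries.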
+ +ensure_valid_mqtt_message_id(Id) when Id >= 16#ffff -> + 1; +ensure_valid_mqtt_message_id(Id) -> + Id. + +safe_max_id(Id0, Id1) -> + ensure_valid_mqtt_message_id(erlang:max(Id0, Id1)). + +next_msg_id(PState = #proc_state{ message_id = MsgId0 }) -> + MsgId1 = ensure_valid_mqtt_message_id(MsgId0 + 1), + PState#proc_state{ message_id = MsgId1 }. + +%% decide at which qos level to deliver based on subscription +%% and the message publish qos level. non-MQTT publishes are +%% assumed to be qos 1, regardless of delivery_mode. +delivery_qos(Tag, _Headers, #proc_state{ consumer_tags = {Tag, _} }) -> + {?QOS_0, ?QOS_0}; +delivery_qos(Tag, Headers, #proc_state{ consumer_tags = {_, Tag} }) -> + case rabbit_mqtt_util:table_lookup(Headers, <<"x-mqtt-publish-qos">>) of + {byte, Qos} -> {lists:min([Qos, ?QOS_1]), ?QOS_1}; + undefined -> {?QOS_1, ?QOS_1} + end. + +maybe_clean_sess(PState = #proc_state { clean_sess = false, + connection = Conn, + client_id = ClientId }) -> + SessionPresent = session_present(Conn, ClientId), + {_Queue, PState1} = ensure_queue(?QOS_1, PState), + {SessionPresent, PState1}; +maybe_clean_sess(PState = #proc_state { clean_sess = true, + connection = Conn, + client_id = ClientId }) -> + {_, Queue} = rabbit_mqtt_util:subcription_queue_name(ClientId), + {ok, Channel} = amqp_connection:open_channel(Conn), + ok = try amqp_channel:call(Channel, #'queue.delete'{ queue = Queue }) of + #'queue.delete_ok'{} -> ok + catch + exit:_Error -> ok + after + amqp_channel:close(Channel) + end, + {false, PState}. + +session_present(Conn, ClientId) -> + {_, QueueQ1} = rabbit_mqtt_util:subcription_queue_name(ClientId), + Declare = #'queue.declare'{queue = QueueQ1, + passive = true}, + {ok, Channel} = amqp_connection:open_channel(Conn), + try + amqp_channel:call(Channel, Declare), + amqp_channel:close(Channel), + true + catch exit:{{shutdown, {server_initiated_close, ?NOT_FOUND, _Text}}, _} -> + false + end. + +make_will_msg(#mqtt_frame_connect{ will_flag = false }) -> + undefined; +make_will_msg(#mqtt_frame_connect{ will_retain = Retain, + will_qos = Qos, + will_topic = Topic, + will_msg = Msg }) -> + #mqtt_msg{ retain = Retain, + qos = Qos, + topic = Topic, + dup = false, + payload = Msg }. 
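ensure_valid_mqtt_message_id/1 and next_msg_id/1 above keep the 16-bit MQTT packet identifier in the valid range: it is incremented per outgoing QoS 1 message and wrapped back to 1 once it would reach 16#ffff (0 is not a legal identifier), and safe_max_id/2 seeds it from the client's own SUBSCRIBE message id when that is larger. For example:

    %% next_msg_id cycles the counter through 1..16#fffe:
    %%   message_id = 7        -> becomes 8
    %%   message_id = 16#fffe  -> 16#fffe + 1 >= 16#ffff, so it resets to 1
    %% safe_max_id(16#000a, 3) =:= 16#000a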
+ +process_login(_UserBin, _PassBin, _ProtoVersion, + #proc_state{channels = {Channel, _}, + peer_addr = Addr, + auth_state = #auth_state{username = Username, + vhost = VHost}}) when is_pid(Channel) -> + UsernameStr = rabbit_data_coercion:to_list(Username), + VHostStr = rabbit_data_coercion:to_list(VHost), + rabbit_core_metrics:auth_attempt_failed(list_to_binary(inet:ntoa(Addr)), Username, mqtt), + rabbit_log_connection:warning("MQTT detected duplicate connect/login attempt for user ~p, vhost ~p", + [UsernameStr, VHostStr]), + connack_dup_auth; +process_login(UserBin, PassBin, ProtoVersion, + #proc_state{channels = {undefined, undefined}, + socket = Sock, + adapter_info = AdapterInfo, + ssl_login_name = SslLoginName, + peer_addr = Addr}) -> + {ok, {_, _, _, ToPort}} = rabbit_net:socket_ends(Sock, inbound), + {VHostPickedUsing, {VHost, UsernameBin}} = get_vhost(UserBin, SslLoginName, ToPort), + rabbit_log_connection:info( + "MQTT vhost picked using ~s~n", + [human_readable_vhost_lookup_strategy(VHostPickedUsing)]), + RemoteAddress = list_to_binary(inet:ntoa(Addr)), + case rabbit_vhost:exists(VHost) of + true -> + case amqp_connection:start(#amqp_params_direct{ + username = UsernameBin, + password = PassBin, + virtual_host = VHost, + adapter_info = set_proto_version(AdapterInfo, ProtoVersion)}) of + {ok, Connection} -> + case rabbit_access_control:check_user_loopback(UsernameBin, Addr) of + ok -> + rabbit_core_metrics:auth_attempt_succeeded(RemoteAddress, UsernameBin, + mqtt), + [{internal_user, InternalUser}] = amqp_connection:info( + Connection, [internal_user]), + {?CONNACK_ACCEPT, Connection, VHost, + #auth_state{user = InternalUser, + username = UsernameBin, + vhost = VHost}}; + not_allowed -> + rabbit_core_metrics:auth_attempt_failed(RemoteAddress, UsernameBin, + mqtt), + amqp_connection:close(Connection), + rabbit_log_connection:warning( + "MQTT login failed for ~p access_refused " + "(access must be from localhost)~n", + [binary_to_list(UsernameBin)]), + ?CONNACK_AUTH + end; + {error, {auth_failure, Explanation}} -> + rabbit_core_metrics:auth_attempt_failed(RemoteAddress, UsernameBin, mqtt), + rabbit_log_connection:error("MQTT login failed for user '~p' auth_failure: ~s~n", + [binary_to_list(UserBin), Explanation]), + ?CONNACK_CREDENTIALS; + {error, access_refused} -> + rabbit_core_metrics:auth_attempt_failed(RemoteAddress, UsernameBin, mqtt), + rabbit_log_connection:warning("MQTT login failed for user '~p': access_refused " + "(vhost access not allowed)~n", + [binary_to_list(UserBin)]), + ?CONNACK_AUTH; + {error, not_allowed} -> + rabbit_core_metrics:auth_attempt_failed(RemoteAddress, UsernameBin, mqtt), + %% when vhost allowed for TLS connection + rabbit_log_connection:warning("MQTT login failed for ~p access_refused " + "(vhost access not allowed)~n", + [binary_to_list(UserBin)]), + ?CONNACK_AUTH + end; + false -> + rabbit_core_metrics:auth_attempt_failed(RemoteAddress, UsernameBin, mqtt), + rabbit_log_connection:error("MQTT login failed for user '~p' auth_failure: vhost ~s does not exist~n", + [binary_to_list(UserBin), VHost]), + ?CONNACK_CREDENTIALS + end. + +get_vhost(UserBin, none, Port) -> + get_vhost_no_ssl(UserBin, Port); +get_vhost(UserBin, undefined, Port) -> + get_vhost_no_ssl(UserBin, Port); +get_vhost(UserBin, SslLogin, Port) -> + get_vhost_ssl(UserBin, SslLogin, Port). 
+ +get_vhost_no_ssl(UserBin, Port) -> + case vhost_in_username(UserBin) of + true -> + {vhost_in_username_or_default, get_vhost_username(UserBin)}; + false -> + PortVirtualHostMapping = rabbit_runtime_parameters:value_global( + mqtt_port_to_vhost_mapping + ), + case get_vhost_from_port_mapping(Port, PortVirtualHostMapping) of + undefined -> + {default_vhost, {rabbit_mqtt_util:env(vhost), UserBin}}; + VHost -> + {port_to_vhost_mapping, {VHost, UserBin}} + end + end. + +get_vhost_ssl(UserBin, SslLoginName, Port) -> + UserVirtualHostMapping = rabbit_runtime_parameters:value_global( + mqtt_default_vhosts + ), + case get_vhost_from_user_mapping(SslLoginName, UserVirtualHostMapping) of + undefined -> + PortVirtualHostMapping = rabbit_runtime_parameters:value_global( + mqtt_port_to_vhost_mapping + ), + case get_vhost_from_port_mapping(Port, PortVirtualHostMapping) of + undefined -> + {vhost_in_username_or_default, get_vhost_username(UserBin)}; + VHostFromPortMapping -> + {port_to_vhost_mapping, {VHostFromPortMapping, UserBin}} + end; + VHostFromCertMapping -> + {cert_to_vhost_mapping, {VHostFromCertMapping, UserBin}} + end. + +vhost_in_username(UserBin) -> + case application:get_env(?APP, ignore_colons_in_username) of + {ok, true} -> false; + _ -> + %% split at the last colon, disallowing colons in username + case re:split(UserBin, ":(?!.*?:)") of + [_, _] -> true; + [UserBin] -> false + end + end. + +get_vhost_username(UserBin) -> + Default = {rabbit_mqtt_util:env(vhost), UserBin}, + case application:get_env(?APP, ignore_colons_in_username) of + {ok, true} -> Default; + _ -> + %% split at the last colon, disallowing colons in username + case re:split(UserBin, ":(?!.*?:)") of + [Vhost, UserName] -> {Vhost, UserName}; + [UserBin] -> Default + end + end. + +get_vhost_from_user_mapping(_User, not_found) -> + undefined; +get_vhost_from_user_mapping(User, Mapping) -> + M = rabbit_data_coercion:to_proplist(Mapping), + case rabbit_misc:pget(User, M) of + undefined -> + undefined; + VHost -> + VHost + end. + +get_vhost_from_port_mapping(_Port, not_found) -> + undefined; +get_vhost_from_port_mapping(Port, Mapping) -> + M = rabbit_data_coercion:to_proplist(Mapping), + Res = case rabbit_misc:pget(rabbit_data_coercion:to_binary(Port), M) of + undefined -> + undefined; + VHost -> + VHost + end, + Res. + +human_readable_vhost_lookup_strategy(vhost_in_username_or_default) -> + "vhost in username or default"; +human_readable_vhost_lookup_strategy(port_to_vhost_mapping) -> + "MQTT port to vhost mapping"; +human_readable_vhost_lookup_strategy(cert_to_vhost_mapping) -> + "client certificate to vhost mapping"; +human_readable_vhost_lookup_strategy(default_vhost) -> + "plugin configuration or default"; +human_readable_vhost_lookup_strategy(Val) -> + atom_to_list(Val). 
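vhost_in_username/1 and get_vhost_username/1 above let a client pick a virtual host by prefixing it to the MQTT username, separated by the last colon; the ignore_colons_in_username setting turns this off. Illustrative values:

    %% With the default settings:
    %%   get_vhost_username(<<"production:alice">>) -> {<<"production">>, <<"alice">>}
    %%   get_vhost_username(<<"alice">>)            -> {DefaultVHost, <<"alice">>}
    %% With {ignore_colons_in_username, true} in the rabbitmq_mqtt application
    %% environment, the whole string (colons included) is used as the username and
    %% the vhost falls back to the port-to-vhost mapping or the plugin default.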
+ +creds(User, Pass, SSLLoginName) -> + DefaultUser = rabbit_mqtt_util:env(default_user), + DefaultPass = rabbit_mqtt_util:env(default_pass), + {ok, Anon} = application:get_env(?APP, allow_anonymous), + {ok, TLSAuth} = application:get_env(?APP, ssl_cert_login), + HaveDefaultCreds = Anon =:= true andalso + is_binary(DefaultUser) andalso + is_binary(DefaultPass), + + CredentialsProvided = User =/= undefined orelse + Pass =/= undefined, + + CorrectCredentials = is_list(User) andalso + is_list(Pass), + + SSLLoginProvided = TLSAuth =:= true andalso + SSLLoginName =/= none, + + case {CredentialsProvided, CorrectCredentials, SSLLoginProvided, HaveDefaultCreds} of + %% Username and password take priority + {true, true, _, _} -> {list_to_binary(User), + list_to_binary(Pass)}; + %% Either username or password is provided + {true, false, _, _} -> {invalid_creds, {User, Pass}}; + %% rabbitmq_mqtt.ssl_cert_login is true. SSL user name provided. + %% Authenticating using username only. + {false, false, true, _} -> {SSLLoginName, none}; + %% Anonymous connection uses default credentials + {false, false, false, true} -> {DefaultUser, DefaultPass}; + _ -> nocreds + end. + +supported_subs_qos(?QOS_0) -> ?QOS_0; +supported_subs_qos(?QOS_1) -> ?QOS_1; +supported_subs_qos(?QOS_2) -> ?QOS_1. + +delivery_mode(?QOS_0) -> 1; +delivery_mode(?QOS_1) -> 2; +delivery_mode(?QOS_2) -> 2. + +%% different qos subscriptions are received in different queues +%% with appropriate durability and timeout arguments +%% this will lead to duplicate messages for overlapping subscriptions +%% with different qos values - todo: prevent duplicates +ensure_queue(Qos, #proc_state{ channels = {Channel, _}, + client_id = ClientId, + clean_sess = CleanSess, + consumer_tags = {TagQ0, TagQ1} = Tags} = PState) -> + {QueueQ0, QueueQ1} = rabbit_mqtt_util:subcription_queue_name(ClientId), + Qos1Args = case {rabbit_mqtt_util:env(subscription_ttl), CleanSess} of + {undefined, _} -> + []; + {Ms, false} when is_integer(Ms) -> + [{<<"x-expires">>, long, Ms}]; + _ -> + [] + end, + QueueSetup = + case {TagQ0, TagQ1, Qos} of + {undefined, _, ?QOS_0} -> + {QueueQ0, + #'queue.declare'{ queue = QueueQ0, + durable = false, + auto_delete = true }, + #'basic.consume'{ queue = QueueQ0, + no_ack = true }}; + {_, undefined, ?QOS_1} -> + {QueueQ1, + #'queue.declare'{ queue = QueueQ1, + durable = true, + %% Clean session means a transient connection, + %% translating into auto-delete. + %% + %% see rabbitmq/rabbitmq-mqtt#37 + auto_delete = CleanSess, + arguments = Qos1Args }, + #'basic.consume'{ queue = QueueQ1, + no_ack = false }}; + {_, _, ?QOS_0} -> + {exists, QueueQ0}; + {_, _, ?QOS_1} -> + {exists, QueueQ1} + end, + case QueueSetup of + {Queue, Declare, Consume} -> + #'queue.declare_ok'{} = amqp_channel:call(Channel, Declare), + #'basic.consume_ok'{ consumer_tag = Tag } = + amqp_channel:call(Channel, Consume), + {Queue, PState #proc_state{ consumer_tags = setelement(Qos+1, Tags, Tag) }}; + {exists, Q} -> + {Q, PState} + end. 
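creds/3 and ensure_queue/2 above take their defaults from the plugin's application environment: anonymous logins are only possible when allow_anonymous is set and both default credentials are configured, ssl_cert_login enables certificate-derived identities, and subscription_ttl becomes an x-expires argument on the durable QoS 1 queue of non-clean sessions. A hedged advanced.config sketch using the keys referenced here (values are illustrative, not the shipped defaults):

    [{rabbitmq_mqtt, [
        {allow_anonymous,  true},
        {default_user,     <<"guest">>},
        {default_pass,     <<"guest">>},
        {ssl_cert_login,   false},
        {vhost,            <<"/">>},
        {exchange,         <<"amq.topic">>},
        {subscription_ttl, 1800000}    %% milliseconds
    ]}].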
+ +send_will(PState = #proc_state{will_msg = undefined}) -> + PState; + +send_will(PState = #proc_state{will_msg = WillMsg = #mqtt_msg{retain = Retain, + topic = Topic}, + retainer_pid = RPid, + channels = {ChQos0, ChQos1}, + amqp2mqtt_fun = Amqp2MqttFun}) -> + case check_topic_access(Topic, write, PState) of + ok -> + amqp_pub(WillMsg, PState), + case Retain of + false -> ok; + true -> + hand_off_to_retainer(RPid, Amqp2MqttFun, Topic, WillMsg) + end; + Error -> + rabbit_log:warning( + "Could not send last will: ~p~n", + [Error]) + end, + case ChQos1 of + undefined -> ok; + _ -> amqp_channel:close(ChQos1) + end, + case ChQos0 of + undefined -> ok; + _ -> amqp_channel:close(ChQos0) + end, + PState #proc_state{ channels = {undefined, undefined} }. + +amqp_pub(undefined, PState) -> + PState; + +%% set up a qos1 publishing channel if necessary +%% this channel will only be used for publishing, not consuming +amqp_pub(Msg = #mqtt_msg{ qos = ?QOS_1 }, + PState = #proc_state{ channels = {ChQos0, undefined}, + awaiting_seqno = undefined, + connection = Conn }) -> + {ok, Channel} = amqp_connection:open_channel(Conn), + #'confirm.select_ok'{} = amqp_channel:call(Channel, #'confirm.select'{}), + amqp_channel:register_confirm_handler(Channel, self()), + amqp_pub(Msg, PState #proc_state{ channels = {ChQos0, Channel}, + awaiting_seqno = 1 }); + +amqp_pub(#mqtt_msg{ qos = Qos, + topic = Topic, + dup = Dup, + message_id = MessageId, + payload = Payload }, + PState = #proc_state{ channels = {ChQos0, ChQos1}, + exchange = Exchange, + unacked_pubs = UnackedPubs, + awaiting_seqno = SeqNo, + mqtt2amqp_fun = Mqtt2AmqpFun }) -> + RoutingKey = Mqtt2AmqpFun(Topic), + Method = #'basic.publish'{ exchange = Exchange, + routing_key = RoutingKey }, + Headers = [{<<"x-mqtt-publish-qos">>, byte, Qos}, + {<<"x-mqtt-dup">>, bool, Dup}], + Msg = #amqp_msg{ props = #'P_basic'{ headers = Headers, + delivery_mode = delivery_mode(Qos)}, + payload = Payload }, + {UnackedPubs1, Ch, SeqNo1} = + case Qos =:= ?QOS_1 andalso MessageId =/= undefined of + true -> {gb_trees:enter(SeqNo, MessageId, UnackedPubs), ChQos1, + SeqNo + 1}; + false -> {UnackedPubs, ChQos0, SeqNo} + end, + amqp_channel:cast_flow(Ch, Method, Msg), + PState #proc_state{ unacked_pubs = UnackedPubs1, + awaiting_seqno = SeqNo1 }. + +adapter_info(Sock, ProtoName) -> + amqp_connection:socket_adapter_info(Sock, {ProtoName, "N/A"}). + +set_proto_version(AdapterInfo = #amqp_adapter_info{protocol = {Proto, _}}, Vsn) -> + AdapterInfo#amqp_adapter_info{protocol = {Proto, + human_readable_mqtt_version(Vsn)}}. + +human_readable_mqtt_version(3) -> + "3.1.0"; +human_readable_mqtt_version(4) -> + "3.1.1"; +human_readable_mqtt_version(_) -> + "N/A". + +serialise_and_send_to_client(Frame, #proc_state{ socket = Sock }) -> + try rabbit_net:port_command(Sock, rabbit_mqtt_frame:serialise(Frame)) of + Res -> + Res + catch _:Error -> + rabbit_log_connection:error("MQTT: a socket write failed, the socket might already be closed"), + rabbit_log_connection:debug("Failed to write to socket ~p, error: ~p, frame: ~p", + [Sock, Error, Frame]) + end. 
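amqp_pub/2 above maps an MQTT publish onto a single basic.publish: QoS 0 uses the ordinary channel and a transient message (delivery_mode 1), while the first QoS 1 publish lazily opens a second channel in confirm mode and sends persistent messages (delivery_mode 2); the original MQTT flags travel as headers so they can be recovered on delivery. Roughly, a QoS 1 publish becomes:

    %% Sketch of the message handed to amqp_channel:cast_flow/3 for qos = 1, dup = false:
    #amqp_msg{props = #'P_basic'{headers = [{<<"x-mqtt-publish-qos">>, byte, 1},
                                            {<<"x-mqtt-dup">>,         bool, false}],
                                 delivery_mode = 2},
              payload = Payload}
    %% the broker's confirm (basic.ack) for this sequence number later drives the
    %% MQTT PUBACK in amqp_callback/2 via the unacked_pubs tree.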
+ +close_connection(PState = #proc_state{ connection = undefined }) -> + PState; +close_connection(PState = #proc_state{ connection = Connection, + client_id = ClientId }) -> + % todo: maybe clean session + case ClientId of + undefined -> ok; + _ -> + case rabbit_mqtt_collector:unregister(ClientId, self()) of + ok -> ok; + %% ignore as we are shutting down + {timeout, _} -> ok + end + end, + %% ignore noproc or other exceptions, we are shutting down + catch amqp_connection:close(Connection), + PState #proc_state{ channels = {undefined, undefined}, + connection = undefined }. + +handle_pre_hibernate() -> + erase(topic_permission_cache), + ok. + +handle_ra_event({applied, [{Corr, ok}]}, + PState = #proc_state{register_state = {pending, Corr}}) -> + %% success case - command was applied transition into registered state + PState#proc_state{register_state = registered}; +handle_ra_event({not_leader, Leader, Corr}, + PState = #proc_state{register_state = {pending, Corr}, + client_id = ClientId}) -> + %% retry command against actual leader + {ok, NewCorr} = rabbit_mqtt_collector:register(Leader, ClientId, self()), + PState#proc_state{register_state = {pending, NewCorr}}; +handle_ra_event(register_timeout, + PState = #proc_state{register_state = {pending, _Corr}, + client_id = ClientId}) -> + {ok, NewCorr} = rabbit_mqtt_collector:register(ClientId, self()), + PState#proc_state{register_state = {pending, NewCorr}}; +handle_ra_event(register_timeout, PState) -> + PState; +handle_ra_event(Evt, PState) -> + %% log these? + rabbit_log:debug("unhandled ra_event: ~w ~n", [Evt]), + PState. + +%% NB: check_*: MQTT spec says we should ack normally, ie pretend there +%% was no auth error, but here we are closing the connection with an error. This +%% is what happens anyway if there is an authorization failure at the AMQP 0-9-1 client level. + +check_publish(TopicName, Fn, PState) -> + case check_topic_access(TopicName, write, PState) of + ok -> Fn(); + _ -> {error, unauthorized, PState} + end. + +check_subscribe([], Fn, _) -> + Fn(); + +check_subscribe([#mqtt_topic{name = TopicName} | Topics], Fn, PState) -> + case check_topic_access(TopicName, read, PState) of + ok -> check_subscribe(Topics, Fn, PState); + _ -> {error, unauthorized, PState} + end. + +check_topic_access(TopicName, Access, + #proc_state{ + auth_state = #auth_state{user = User = #user{username = Username}, + vhost = VHost}, + exchange = Exchange, + client_id = ClientId, + mqtt2amqp_fun = Mqtt2AmqpFun }) -> + Cache = + case get(topic_permission_cache) of + undefined -> []; + Other -> Other + end, + + Key = {TopicName, Username, ClientId, VHost, Exchange, Access}, + case lists:member(Key, Cache) of + true -> + ok; + false -> + Resource = #resource{virtual_host = VHost, + kind = topic, + name = Exchange}, + + RoutingKey = Mqtt2AmqpFun(TopicName), + Context = #{routing_key => RoutingKey, + variable_map => #{ + <<"username">> => Username, + <<"vhost">> => VHost, + <<"client_id">> => rabbit_data_coercion:to_binary(ClientId) + } + }, + + try rabbit_access_control:check_topic_access(User, Resource, Access, Context) of + ok -> + CacheTail = lists:sublist(Cache, ?MAX_TOPIC_PERMISSION_CACHE_SIZE - 1), + put(topic_permission_cache, [Key | CacheTail]), + ok; + R -> + R + catch + _:{amqp_error, access_refused, Msg, _} -> + rabbit_log:error("operation resulted in an error (access_refused): ~p~n", [Msg]), + {error, access_refused}; + _:Error -> + rabbit_log:error("~p~n", [Error]), + {error, access_refused} + end + end. 
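check_topic_access/3 above runs topic authorisation against the configured topic exchange, passing the translated routing key together with username, vhost and client_id as expansion variables, and caches up to ?MAX_TOPIC_PERMISSION_CACHE_SIZE positive results in the process dictionary (cleared on hibernate). Topic permission patterns may therefore reference those variables; a hypothetical policy for illustration:

    %% Hypothetical write/read patterns for an MQTT user on the topic exchange;
    %% "{client_id}" is expected to expand to the connecting client's id at check time:
    %%   write pattern: ^{client_id}\.status\..*
    %%   read  pattern: ^broadcast\..*|^{client_id}\..*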
+ +info(consumer_tags, #proc_state{consumer_tags = Val}) -> Val; +info(unacked_pubs, #proc_state{unacked_pubs = Val}) -> Val; +info(awaiting_ack, #proc_state{awaiting_ack = Val}) -> Val; +info(awaiting_seqno, #proc_state{awaiting_seqno = Val}) -> Val; +info(message_id, #proc_state{message_id = Val}) -> Val; +info(client_id, #proc_state{client_id = Val}) -> + rabbit_data_coercion:to_binary(Val); +info(clean_sess, #proc_state{clean_sess = Val}) -> Val; +info(will_msg, #proc_state{will_msg = Val}) -> Val; +info(channels, #proc_state{channels = Val}) -> Val; +info(exchange, #proc_state{exchange = Val}) -> Val; +info(adapter_info, #proc_state{adapter_info = Val}) -> Val; +info(ssl_login_name, #proc_state{ssl_login_name = Val}) -> Val; +info(retainer_pid, #proc_state{retainer_pid = Val}) -> Val; +info(user, #proc_state{auth_state = #auth_state{username = Val}}) -> Val; +info(vhost, #proc_state{auth_state = #auth_state{vhost = Val}}) -> Val; +info(host, #proc_state{adapter_info = #amqp_adapter_info{host = Val}}) -> Val; +info(port, #proc_state{adapter_info = #amqp_adapter_info{port = Val}}) -> Val; +info(peer_host, #proc_state{adapter_info = #amqp_adapter_info{peer_host = Val}}) -> Val; +info(peer_port, #proc_state{adapter_info = #amqp_adapter_info{peer_port = Val}}) -> Val; +info(protocol, #proc_state{adapter_info = #amqp_adapter_info{protocol = Val}}) -> + case Val of + {Proto, Version} -> {Proto, rabbit_data_coercion:to_binary(Version)}; + Other -> Other + end; +info(channels, PState) -> additional_info(channels, PState); +info(channel_max, PState) -> additional_info(channel_max, PState); +info(frame_max, PState) -> additional_info(frame_max, PState); +info(client_properties, PState) -> additional_info(client_properties, PState); +info(ssl, PState) -> additional_info(ssl, PState); +info(ssl_protocol, PState) -> additional_info(ssl_protocol, PState); +info(ssl_key_exchange, PState) -> additional_info(ssl_key_exchange, PState); +info(ssl_cipher, PState) -> additional_info(ssl_cipher, PState); +info(ssl_hash, PState) -> additional_info(ssl_hash, PState); +info(Other, _) -> throw({bad_argument, Other}). + + +additional_info(Key, + #proc_state{adapter_info = + #amqp_adapter_info{additional_info = AddInfo}}) -> + proplists:get_value(Key, AddInfo). diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_reader.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_reader.erl new file mode 100644 index 0000000000..39c0761321 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_reader.erl @@ -0,0 +1,480 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_reader). + +%% Transitional step until we can require Erlang/OTP 21 and +%% use the now recommended try/catch syntax for obtaining the stack trace. +-compile(nowarn_deprecated_function). + +-behaviour(gen_server2). + +-export([start_link/2]). +-export([init/1, handle_call/3, handle_cast/2, handle_info/2, + code_change/3, terminate/2, handle_pre_hibernate/1]). + +-export([conserve_resources/3, start_keepalive/2, + close_connection/2]). + +-export([ssl_login_name/1]). +-export([info/2]). + +-include_lib("amqp_client/include/amqp_client.hrl"). +-include("rabbit_mqtt.hrl"). + +-define(SIMPLE_METRICS, [pid, recv_oct, send_oct, reductions]). 
+-define(OTHER_METRICS, [recv_cnt, send_cnt, send_pend, garbage_collection, state]). + +%%---------------------------------------------------------------------------- + +start_link(KeepaliveSup, Ref) -> + Pid = proc_lib:spawn_link(?MODULE, init, + [[KeepaliveSup, Ref]]), + + {ok, Pid}. + +conserve_resources(Pid, _, {_, Conserve, _}) -> + Pid ! {conserve_resources, Conserve}, + ok. + +info(Pid, InfoItems) -> + case InfoItems -- ?INFO_ITEMS of + [] -> gen_server2:call(Pid, {info, InfoItems}); + UnknownItems -> throw({bad_argument, UnknownItems}) + end. + +close_connection(Pid, Reason) -> + gen_server:cast(Pid, {close_connection, Reason}). + +%%---------------------------------------------------------------------------- + +init([KeepaliveSup, Ref]) -> + process_flag(trap_exit, true), + {ok, Sock} = rabbit_networking:handshake(Ref, + application:get_env(rabbitmq_mqtt, proxy_protocol, false)), + RealSocket = rabbit_net:unwrap_socket(Sock), + case rabbit_net:connection_string(Sock, inbound) of + {ok, ConnStr} -> + rabbit_log_connection:debug("MQTT accepting TCP connection ~p (~s)~n", [self(), ConnStr]), + rabbit_alarm:register( + self(), {?MODULE, conserve_resources, []}), + ProcessorState = rabbit_mqtt_processor:initial_state(Sock,ssl_login_name(RealSocket)), + gen_server2:enter_loop(?MODULE, [], + rabbit_event:init_stats_timer( + control_throttle( + #state{socket = RealSocket, + conn_name = ConnStr, + await_recv = false, + connection_state = running, + received_connect_frame = false, + keepalive = {none, none}, + keepalive_sup = KeepaliveSup, + conserve = false, + parse_state = rabbit_mqtt_frame:initial_state(), + proc_state = ProcessorState }), #state.stats_timer), + {backoff, 1000, 1000, 10000}); + {network_error, Reason} -> + rabbit_net:fast_close(RealSocket), + terminate({shutdown, Reason}, undefined); + {error, enotconn} -> + rabbit_net:fast_close(RealSocket), + terminate(shutdown, undefined); + {error, Reason} -> + rabbit_net:fast_close(RealSocket), + terminate({network_error, Reason}, undefined) + end. + +handle_call({info, InfoItems}, _From, State) -> + Infos = lists:map( + fun(InfoItem) -> + {InfoItem, info_internal(InfoItem, State)} + end, + InfoItems), + {reply, Infos, State}; + +handle_call(Msg, From, State) -> + {stop, {mqtt_unexpected_call, Msg, From}, State}. + +handle_cast(duplicate_id, + State = #state{ proc_state = PState, + conn_name = ConnName }) -> + rabbit_log_connection:warning("MQTT disconnecting client ~p with duplicate id '~s'~n", + [ConnName, rabbit_mqtt_processor:info(client_id, PState)]), + {stop, {shutdown, duplicate_id}, State}; + +handle_cast(decommission_node, + State = #state{ proc_state = PState, + conn_name = ConnName }) -> + rabbit_log_connection:warning("MQTT disconnecting client ~p with client ID '~s' as its node is about" + " to be decommissioned~n", + [ConnName, rabbit_mqtt_processor:info(client_id, PState)]), + {stop, {shutdown, decommission_node}, State}; + +handle_cast({close_connection, Reason}, + State = #state{conn_name = ConnName, proc_state = PState}) -> + rabbit_log_connection:warning("MQTT disconnecting client ~p with client ID '~s', reason: ~s", + [ConnName, rabbit_mqtt_processor:info(client_id, PState), Reason]), + {stop, {shutdown, server_initiated_close}, State}; + +handle_cast(Msg, State) -> + {stop, {mqtt_unexpected_cast, Msg}, State}. 
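Besides the connection loop itself, this module exposes a small management surface: close_connection/2 above casts a close request into the connection process (for example when connections are closed for maintenance, see the internal event handler earlier in this diff), and info/2 returns per-connection metrics after checking the requested items against ?INFO_ITEMS. A hedged usage sketch (the item names are assumed to be members of ?INFO_ITEMS, which is defined in rabbit_mqtt.hrl and not shown here):

    %% Given the pid of a reader process (both calls illustrative):
    Infos = rabbit_mqtt_reader:info(ReaderPid, [conn_name, recv_oct, send_oct]),
    ok = rabbit_mqtt_reader:close_connection(ReaderPid, "closed by operator").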
+ +handle_info({#'basic.deliver'{}, #amqp_msg{}, _DeliveryCtx} = Delivery, + State = #state{ proc_state = ProcState }) -> + callback_reply(State, rabbit_mqtt_processor:amqp_callback(Delivery, + ProcState)); + +handle_info(#'basic.ack'{} = Ack, State = #state{ proc_state = ProcState }) -> + callback_reply(State, rabbit_mqtt_processor:amqp_callback(Ack, ProcState)); + +handle_info(#'basic.consume_ok'{}, State) -> + {noreply, State, hibernate}; + +handle_info(#'basic.cancel'{}, State) -> + {stop, {shutdown, subscription_cancelled}, State}; + +handle_info({'EXIT', _Conn, Reason}, State) -> + {stop, {connection_died, Reason}, State}; + +handle_info({Tag, Sock, Data}, + State = #state{ socket = Sock, connection_state = blocked }) + when Tag =:= tcp; Tag =:= ssl -> + {noreply, State#state{ deferred_recv = Data }, hibernate}; + +handle_info({Tag, Sock, Data}, + State = #state{ socket = Sock, connection_state = running }) + when Tag =:= tcp; Tag =:= ssl -> + process_received_bytes( + Data, control_throttle(State #state{ await_recv = false })); + +handle_info({Tag, Sock}, State = #state{socket = Sock}) + when Tag =:= tcp_closed; Tag =:= ssl_closed -> + network_error(closed, State); + +handle_info({Tag, Sock, Reason}, State = #state{socket = Sock}) + when Tag =:= tcp_error; Tag =:= ssl_error -> + network_error(Reason, State); + +handle_info({inet_reply, Sock, ok}, State = #state{socket = Sock}) -> + {noreply, State, hibernate}; + +handle_info({inet_reply, Sock, {error, Reason}}, State = #state{socket = Sock}) -> + network_error(Reason, State); + +handle_info({conserve_resources, Conserve}, State) -> + maybe_process_deferred_recv( + control_throttle(State #state{ conserve = Conserve })); + +handle_info({bump_credit, Msg}, State) -> + credit_flow:handle_bump_msg(Msg), + maybe_process_deferred_recv(control_throttle(State)); + +handle_info({start_keepalives, Keepalive}, + State = #state { keepalive_sup = KeepaliveSup, socket = Sock }) -> + %% Only the client has the responsibility for sending keepalives + SendFun = fun() -> ok end, + Parent = self(), + ReceiveFun = fun() -> Parent ! keepalive_timeout end, + Heartbeater = rabbit_heartbeat:start( + KeepaliveSup, Sock, 0, SendFun, Keepalive, ReceiveFun), + {noreply, State #state { keepalive = Heartbeater }}; + +handle_info(keepalive_timeout, State = #state {conn_name = ConnStr, + proc_state = PState}) -> + rabbit_log_connection:error("closing MQTT connection ~p (keepalive timeout)~n", [ConnStr]), + send_will_and_terminate(PState, {shutdown, keepalive_timeout}, State); + +handle_info(emit_stats, State) -> + {noreply, emit_stats(State), hibernate}; + +handle_info({ra_event, _From, Evt}, + #state{proc_state = PState} = State) -> + %% handle applied event to ensure registration command actually got applied + %% handle not_leader notification in case we send the command to a non-leader + PState1 = rabbit_mqtt_processor:handle_ra_event(Evt, PState), + {noreply, State#state{proc_state = PState1}, hibernate}; + +handle_info(Msg, State) -> + {stop, {mqtt_unexpected_msg, Msg}, State}. + +terminate(Reason, State) -> + maybe_emit_stats(State), + do_terminate(Reason, State). + +handle_pre_hibernate(State) -> + rabbit_mqtt_processor:handle_pre_hibernate(), + {hibernate, State}. 
+ +do_terminate({network_error, {ssl_upgrade_error, closed}, ConnStr}, _State) -> + rabbit_log_connection:error("MQTT detected TLS upgrade error on ~s: connection closed~n", + [ConnStr]); + +do_terminate({network_error, + {ssl_upgrade_error, + {tls_alert, "handshake failure"}}, ConnStr}, _State) -> + log_tls_alert(handshake_failure, ConnStr); +do_terminate({network_error, + {ssl_upgrade_error, + {tls_alert, "unknown ca"}}, ConnStr}, _State) -> + log_tls_alert(unknown_ca, ConnStr); +do_terminate({network_error, + {ssl_upgrade_error, + {tls_alert, {Err, _}}}, ConnStr}, _State) -> + log_tls_alert(Err, ConnStr); +do_terminate({network_error, + {ssl_upgrade_error, + {tls_alert, Alert}}, ConnStr}, _State) -> + log_tls_alert(Alert, ConnStr); +do_terminate({network_error, {ssl_upgrade_error, Reason}, ConnStr}, _State) -> + rabbit_log_connection:error("MQTT detected TLS upgrade error on ~s: ~p~n", + [ConnStr, Reason]); + +do_terminate({network_error, Reason, ConnStr}, _State) -> + rabbit_log_connection:error("MQTT detected network error on ~s: ~p~n", + [ConnStr, Reason]); + +do_terminate({network_error, Reason}, _State) -> + rabbit_log_connection:error("MQTT detected network error: ~p~n", [Reason]); + +do_terminate(normal, #state{proc_state = ProcState, + conn_name = ConnName}) -> + rabbit_mqtt_processor:close_connection(ProcState), + rabbit_log_connection:info("closing MQTT connection ~p (~s)~n", [self(), ConnName]), + ok; + +do_terminate(_Reason, #state{proc_state = ProcState}) -> + rabbit_mqtt_processor:close_connection(ProcState), + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. + +ssl_login_name(Sock) -> + case rabbit_net:peercert(Sock) of + {ok, C} -> case rabbit_ssl:peer_cert_auth_name(C) of + unsafe -> none; + not_found -> none; + Name -> Name + end; + {error, no_peercert} -> none; + nossl -> none + end. + +%%---------------------------------------------------------------------------- + +log_tls_alert(handshake_failure, ConnStr) -> + rabbit_log_connection:error("MQTT detected TLS upgrade error on ~s: handshake failure~n", + [ConnStr]); +log_tls_alert(unknown_ca, ConnStr) -> + rabbit_log_connection:error("MQTT detected TLS certificate verification error on ~s: alert 'unknown CA'~n", + [ConnStr]); +log_tls_alert(Alert, ConnStr) -> + rabbit_log_connection:error("MQTT detected TLS upgrade error on ~s: alert ~s~n", + [ConnStr, Alert]). + +log_new_connection(#state{conn_name = ConnStr, proc_state = PState}) -> + rabbit_log_connection:info("accepting MQTT connection ~p (~s, client id: ~s)~n", + [self(), ConnStr, rabbit_mqtt_processor:info(client_id, PState)]). 
+ +process_received_bytes(<<>>, State = #state{proc_state = ProcState, + received_connect_frame = false}) -> + MqttConn = ProcState#proc_state.connection, + case MqttConn of + undefined -> ok; + _ -> log_new_connection(State) + end, + {noreply, ensure_stats_timer(State#state{ received_connect_frame = true }), hibernate}; +process_received_bytes(<<>>, State) -> + {noreply, ensure_stats_timer(State), hibernate}; +process_received_bytes(Bytes, + State = #state{ parse_state = ParseState, + proc_state = ProcState, + conn_name = ConnStr }) -> + case parse(Bytes, ParseState) of + {more, ParseState1} -> + {noreply, + ensure_stats_timer( State #state{ parse_state = ParseState1 }), + hibernate}; + {ok, Frame, Rest} -> + case rabbit_mqtt_processor:process_frame(Frame, ProcState) of + {ok, ProcState1, ConnPid} -> + PS = rabbit_mqtt_frame:initial_state(), + process_received_bytes( + Rest, + State #state{ parse_state = PS, + proc_state = ProcState1, + connection = ConnPid }); + %% PUBLISH and more + {error, unauthorized = Reason, ProcState1} -> + rabbit_log_connection:error("MQTT connection ~s is closing due to an authorization failure~n", [ConnStr]), + {stop, {shutdown, Reason}, pstate(State, ProcState1)}; + %% CONNECT frames only + {error, unauthenticated = Reason, ProcState1} -> + rabbit_log_connection:error("MQTT connection ~s is closing due to an authentication failure~n", [ConnStr]), + {stop, {shutdown, Reason}, pstate(State, ProcState1)}; + %% CONNECT frames only + {error, invalid_client_id = Reason, ProcState1} -> + rabbit_log_connection:error("MQTT cannot accept connection ~s: client uses an invalid ID~n", [ConnStr]), + {stop, {shutdown, Reason}, pstate(State, ProcState1)}; + %% CONNECT frames only + {error, unsupported_protocol_version = Reason, ProcState1} -> + rabbit_log_connection:error("MQTT cannot accept connection ~s: incompatible protocol version~n", [ConnStr]), + {stop, {shutdown, Reason}, pstate(State, ProcState1)}; + {error, unavailable = Reason, ProcState1} -> + rabbit_log_connection:error("MQTT cannot accept connection ~s due to an internal error or unavailable component~n", + [ConnStr]), + {stop, {shutdown, Reason}, pstate(State, ProcState1)}; + {error, Reason, ProcState1} -> + rabbit_log_connection:error("MQTT protocol error on connection ~s: ~p~n", + [ConnStr, Reason]), + {stop, {shutdown, Reason}, pstate(State, ProcState1)}; + {error, Error} -> + rabbit_log_connection:error("MQTT detected a framing error on connection ~s: ~p~n", + [ConnStr, Error]), + {stop, {shutdown, Error}, State}; + {stop, ProcState1} -> + {stop, normal, pstate(State, ProcState1)} + end; + {error, {cannot_parse, Error, Stacktrace}} -> + rabbit_log_connection:error("MQTT cannot parse a frame on connection '~s', unparseable payload: ~p, error: {~p, ~p} ~n", + [ConnStr, Bytes, Error, Stacktrace]), + {stop, {shutdown, Error}, State}; + {error, Error} -> + rabbit_log_connection:error("MQTT detected a framing error on connection ~s: ~p~n", + [ConnStr, Error]), + {stop, {shutdown, Error}, State} + end. + +callback_reply(State, {ok, ProcState}) -> + {noreply, pstate(State, ProcState), hibernate}; +callback_reply(State, {error, Reason, ProcState}) -> + {stop, Reason, pstate(State, ProcState)}. + +start_keepalive(_, 0 ) -> ok; +start_keepalive(Pid, Keepalive) -> Pid ! {start_keepalives, Keepalive}. + +pstate(State = #state {}, PState = #proc_state{}) -> + State #state{ proc_state = PState }. 
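process_received_bytes/2 above drives the frame parser incrementally: a {more, Continuation} result parks the partially parsed frame in parse_state until the next TCP chunk arrives, while a complete frame is dispatched to the processor and parsing restarts on the leftover bytes, so several frames arriving in one packet are handled in a single pass. As a concrete illustration, a complete PINGREQ is only two bytes:

    %% Sketch; the return shape follows the {ok, Frame, Rest} convention matched above.
    %% rabbit_mqtt_frame:parse(<<16#C0, 16#00>>, rabbit_mqtt_frame:initial_state())
    %%   -> {ok, #mqtt_frame{fixed = #mqtt_frame_fixed{type = ?PINGREQ}}, <<>>}
    %% 16#C0 = packet type 12 (PINGREQ) in the high nibble with zero flags,
    %% 16#00 = remaining length 0.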
+ +%%---------------------------------------------------------------------------- +parse(Bytes, ParseState) -> + try + rabbit_mqtt_frame:parse(Bytes, ParseState) + catch + _:Reason:Stacktrace -> + {error, {cannot_parse, Reason, Stacktrace}} + end. + +send_will_and_terminate(PState, State) -> + send_will_and_terminate(PState, {shutdown, conn_closed}, State). + +send_will_and_terminate(PState, Reason, State = #state{conn_name = ConnStr}) -> + rabbit_mqtt_processor:send_will(PState), + rabbit_log_connection:debug("MQTT: about to send will message (if any) on connection ~p", [ConnStr]), + % todo: flush channel after publish + {stop, Reason, State}. + +network_error(closed, + State = #state{conn_name = ConnStr, + proc_state = PState}) -> + MqttConn = PState#proc_state.connection, + Fmt = "MQTT connection ~p will terminate because peer closed TCP connection~n", + Args = [ConnStr], + case MqttConn of + undefined -> rabbit_log_connection:debug(Fmt, Args); + _ -> rabbit_log_connection:info(Fmt, Args) + end, + send_will_and_terminate(PState, State); + +network_error(Reason, + State = #state{conn_name = ConnStr, + proc_state = PState}) -> + rabbit_log_connection:info("MQTT detected network error for ~p: ~p~n", + [ConnStr, Reason]), + send_will_and_terminate(PState, State). + +run_socket(State = #state{ connection_state = blocked }) -> + State; +run_socket(State = #state{ deferred_recv = Data }) when Data =/= undefined -> + State; +run_socket(State = #state{ await_recv = true }) -> + State; +run_socket(State = #state{ socket = Sock }) -> + rabbit_net:setopts(Sock, [{active, once}]), + State#state{ await_recv = true }. + +control_throttle(State = #state{ connection_state = Flow, + conserve = Conserve }) -> + case {Flow, Conserve orelse credit_flow:blocked()} of + {running, true} -> ok = rabbit_heartbeat:pause_monitor( + State#state.keepalive), + State #state{ connection_state = blocked }; + {blocked, false} -> ok = rabbit_heartbeat:resume_monitor( + State#state.keepalive), + run_socket(State #state{ + connection_state = running }); + {_, _} -> run_socket(State) + end. + +maybe_process_deferred_recv(State = #state{ deferred_recv = undefined }) -> + {noreply, State, hibernate}; +maybe_process_deferred_recv(State = #state{ deferred_recv = Data, socket = Sock }) -> + handle_info({tcp, Sock, Data}, + State#state{ deferred_recv = undefined }). + +maybe_emit_stats(undefined) -> + ok; +maybe_emit_stats(State) -> + rabbit_event:if_enabled(State, #state.stats_timer, + fun() -> emit_stats(State) end). + +emit_stats(State=#state{connection = C}) when C == none; C == undefined -> + %% Avoid emitting stats on terminate when the connection has not yet been + %% established, as this causes orphan entries on the stats database + State1 = rabbit_event:reset_stats_timer(State, #state.stats_timer), + ensure_stats_timer(State1); +emit_stats(State) -> + [{_, Pid}, {_, Recv_oct}, {_, Send_oct}, {_, Reductions}] = I + = infos(?SIMPLE_METRICS, State), + Infos = infos(?OTHER_METRICS, State), + rabbit_core_metrics:connection_stats(Pid, Infos), + rabbit_core_metrics:connection_stats(Pid, Recv_oct, Send_oct, Reductions), + rabbit_event:notify(connection_stats, Infos ++ I), + State1 = rabbit_event:reset_stats_timer(State, #state.stats_timer), + ensure_stats_timer(State1). + +ensure_stats_timer(State = #state{}) -> + rabbit_event:ensure_stats_timer(State, #state.stats_timer, emit_stats). + +infos(Items, State) -> [{Item, info_internal(Item, State)} || Item <- Items]. 
+ +info_internal(pid, State) -> info_internal(connection, State); +info_internal(SockStat, #state{socket = Sock}) when SockStat =:= recv_oct; + SockStat =:= recv_cnt; + SockStat =:= send_oct; + SockStat =:= send_cnt; + SockStat =:= send_pend -> + case rabbit_net:getstat(Sock, [SockStat]) of + {ok, [{_, N}]} when is_number(N) -> N; + _ -> 0 + end; +info_internal(state, State) -> info_internal(connection_state, State); +info_internal(garbage_collection, _State) -> + rabbit_misc:get_gc_info(self()); +info_internal(reductions, _State) -> + {reductions, Reductions} = erlang:process_info(self(), reductions), + Reductions; +info_internal(conn_name, #state{conn_name = Val}) -> + rabbit_data_coercion:to_binary(Val); +info_internal(connection_state, #state{received_connect_frame = false}) -> + starting; +info_internal(connection_state, #state{connection_state = Val}) -> + Val; +info_internal(connection, #state{connection = Val}) -> + Val; +info_internal(Key, #state{proc_state = ProcState}) -> + rabbit_mqtt_processor:info(Key, ProcState). diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store.erl new file mode 100644 index 0000000000..4b3ee95743 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store.erl @@ -0,0 +1,23 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_retained_msg_store). + +-export([behaviour_info/1, table_name_for/1]). + +behaviour_info(callbacks) -> + [{new, 2}, + {recover, 2}, + {insert, 3}, + {lookup, 2}, + {delete, 2}, + {terminate, 1}]; +behaviour_info(_Other) -> + undefined. + +table_name_for(VHost) -> + rabbit_mqtt_util:vhost_name_to_table_name(VHost). diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_dets.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_dets.erl new file mode 100644 index 0000000000..03c5942d35 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_dets.erl @@ -0,0 +1,54 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_retained_msg_store_dets). + +-behaviour(rabbit_mqtt_retained_msg_store). +-include("rabbit_mqtt.hrl"). + +-export([new/2, recover/2, insert/3, lookup/2, delete/2, terminate/1]). + +-record(store_state, { + %% DETS table name + table +}). + + +new(Dir, VHost) -> + Tid = open_table(Dir, VHost), + #store_state{table = Tid}. + +recover(Dir, VHost) -> + case open_table(Dir, VHost) of + {error, _} -> {error, uninitialized}; + {ok, Tid} -> {ok, #store_state{table = Tid}} + end. + +insert(Topic, Msg, #store_state{table = T}) -> + ok = dets:insert(T, #retained_message{topic = Topic, mqtt_msg = Msg}). + +lookup(Topic, #store_state{table = T}) -> + case dets:lookup(T, Topic) of + [] -> not_found; + [Entry] -> Entry + end. + +delete(Topic, #store_state{table = T}) -> + ok = dets:delete(T, Topic). + +terminate(#store_state{table = T}) -> + ok = dets:close(T). 
+ +open_table(Dir, VHost) -> + dets:open_file(rabbit_mqtt_retained_msg_store:table_name_for(VHost), + table_options(rabbit_mqtt_util:path_for(Dir, VHost, ".dets"))). + +table_options(Path) -> + [{type, set}, {keypos, #retained_message.topic}, + {file, Path}, {ram_file, true}, {repair, true}, + {auto_save, rabbit_misc:get_env(rabbit_mqtt, + retained_message_store_dets_sync_interval, 2000)}]. diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_ets.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_ets.erl new file mode 100644 index 0000000000..9080a6f4cf --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_ets.erl @@ -0,0 +1,54 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_retained_msg_store_ets). + +-behaviour(rabbit_mqtt_retained_msg_store). +-include("rabbit_mqtt.hrl"). + +-export([new/2, recover/2, insert/3, lookup/2, delete/2, terminate/1]). + +-record(store_state, { + %% ETS table ID + table, + %% where the table is stored on disk + filename +}). + + +new(Dir, VHost) -> + Path = rabbit_mqtt_util:path_for(Dir, VHost), + TableName = rabbit_mqtt_retained_msg_store:table_name_for(VHost), + file:delete(Path), + Tid = ets:new(TableName, [set, public, {keypos, #retained_message.topic}]), + #store_state{table = Tid, filename = Path}. + +recover(Dir, VHost) -> + Path = rabbit_mqtt_util:path_for(Dir, VHost), + case ets:file2tab(Path) of + {ok, Tid} -> file:delete(Path), + {ok, #store_state{table = Tid, filename = Path}}; + {error, _} -> {error, uninitialized} + end. + +insert(Topic, Msg, #store_state{table = T}) -> + true = ets:insert(T, #retained_message{topic = Topic, mqtt_msg = Msg}), + ok. + +lookup(Topic, #store_state{table = T}) -> + case ets:lookup(T, Topic) of + [] -> not_found; + [Entry] -> Entry + end. + +delete(Topic, #store_state{table = T}) -> + true = ets:delete(T, Topic), + ok. + +terminate(#store_state{table = T, filename = Path}) -> + ok = ets:tab2file(T, Path, + [{extended_info, [object_count]}]). diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_noop.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_noop.erl new file mode 100644 index 0000000000..382ffbc63d --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_noop.erl @@ -0,0 +1,31 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_retained_msg_store_noop). + +-behaviour(rabbit_mqtt_retained_msg_store). +-include("rabbit_mqtt.hrl"). + +-export([new/2, recover/2, insert/3, lookup/2, delete/2, terminate/1]). + +new(_Dir, _VHost) -> + ok. + +recover(_Dir, _VHost) -> + {ok, ok}. + +insert(_Topic, _Msg, _State) -> + ok. + +lookup(_Topic, _State) -> + not_found. + +delete(_Topic, _State) -> + ok. + +terminate(_State) -> + ok. 
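All three modules above implement the rabbit_mqtt_retained_msg_store behaviour; which one a node uses is read from the retained_message_store application env key (see rabbit_mqtt_retainer:store_module/0 below). A hedged classic-config sketch selecting the DETS-backed store, equivalent to the mqtt.retained_message_store key exercised in the config schema snippets later in this diff:

%% advanced.config sketch (illustrative values)
[
 {rabbitmq_mqtt, [
   %% module implementing the rabbit_mqtt_retained_msg_store behaviour
   {retained_message_store, rabbit_mqtt_retained_msg_store_dets},
   %% DETS store only: auto-save interval in milliseconds
   {retained_message_store_dets_sync_interval, 2000}
 ]}
].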
diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer.erl new file mode 100644 index 0000000000..2aa873ecfb --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer.erl @@ -0,0 +1,98 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_retainer). + +-behaviour(gen_server2). +-include("rabbit_mqtt.hrl"). +-include("rabbit_mqtt_frame.hrl"). + +-export([init/1, handle_call/3, handle_cast/2, handle_info/2, + terminate/2, code_change/3, start_link/2]). + +-export([retain/3, fetch/2, clear/2, store_module/0]). + +-define(SERVER, ?MODULE). +-define(TIMEOUT, 30000). + +-record(retainer_state, {store_mod, + store}). + +-spec retain(pid(), string(), mqtt_msg()) -> + {noreply, NewState :: term()} | + {noreply, NewState :: term(), timeout() | hibernate} | + {stop, Reason :: term(), NewState :: term()}. + +%%---------------------------------------------------------------------------- + +start_link(RetainStoreMod, VHost) -> + gen_server2:start_link(?MODULE, [RetainStoreMod, VHost], []). + +retain(Pid, Topic, Msg = #mqtt_msg{retain = true}) -> + gen_server2:cast(Pid, {retain, Topic, Msg}); + +retain(_Pid, _Topic, Msg = #mqtt_msg{retain = false}) -> + throw({error, {retain_is_false, Msg}}). + +fetch(Pid, Topic) -> + gen_server2:call(Pid, {fetch, Topic}, ?TIMEOUT). + +clear(Pid, Topic) -> + gen_server2:cast(Pid, {clear, Topic}). + +%%---------------------------------------------------------------------------- + +init([StoreMod, VHost]) -> + process_flag(trap_exit, true), + State = case StoreMod:recover(store_dir(), VHost) of + {ok, Store} -> #retainer_state{store = Store, + store_mod = StoreMod}; + {error, _} -> #retainer_state{store = StoreMod:new(store_dir(), VHost), + store_mod = StoreMod} + end, + {ok, State}. + +store_module() -> + case application:get_env(rabbitmq_mqtt, retained_message_store) of + {ok, Mod} -> Mod; + undefined -> undefined + end. + +%%---------------------------------------------------------------------------- + +handle_cast({retain, Topic, Msg}, + State = #retainer_state{store = Store, store_mod = Mod}) -> + ok = Mod:insert(Topic, Msg, Store), + {noreply, State}; +handle_cast({clear, Topic}, + State = #retainer_state{store = Store, store_mod = Mod}) -> + ok = Mod:delete(Topic, Store), + {noreply, State}. + +handle_call({fetch, Topic}, _From, + State = #retainer_state{store = Store, store_mod = Mod}) -> + Reply = case Mod:lookup(Topic, Store) of + #retained_message{mqtt_msg = Msg} -> Msg; + not_found -> undefined + end, + {reply, Reply, State}. + +handle_info(stop, State) -> + {stop, normal, State}; + +handle_info(Info, State) -> + {stop, {unknown_info, Info}, State}. + +store_dir() -> + rabbit_mnesia:dir(). + +terminate(_Reason, #retainer_state{store = Store, store_mod = Mod}) -> + Mod:terminate(Store), + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer_sup.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer_sup.erl new file mode 100644 index 0000000000..86b54ce3d7 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer_sup.erl @@ -0,0 +1,60 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. 
If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_retainer_sup). +-behaviour(supervisor2). + +-export([start_link/1, init/1, start_child/2,start_child/1, child_for_vhost/1, + delete_child/1]). + +-define(ENCODING, utf8). + +-spec start_child(binary()) -> supervisor2:startchild_ret(). +-spec start_child(term(), binary()) -> supervisor2:startchild_ret(). + +start_link(SupName) -> + supervisor2:start_link(SupName, ?MODULE, []). + +child_for_vhost(VHost) when is_binary(VHost) -> + case rabbit_mqtt_retainer_sup:start_child(VHost) of + {ok, Pid} -> Pid; + {error, {already_started, Pid}} -> Pid + end. + +start_child(VHost) when is_binary(VHost) -> + start_child(rabbit_mqtt_retainer:store_module(), VHost). + +start_child(RetainStoreMod, VHost) -> + supervisor2:start_child(?MODULE, + + {vhost_to_atom(VHost), + {rabbit_mqtt_retainer, start_link, [RetainStoreMod, VHost]}, + permanent, 60, worker, [rabbit_mqtt_retainer]}). + +delete_child(VHost) -> + Id = vhost_to_atom(VHost), + ok = supervisor2:terminate_child(?MODULE, Id), + ok = supervisor2:delete_child(?MODULE, Id). + +init([]) -> + Mod = rabbit_mqtt_retainer:store_module(), + rabbit_log:info("MQTT retained message store: ~p~n", + [Mod]), + {ok, {{one_for_one, 5, 5}, child_specs(Mod, rabbit_vhost:list_names())}}. + +child_specs(Mod, VHosts) -> + %% see start_child/2 + [{vhost_to_atom(V), + {rabbit_mqtt_retainer, start_link, [Mod, V]}, + permanent, infinity, worker, [rabbit_mqtt_retainer]} || V <- VHosts]. + +vhost_to_atom(VHost) -> + %% we'd like to avoid any conversion here because + %% this atom isn't meant to be human-readable, only + %% unique. This makes sure we don't get noisy process restarts + %% with really unusual vhost names used by various HTTP API test suites + rabbit_data_coercion:to_atom(VHost, latin1). diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_sup.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_sup.erl new file mode 100644 index 0000000000..c00be457d3 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_sup.erl @@ -0,0 +1,73 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_sup). +-behaviour(supervisor2). + +-include_lib("rabbit_common/include/rabbit.hrl"). + +-export([start_link/2, init/1, stop_listeners/0]). + +-define(TCP_PROTOCOL, 'mqtt'). +-define(TLS_PROTOCOL, 'mqtt/ssl'). + +start_link(Listeners, []) -> + supervisor2:start_link({local, ?MODULE}, ?MODULE, [Listeners]). 
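rabbit_mqtt_retainer_sup above starts one retainer process per vhost on demand, and rabbit_mqtt_retainer exposes retain/3, fetch/2 and clear/2 on that process. A hedged usage sketch, not taken from the plugin itself; it assumes the #mqtt_msg{} record from rabbit_mqtt_frame.hrl, and note that retain/3 is a cast, so an immediate fetch/2 can race with it:

-include("rabbit_mqtt_frame.hrl").

%% Illustrative only: driving the per-vhost retainer from calling code.
store_and_read_back(VHost, Topic, Msg = #mqtt_msg{retain = true}) ->
    Pid = rabbit_mqtt_retainer_sup:child_for_vhost(VHost),
    ok  = rabbit_mqtt_retainer:retain(Pid, Topic, Msg),
    case rabbit_mqtt_retainer:fetch(Pid, Topic) of
        undefined -> not_yet_stored;     %% the cast may not have been processed yet
        Stored    -> Stored              %% the stored #mqtt_msg{}
    end.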
+ +init([{Listeners, SslListeners0}]) -> + NumTcpAcceptors = application:get_env(rabbitmq_mqtt, num_tcp_acceptors, 10), + {ok, SocketOpts} = application:get_env(rabbitmq_mqtt, tcp_listen_options), + {SslOpts, NumSslAcceptors, SslListeners} + = case SslListeners0 of + [] -> {none, 0, []}; + _ -> {rabbit_networking:ensure_ssl(), + application:get_env(rabbitmq_mqtt, num_ssl_acceptors, 10), + case rabbit_networking:poodle_check('MQTT') of + ok -> SslListeners0; + danger -> [] + end} + end, + {ok, {{one_for_all, 10, 10}, + [{rabbit_mqtt_retainer_sup, + {rabbit_mqtt_retainer_sup, start_link, [{local, rabbit_mqtt_retainer_sup}]}, + transient, ?SUPERVISOR_WAIT, supervisor, [rabbit_mqtt_retainer_sup]} | + listener_specs(fun tcp_listener_spec/1, + [SocketOpts, NumTcpAcceptors], Listeners) ++ + listener_specs(fun ssl_listener_spec/1, + [SocketOpts, SslOpts, NumSslAcceptors], SslListeners)]}}. + +stop_listeners() -> + rabbit_networking:stop_ranch_listener_of_protocol(?TCP_PROTOCOL), + rabbit_networking:stop_ranch_listener_of_protocol(?TLS_PROTOCOL), + ok. + +%% +%% Implementation +%% + +listener_specs(Fun, Args, Listeners) -> + [Fun([Address | Args]) || + Listener <- Listeners, + Address <- rabbit_networking:tcp_listener_addresses(Listener)]. + +tcp_listener_spec([Address, SocketOpts, NumAcceptors]) -> + rabbit_networking:tcp_listener_spec( + rabbit_mqtt_listener_sup, Address, SocketOpts, + transport(?TCP_PROTOCOL), rabbit_mqtt_connection_sup, [], + mqtt, NumAcceptors, "MQTT TCP listener"). + +ssl_listener_spec([Address, SocketOpts, SslOpts, NumAcceptors]) -> + rabbit_networking:tcp_listener_spec( + rabbit_mqtt_listener_sup, Address, SocketOpts ++ SslOpts, + transport(?TLS_PROTOCOL), rabbit_mqtt_connection_sup, [], + 'mqtt/ssl', NumAcceptors, "MQTT TLS listener"). + +transport(Protocol) -> + case Protocol of + ?TCP_PROTOCOL -> ranch_tcp; + ?TLS_PROTOCOL -> ranch_ssl + end. diff --git a/deps/rabbitmq_mqtt/src/rabbit_mqtt_util.erl b/deps/rabbitmq_mqtt/src/rabbit_mqtt_util.erl new file mode 100644 index 0000000000..0fbe7e8a85 --- /dev/null +++ b/deps/rabbitmq_mqtt/src/rabbit_mqtt_util.erl @@ -0,0 +1,139 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(rabbit_mqtt_util). + +-include("rabbit_mqtt.hrl"). + +-export([subcription_queue_name/1, + gen_client_id/0, + env/1, + table_lookup/2, + path_for/2, + path_for/3, + vhost_name_to_table_name/1, + get_topic_translation_funs/0 + ]). + +-define(MAX_TOPIC_TRANSLATION_CACHE_SIZE, 12). + +subcription_queue_name(ClientId) -> + Base = "mqtt-subscription-" ++ ClientId ++ "qos", + {list_to_binary(Base ++ "0"), list_to_binary(Base ++ "1")}. + +cached(CacheName, Fun, Arg) -> + Cache = + case get(CacheName) of + undefined -> + []; + Other -> + Other + end, + case lists:keyfind(Arg, 1, Cache) of + {_, V} -> + V; + false -> + V = Fun(Arg), + CacheTail = lists:sublist(Cache, ?MAX_TOPIC_TRANSLATION_CACHE_SIZE - 1), + put(CacheName, [{Arg, V} | CacheTail]), + V + end. + +to_amqp(T0) -> + T1 = string:replace(T0, "/", ".", all), + T2 = string:replace(T1, "+", "*", all), + erlang:iolist_to_binary(T2). + +to_mqtt(T0) -> + T1 = string:replace(T0, "*", "+", all), + T2 = string:replace(T1, ".", "/", all), + erlang:iolist_to_binary(T2). + +%% amqp mqtt descr +%% * + match one topic level +%% # # match multiple topic levels +%% . 
/ topic level separator +get_topic_translation_funs() -> + SparkplugB = env(sparkplug), + ToAmqpFun = fun(Topic) -> + cached(mta_cache, fun to_amqp/1, Topic) + end, + ToMqttFun = fun(Topic) -> + cached(atm_cache, fun to_mqtt/1, Topic) + end, + {M2AFun, A2MFun} = case SparkplugB of + true -> + {ok, M2A_SpRe} = re:compile("^sp[AB]v\\d+\\.\\d+/"), + {ok, A2M_SpRe} = re:compile("^sp[AB]v\\d+___\\d+\\."), + M2A = fun(T0) -> + case re:run(T0, M2A_SpRe) of + nomatch -> + ToAmqpFun(T0); + {match, _} -> + T1 = string:replace(T0, ".", "___", leading), + ToAmqpFun(T1) + end + end, + A2M = fun(T0) -> + case re:run(T0, A2M_SpRe) of + nomatch -> + ToMqttFun(T0); + {match, _} -> + T1 = ToMqttFun(T0), + T2 = string:replace(T1, "___", ".", leading), + erlang:iolist_to_binary(T2) + end + end, + {M2A, A2M}; + _ -> + M2A = fun(T) -> + ToAmqpFun(T) + end, + A2M = fun(T) -> + ToMqttFun(T) + end, + {M2A, A2M} + end, + {ok, {mqtt2amqp_fun, M2AFun}, {amqp2mqtt_fun, A2MFun}}. + +gen_client_id() -> + lists:nthtail(1, rabbit_guid:string(rabbit_guid:gen_secure(), [])). + +env(Key) -> + case application:get_env(rabbitmq_mqtt, Key) of + {ok, Val} -> coerce_env_value(Key, Val); + undefined -> undefined + end. + +%% TODO: move to rabbit_common +coerce_env_value(default_pass, Val) -> rabbit_data_coercion:to_binary(Val); +coerce_env_value(default_user, Val) -> rabbit_data_coercion:to_binary(Val); +coerce_env_value(exchange, Val) -> rabbit_data_coercion:to_binary(Val); +coerce_env_value(vhost, Val) -> rabbit_data_coercion:to_binary(Val); +coerce_env_value(_, Val) -> Val. + +table_lookup(undefined, _Key) -> + undefined; +table_lookup(Table, Key) -> + rabbit_misc:table_lookup(Table, Key). + +vhost_name_to_dir_name(VHost) -> + vhost_name_to_dir_name(VHost, ".ets"). +vhost_name_to_dir_name(VHost, Suffix) -> + <<Num:128>> = erlang:md5(VHost), + "mqtt_retained_" ++ rabbit_misc:format("~36.16.0b", [Num]) ++ Suffix. + +path_for(Dir, VHost) -> + filename:join(Dir, vhost_name_to_dir_name(VHost)). + +path_for(Dir, VHost, Suffix) -> + filename:join(Dir, vhost_name_to_dir_name(VHost, Suffix)). + + +vhost_name_to_table_name(VHost) -> + <<Num:128>> = erlang:md5(VHost), + list_to_atom("rabbit_mqtt_retained_" ++ rabbit_misc:format("~36.16.0b", [Num])). diff --git a/deps/rabbitmq_mqtt/test/auth_SUITE.erl b/deps/rabbitmq_mqtt/test/auth_SUITE.erl new file mode 100644 index 0000000000..7368139d95 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/auth_SUITE.erl @@ -0,0 +1,493 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% +-module(auth_SUITE). +-compile([export_all]). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-define(CONNECT_TIMEOUT, 10000). + +all() -> + [{group, anonymous_no_ssl_user}, + {group, anonymous_ssl_user}, + {group, no_ssl_user}, + {group, ssl_user}, + {group, client_id_propagation}]. 
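Referring back to the translation helpers in rabbit_mqtt_util above: MQTT topic filters are rewritten into AMQP 0-9-1 routing keys and back ("/" becomes ".", "+" becomes "*", "#" is left alone), with an extra rewrite for Sparkplug B prefixes. A short illustrative shell session; the expected values are inferred from to_amqp/1 and to_mqtt/1, not taken from the test suites:

%% Illustrative shell session (sparkplug not enabled, so the plain funs are returned)
{ok, {mqtt2amqp_fun, M2A}, {amqp2mqtt_fun, A2M}} =
    rabbit_mqtt_util:get_topic_translation_funs(),
<<"sensors.*.temperature">> = M2A(<<"sensors/+/temperature">>),
<<"sensors/+/temperature">> = A2M(<<"sensors.*.temperature">>),
<<"alarms.#">>              = M2A(<<"alarms/#">>).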
+ +groups() -> + [{anonymous_ssl_user, [], + [anonymous_auth_success, + user_credentials_auth, + ssl_user_auth_success, + ssl_user_vhost_not_allowed, + ssl_user_vhost_parameter_mapping_success, + ssl_user_vhost_parameter_mapping_not_allowed, + ssl_user_vhost_parameter_mapping_vhost_does_not_exist, + ssl_user_port_vhost_mapping_takes_precedence_over_cert_vhost_mapping + ]}, + {anonymous_no_ssl_user, [], + [anonymous_auth_success, + user_credentials_auth, + port_vhost_mapping_success, + port_vhost_mapping_success_no_mapping, + port_vhost_mapping_not_allowed, + port_vhost_mapping_vhost_does_not_exist + %% SSL auth will succeed, because we cannot ignore anonymous + ]}, + {ssl_user, [], + [anonymous_auth_failure, + user_credentials_auth, + ssl_user_auth_success, + ssl_user_vhost_not_allowed, + ssl_user_vhost_parameter_mapping_success, + ssl_user_vhost_parameter_mapping_not_allowed, + ssl_user_vhost_parameter_mapping_vhost_does_not_exist, + ssl_user_port_vhost_mapping_takes_precedence_over_cert_vhost_mapping + ]}, + {no_ssl_user, [], + [anonymous_auth_failure, + user_credentials_auth, + ssl_user_auth_failure, + port_vhost_mapping_success, + port_vhost_mapping_success_no_mapping, + port_vhost_mapping_not_allowed, + port_vhost_mapping_vhost_does_not_exist + ]}, + {client_id_propagation, [], + [client_id_propagation] + } + ]. + +init_per_suite(Config) -> + rabbit_ct_helpers:log_environment(), + Config. + +end_per_suite(Config) -> + Config. + +init_per_group(Group, Config) -> + Suffix = rabbit_ct_helpers:testcase_absname(Config, "", "-"), + Config1 = rabbit_ct_helpers:set_config(Config, [ + {rmq_nodename_suffix, Suffix}, + {rmq_certspwd, "bunnychow"} + ]), + MqttConfig = mqtt_config(Group), + AuthConfig = auth_config(Group), + rabbit_ct_helpers:run_setup_steps(Config1, + [ fun(Conf) -> merge_app_env(MqttConfig, Conf) end ] ++ + [ fun(Conf) -> case AuthConfig of + undefined -> Conf; + _ -> merge_app_env(AuthConfig, Conf) + end + end ] ++ + rabbit_ct_broker_helpers:setup_steps() ++ + rabbit_ct_client_helpers:setup_steps()). + +end_per_group(_, Config) -> + rabbit_ct_helpers:run_teardown_steps(Config, + rabbit_ct_client_helpers:teardown_steps() ++ + rabbit_ct_broker_helpers:teardown_steps()). + +merge_app_env(MqttConfig, Config) -> + rabbit_ct_helpers:merge_app_env(Config, MqttConfig). + +mqtt_config(anonymous_ssl_user) -> + {rabbitmq_mqtt, [{ssl_cert_login, true}, + {allow_anonymous, true}]}; +mqtt_config(anonymous_no_ssl_user) -> + {rabbitmq_mqtt, [{ssl_cert_login, false}, + {allow_anonymous, true}]}; +mqtt_config(ssl_user) -> + {rabbitmq_mqtt, [{ssl_cert_login, true}, + {allow_anonymous, false}]}; +mqtt_config(no_ssl_user) -> + {rabbitmq_mqtt, [{ssl_cert_login, false}, + {allow_anonymous, false}]}; +mqtt_config(client_id_propagation) -> + {rabbitmq_mqtt, [{ssl_cert_login, true}, + {allow_anonymous, true}]}. + +auth_config(client_id_propagation) -> + {rabbit, [ + {auth_backends, [rabbit_auth_backend_mqtt_mock]} + ] + }; +auth_config(_) -> + undefined. 
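Each group above toggles just two plugin settings, ssl_cert_login and allow_anonymous, via the classic config. In new-style configuration the same pair is written as plain rabbitmq.conf keys (both keys also appear in the config schema snippets later in this diff); a hedged example matching the ssl_user group:

mqtt.ssl_cert_login  = true
mqtt.allow_anonymous = false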
+ +init_per_testcase(Testcase, Config) when Testcase == ssl_user_auth_success; + Testcase == ssl_user_auth_failure -> + Config1 = set_cert_user_on_default_vhost(Config), + rabbit_ct_helpers:testcase_started(Config1, Testcase); +init_per_testcase(ssl_user_vhost_parameter_mapping_success, Config) -> + Config1 = set_cert_user_on_default_vhost(Config), + User = ?config(temp_ssl_user, Config1), + ok = rabbit_ct_broker_helpers:clear_permissions(Config1, User, <<"/">>), + Config2 = set_vhost_for_cert_user(Config1, User), + rabbit_ct_helpers:testcase_started(Config2, ssl_user_vhost_parameter_mapping_success); +init_per_testcase(ssl_user_vhost_parameter_mapping_not_allowed, Config) -> + Config1 = set_cert_user_on_default_vhost(Config), + User = ?config(temp_ssl_user, Config1), + Config2 = set_vhost_for_cert_user(Config1, User), + VhostForCertUser = ?config(temp_vhost_for_ssl_user, Config2), + ok = rabbit_ct_broker_helpers:clear_permissions(Config2, User, VhostForCertUser), + rabbit_ct_helpers:testcase_started(Config2, ssl_user_vhost_parameter_mapping_not_allowed); +init_per_testcase(user_credentials_auth, Config) -> + User = <<"new-user">>, + Pass = <<"new-user-pass">>, + ok = rabbit_ct_broker_helpers:add_user(Config, 0, User, Pass), + ok = rabbit_ct_broker_helpers:set_full_permissions(Config, User, <<"/">>), + Config1 = rabbit_ct_helpers:set_config(Config, [{new_user, User}, + {new_user_pass, Pass}]), + rabbit_ct_helpers:testcase_started(Config1, user_credentials_auth); +init_per_testcase(ssl_user_vhost_not_allowed, Config) -> + Config1 = set_cert_user_on_default_vhost(Config), + User = ?config(temp_ssl_user, Config1), + ok = rabbit_ct_broker_helpers:clear_permissions(Config1, User, <<"/">>), + rabbit_ct_helpers:testcase_started(Config1, ssl_user_vhost_not_allowed); +init_per_testcase(ssl_user_vhost_parameter_mapping_vhost_does_not_exist, Config) -> + Config1 = set_cert_user_on_default_vhost(Config), + User = ?config(temp_ssl_user, Config1), + Config2 = set_vhost_for_cert_user(Config1, User), + VhostForCertUser = ?config(temp_vhost_for_ssl_user, Config2), + ok = rabbit_ct_broker_helpers:delete_vhost(Config, VhostForCertUser), + rabbit_ct_helpers:testcase_started(Config1, ssl_user_vhost_parameter_mapping_vhost_does_not_exist); +init_per_testcase(port_vhost_mapping_success, Config) -> + User = <<"guest">>, + Config1 = set_vhost_for_port_vhost_mapping_user(Config, User), + rabbit_ct_broker_helpers:clear_permissions(Config1, User, <<"/">>), + rabbit_ct_helpers:testcase_started(Config1, port_vhost_mapping_success); +init_per_testcase(port_vhost_mapping_success_no_mapping, Config) -> + User = <<"guest">>, + Config1 = set_vhost_for_port_vhost_mapping_user(Config, User), + PortToVHostMappingParameter = [ + {<<"1">>, <<"unlikely to exist">>}, + {<<"2">>, <<"unlikely to exist">>}], + ok = rabbit_ct_broker_helpers:set_global_parameter(Config, mqtt_port_to_vhost_mapping, PortToVHostMappingParameter), + VHost = ?config(temp_vhost_for_port_mapping, Config1), + rabbit_ct_broker_helpers:clear_permissions(Config1, User, VHost), + rabbit_ct_helpers:testcase_started(Config1, port_vhost_mapping_success_no_mapping); +init_per_testcase(port_vhost_mapping_not_allowed, Config) -> + User = <<"guest">>, + Config1 = set_vhost_for_port_vhost_mapping_user(Config, User), + rabbit_ct_broker_helpers:clear_permissions(Config1, User, <<"/">>), + VHost = ?config(temp_vhost_for_port_mapping, Config1), + rabbit_ct_broker_helpers:clear_permissions(Config1, User, VHost), + rabbit_ct_helpers:testcase_started(Config1, 
port_vhost_mapping_not_allowed); +init_per_testcase(port_vhost_mapping_vhost_does_not_exist, Config) -> + User = <<"guest">>, + Config1 = set_vhost_for_port_vhost_mapping_user(Config, User), + rabbit_ct_broker_helpers:clear_permissions(Config1, User, <<"/">>), + VHost = ?config(temp_vhost_for_port_mapping, Config1), + rabbit_ct_broker_helpers:delete_vhost(Config1, VHost), + rabbit_ct_helpers:testcase_started(Config1, port_vhost_mapping_vhost_does_not_exist); +init_per_testcase(ssl_user_port_vhost_mapping_takes_precedence_over_cert_vhost_mapping, Config) -> + Config1 = set_cert_user_on_default_vhost(Config), + User = ?config(temp_ssl_user, Config1), + Config2 = set_vhost_for_cert_user(Config1, User), + + Config3 = set_vhost_for_port_vhost_mapping_user(Config2, User), + VhostForPortMapping = ?config(mqtt_port_to_vhost_mapping, Config2), + rabbit_ct_broker_helpers:clear_permissions(Config3, User, VhostForPortMapping), + + rabbit_ct_broker_helpers:clear_permissions(Config3, User, <<"/">>), + rabbit_ct_helpers:testcase_started(Config3, ssl_user_port_vhost_mapping_takes_precedence_over_cert_vhost_mapping); +init_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:testcase_started(Config, Testcase). + +set_cert_user_on_default_vhost(Config) -> + CertsDir = ?config(rmq_certsdir, Config), + CertFile = filename:join([CertsDir, "client", "cert.pem"]), + {ok, CertBin} = file:read_file(CertFile), + [{'Certificate', Cert, not_encrypted}] = public_key:pem_decode(CertBin), + UserBin = rabbit_ct_broker_helpers:rpc(Config, 0, + rabbit_ssl, + peer_cert_auth_name, + [Cert]), + User = binary_to_list(UserBin), + ok = rabbit_ct_broker_helpers:add_user(Config, 0, User, ""), + ok = rabbit_ct_broker_helpers:set_full_permissions(Config, User, <<"/">>), + rabbit_ct_helpers:set_config(Config, [{temp_ssl_user, User}]). + +set_vhost_for_cert_user(Config, User) -> + VhostForCertUser = <<"vhost_for_cert_user">>, + UserToVHostMappingParameter = [ + {rabbit_data_coercion:to_binary(User), VhostForCertUser}, + {<<"O=client,CN=unlikelytoexistuser">>, <<"vhost2">>} + ], + ok = rabbit_ct_broker_helpers:add_vhost(Config, VhostForCertUser), + ok = rabbit_ct_broker_helpers:set_full_permissions(Config, User, VhostForCertUser), + ok = rabbit_ct_broker_helpers:set_global_parameter(Config, mqtt_default_vhosts, UserToVHostMappingParameter), + rabbit_ct_helpers:set_config(Config, [{temp_vhost_for_ssl_user, VhostForCertUser}]). + +set_vhost_for_port_vhost_mapping_user(Config, User) -> + VhostForPortMapping = <<"vhost_for_port_vhost_mapping">>, + Port = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt), + TlsPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt_tls), + PortToVHostMappingParameter = [ + {integer_to_binary(Port), VhostForPortMapping}, + {<<"1884">>, <<"vhost2">>}, + {integer_to_binary(TlsPort), VhostForPortMapping}, + {<<"8884">>, <<"vhost2">>} + + ], + ok = rabbit_ct_broker_helpers:add_vhost(Config, VhostForPortMapping), + ok = rabbit_ct_broker_helpers:set_full_permissions(Config, User, VhostForPortMapping), + ok = rabbit_ct_broker_helpers:set_global_parameter(Config, mqtt_port_to_vhost_mapping, PortToVHostMappingParameter), + rabbit_ct_helpers:set_config(Config, [{temp_vhost_for_port_mapping, VhostForPortMapping}]). 
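The two helpers above implement certificate-to-vhost and port-to-vhost selection by writing the mqtt_default_vhosts and mqtt_port_to_vhost_mapping global runtime parameters. A condensed sketch of the same calls; the user and vhost names here are made up, only the parameter names and value shapes match the helpers above:

%% Illustrative values only
ok = rabbit_ct_broker_helpers:set_global_parameter(
       Config, mqtt_default_vhosts,
       [{<<"O=client,CN=example-cert-user">>, <<"vhost-for-cert-user">>}]),
ok = rabbit_ct_broker_helpers:set_global_parameter(
       Config, mqtt_port_to_vhost_mapping,
       [{<<"1883">>, <<"vhost-a">>}, {<<"8883">>, <<"vhost-a">>}]).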
+ +end_per_testcase(Testcase, Config) when Testcase == ssl_user_auth_success; + Testcase == ssl_user_auth_failure; + Testcase == ssl_user_vhost_not_allowed -> + delete_cert_user(Config), + rabbit_ct_helpers:testcase_finished(Config, Testcase); +end_per_testcase(TestCase, Config) when TestCase == ssl_user_vhost_parameter_mapping_success; + TestCase == ssl_user_vhost_parameter_mapping_not_allowed -> + delete_cert_user(Config), + VhostForCertUser = ?config(temp_vhost_for_ssl_user, Config), + ok = rabbit_ct_broker_helpers:delete_vhost(Config, VhostForCertUser), + ok = rabbit_ct_broker_helpers:clear_global_parameter(Config, mqtt_default_vhosts), + rabbit_ct_helpers:testcase_finished(Config, TestCase); +end_per_testcase(user_credentials_auth, Config) -> + User = ?config(new_user, Config), + {ok,_} = rabbit_ct_broker_helpers:rabbitmqctl(Config, 0, ["delete_user", User]), + rabbit_ct_helpers:testcase_finished(Config, user_credentials_auth); +end_per_testcase(ssl_user_vhost_parameter_mapping_vhost_does_not_exist, Config) -> + delete_cert_user(Config), + ok = rabbit_ct_broker_helpers:clear_global_parameter(Config, mqtt_default_vhosts), + rabbit_ct_helpers:testcase_finished(Config, ssl_user_vhost_parameter_mapping_vhost_does_not_exist); +end_per_testcase(Testcase, Config) when Testcase == port_vhost_mapping_success; + Testcase == port_vhost_mapping_not_allowed; + Testcase == port_vhost_mapping_success_no_mapping -> + User = <<"guest">>, + rabbit_ct_broker_helpers:set_full_permissions(Config, User, <<"/">>), + VHost = ?config(temp_vhost_for_port_mapping, Config), + ok = rabbit_ct_broker_helpers:delete_vhost(Config, VHost), + ok = rabbit_ct_broker_helpers:clear_global_parameter(Config, mqtt_port_to_vhost_mapping), + rabbit_ct_helpers:testcase_finished(Config, Testcase); +end_per_testcase(port_vhost_mapping_vhost_does_not_exist, Config) -> + User = <<"guest">>, + ok = rabbit_ct_broker_helpers:set_full_permissions(Config, User, <<"/">>), + ok = rabbit_ct_broker_helpers:clear_global_parameter(Config, mqtt_port_to_vhost_mapping), + rabbit_ct_helpers:testcase_finished(Config, port_vhost_mapping_vhost_does_not_exist); +end_per_testcase(ssl_user_port_vhost_mapping_takes_precedence_over_cert_vhost_mapping, Config) -> + delete_cert_user(Config), + VhostForCertUser = ?config(temp_vhost_for_ssl_user, Config), + ok = rabbit_ct_broker_helpers:delete_vhost(Config, VhostForCertUser), + ok = rabbit_ct_broker_helpers:clear_global_parameter(Config, mqtt_default_vhosts), + + VHostForPortVHostMapping = ?config(temp_vhost_for_port_mapping, Config), + ok = rabbit_ct_broker_helpers:delete_vhost(Config, VHostForPortVHostMapping), + ok = rabbit_ct_broker_helpers:clear_global_parameter(Config, mqtt_port_to_vhost_mapping), + rabbit_ct_helpers:testcase_finished(Config, ssl_user_port_vhost_mapping_takes_precedence_over_cert_vhost_mapping); +end_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:testcase_finished(Config, Testcase). + +delete_cert_user(Config) -> + User = ?config(temp_ssl_user, Config), + {ok,_} = rabbit_ct_broker_helpers:rabbitmqctl(Config, 0, ["delete_user", User]). + +anonymous_auth_success(Config) -> + expect_successful_connection(fun connect_anonymous/1, Config). + +anonymous_auth_failure(Config) -> + expect_authentication_failure(fun connect_anonymous/1, Config). + + +ssl_user_auth_success(Config) -> + expect_successful_connection(fun connect_ssl/1, Config). + +ssl_user_auth_failure(Config) -> + expect_authentication_failure(fun connect_ssl/1, Config). 
+ +user_credentials_auth(Config) -> + NewUser = ?config(new_user, Config), + NewUserPass = ?config(new_user_pass, Config), + + expect_successful_connection( + fun(Conf) -> connect_user(NewUser, NewUserPass, Conf) end, + Config), + + expect_successful_connection( + fun(Conf) -> connect_user(<<"guest">>, <<"guest">>, Conf) end, + Config), + + expect_successful_connection( + fun(Conf) -> connect_user(<<"/:guest">>, <<"guest">>, Conf) end, + Config), + + expect_authentication_failure( + fun(Conf) -> connect_user(NewUser, <<"invalid_pass">>, Conf) end, + Config), + + expect_authentication_failure( + fun(Conf) -> connect_user(undefined, <<"pass">>, Conf) end, + Config), + + expect_authentication_failure( + fun(Conf) -> connect_user(NewUser, undefined, Conf) end, + Config), + + expect_authentication_failure( + fun(Conf) -> connect_user(<<"non-existing-vhost:guest">>, <<"guest">>, Conf) end, + Config). + +ssl_user_vhost_parameter_mapping_success(Config) -> + expect_successful_connection(fun connect_ssl/1, Config). + +ssl_user_vhost_parameter_mapping_not_allowed(Config) -> + expect_authentication_failure(fun connect_ssl/1, Config). + +ssl_user_vhost_not_allowed(Config) -> + expect_authentication_failure(fun connect_ssl/1, Config). + +ssl_user_vhost_parameter_mapping_vhost_does_not_exist(Config) -> + expect_authentication_failure(fun connect_ssl/1, Config). + +port_vhost_mapping_success(Config) -> + expect_successful_connection( + fun(Conf) -> connect_user(<<"guest">>, <<"guest">>, Conf) end, + Config). + +port_vhost_mapping_success_no_mapping(Config) -> + %% no vhost mapping for the port, falling back to default vhost + %% where the user can connect + expect_successful_connection( + fun(Conf) -> connect_user(<<"guest">>, <<"guest">>, Conf) end, + Config + ). + +port_vhost_mapping_not_allowed(Config) -> + expect_authentication_failure( + fun(Conf) -> connect_user(<<"guest">>, <<"guest">>, Conf) end, + Config + ). + +port_vhost_mapping_vhost_does_not_exist(Config) -> + expect_authentication_failure( + fun(Conf) -> connect_user(<<"guest">>, <<"guest">>, Conf) end, + Config + ). + +ssl_user_port_vhost_mapping_takes_precedence_over_cert_vhost_mapping(Config) -> + expect_successful_connection(fun connect_ssl/1, Config). + +connect_anonymous(Config) -> + P = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt), + emqttc:start_link([{host, "localhost"}, + {port, P}, + {client_id, <<"simpleClient">>}, + {proto_ver, 3}, + {logger, info}]). + +connect_ssl(Config) -> + CertsDir = ?config(rmq_certsdir, Config), + SSLConfig = [{cacertfile, filename:join([CertsDir, "testca", "cacert.pem"])}, + {certfile, filename:join([CertsDir, "client", "cert.pem"])}, + {keyfile, filename:join([CertsDir, "client", "key.pem"])}], + P = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt_tls), + emqttc:start_link([{host, "localhost"}, + {port, P}, + {client_id, <<"simpleClient">>}, + {proto_ver, 3}, + {logger, info}, + {ssl, SSLConfig}]). 
+ +client_id_propagation(Config) -> + ok = rabbit_ct_broker_helpers:add_code_path_to_all_nodes(Config, + rabbit_auth_backend_mqtt_mock), + ClientId = <<"client-id-propagation">>, + {ok, C} = connect_user(<<"client-id-propagation">>, <<"client-id-propagation">>, + Config, ClientId), + receive {mqttc, C, connected} -> ok + after ?CONNECT_TIMEOUT -> exit(emqttc_connection_timeout) + end, + emqttc:subscribe(C, <<"TopicA">>, qos0), + [{authentication, AuthProps}] = rabbit_ct_broker_helpers:rpc(Config, 0, + rabbit_auth_backend_mqtt_mock, + get, + [authentication]), + ?assertEqual(ClientId, proplists:get_value(client_id, AuthProps)), + + [{vhost_access, AuthzData}] = rabbit_ct_broker_helpers:rpc(Config, 0, + rabbit_auth_backend_mqtt_mock, + get, + [vhost_access]), + ?assertEqual(ClientId, maps:get(<<"client_id">>, AuthzData)), + + [{resource_access, AuthzContext}] = rabbit_ct_broker_helpers:rpc(Config, 0, + rabbit_auth_backend_mqtt_mock, + get, + [resource_access]), + ?assertEqual(true, maps:size(AuthzContext) > 0), + ?assertEqual(ClientId, maps:get(<<"client_id">>, AuthzContext)), + + [{topic_access, TopicContext}] = rabbit_ct_broker_helpers:rpc(Config, 0, + rabbit_auth_backend_mqtt_mock, + get, + [topic_access]), + VariableMap = maps:get(variable_map, TopicContext), + ?assertEqual(ClientId, maps:get(<<"client_id">>, VariableMap)), + + emqttc:disconnect(C). + +connect_user(User, Pass, Config) -> + connect_user(User, Pass, Config, User). +connect_user(User, Pass, Config, ClientID) -> + Creds = case User of + undefined -> []; + _ -> [{username, User}] + end ++ case Pass of + undefined -> []; + _ -> [{password, Pass}] + end, + P = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt), + emqttc:start_link([{host, "localhost"}, + {port, P}, + {client_id, ClientID}, + {proto_ver, 3}, + {logger, info}] ++ Creds). + +expect_successful_connection(ConnectFun, Config) -> + rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_core_metrics, reset_auth_attempt_metrics, []), + {ok, C} = ConnectFun(Config), + receive {mqttc, C, connected} -> emqttc:disconnect(C) + after ?CONNECT_TIMEOUT -> exit(emqttc_connection_timeout) + end, + [Attempt] = + rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_core_metrics, get_auth_attempts, []), + ?assertEqual(false, proplists:is_defined(remote_address, Attempt)), + ?assertEqual(false, proplists:is_defined(username, Attempt)), + ?assertEqual(proplists:get_value(protocol, Attempt), <<"mqtt">>), + ?assertEqual(proplists:get_value(auth_attempts, Attempt), 1), + ?assertEqual(proplists:get_value(auth_attempts_failed, Attempt), 0), + ?assertEqual(proplists:get_value(auth_attempts_succeeded, Attempt), 1). 
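The expectation helper above and the one that follows both assert on the per-protocol authentication attempt counters kept by rabbit_core_metrics. A hedged sketch of the proplist shape those assertions rely on; the key names are taken from the assertions, while the closing invariant is an assumption:

%% Illustrative: inspecting the auth attempt counters
[Attempt | _] = rabbit_core_metrics:get_auth_attempts(),
<<"mqtt">> = proplists:get_value(protocol, Attempt),
Total      = proplists:get_value(auth_attempts, Attempt),
Failed     = proplists:get_value(auth_attempts_failed, Attempt),
Succeeded  = proplists:get_value(auth_attempts_succeeded, Attempt),
%% assumption: every attempt is counted exactly once as failed or succeeded
true = (Total =:= Failed + Succeeded).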
+ +expect_authentication_failure(ConnectFun, Config) -> + process_flag(trap_exit, true), + rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_core_metrics, reset_auth_attempt_metrics, []), + {ok, C} = ConnectFun(Config), + Result = receive + {mqttc, C, connected} -> {error, unexpected_anonymous_connection}; + {'EXIT', C, {shutdown,{connack_error,'CONNACK_AUTH'}}} -> ok; + {'EXIT', C, {shutdown,{connack_error,'CONNACK_CREDENTIALS'}}} -> ok + after + ?CONNECT_TIMEOUT -> {error, emqttc_connection_timeout} + end, + [Attempt] = + rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_core_metrics, get_auth_attempts, []), + ?assertEqual(false, proplists:is_defined(remote_address, Attempt), <<>>), + ?assertEqual(false, proplists:is_defined(username, Attempt)), + ?assertEqual(proplists:get_value(protocol, Attempt), <<"mqtt">>), + ?assertEqual(proplists:get_value(auth_attempts, Attempt), 1), + ?assertEqual(proplists:get_value(auth_attempts_failed, Attempt), 1), + ?assertEqual(proplists:get_value(auth_attempts_succeeded, Attempt), 0), + process_flag(trap_exit, false), + case Result of + ok -> ok; + {error, Err} -> exit(Err) + end. diff --git a/deps/rabbitmq_mqtt/test/cluster_SUITE.erl b/deps/rabbitmq_mqtt/test/cluster_SUITE.erl new file mode 100644 index 0000000000..941b195ced --- /dev/null +++ b/deps/rabbitmq_mqtt/test/cluster_SUITE.erl @@ -0,0 +1,188 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% +-module(cluster_SUITE). +-compile([export_all]). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("eunit/include/eunit.hrl"). + +all() -> + [ + {group, non_parallel_tests} + ]. + +groups() -> + [ + {non_parallel_tests, [], [ + connection_id_tracking, + connection_id_tracking_on_nodedown, + connection_id_tracking_with_decommissioned_node + ]} + ]. + +suite() -> + [{timetrap, {minutes, 5}}]. + +%% ------------------------------------------------------------------- +%% Testsuite setup/teardown. +%% ------------------------------------------------------------------- + +merge_app_env(Config) -> + rabbit_ct_helpers:merge_app_env(Config, + {rabbit, [ + {collect_statistics, basic}, + {collect_statistics_interval, 100} + ]}). + +init_per_suite(Config) -> + rabbit_ct_helpers:log_environment(), + rabbit_ct_helpers:run_setup_steps(Config). + +end_per_suite(Config) -> + rabbit_ct_helpers:run_teardown_steps(Config). + +init_per_group(_, Config) -> + Config. + +end_per_group(_, Config) -> + Config. + +init_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:testcase_started(Config, Testcase), + rabbit_ct_helpers:log_environment(), + Config1 = rabbit_ct_helpers:set_config(Config, [ + {rmq_nodename_suffix, Testcase}, + {rmq_extra_tcp_ports, [tcp_port_mqtt_extra, + tcp_port_mqtt_tls_extra]}, + {rmq_nodes_clustered, true}, + {rmq_nodes_count, 5} + ]), + rabbit_ct_helpers:run_setup_steps(Config1, + [ fun merge_app_env/1 ] ++ + rabbit_ct_broker_helpers:setup_steps() ++ + rabbit_ct_client_helpers:setup_steps()). + +end_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:run_teardown_steps(Config, + rabbit_ct_client_helpers:teardown_steps() ++ + rabbit_ct_broker_helpers:teardown_steps()), + rabbit_ct_helpers:testcase_finished(Config, Testcase). 
+ +%% ------------------------------------------------------------------- +%% Test cases +%% ------------------------------------------------------------------- + +%% Note about running this testsuite in a mixed-versions cluster: +%% All even-numbered nodes will use the same code base when using a +%% secondary Umbrella. Odd-numbered nodes might use an incompatible code +%% base. When cluster-wide client ID tracking was introduced, it was not +%% put behind a feature flag because there was no need for one. Here, we +%% don't have a way to ensure that all nodes participate in client ID +%% tracking. However, those using the same code should. That's why we +%% limit our RPC calls to those nodes. +%% +%% That's also the reason why we use a 5-node cluster: with node 2 and +%% 4 which might not participate, it leaves nodes 1, 3 and 5: thus 3 +%% nodes, the minimum to use Ra in proper conditions. + +connection_id_tracking(Config) -> + ID = <<"duplicate-id">>, + {ok, MRef1, C1} = connect_to_node(Config, 0, ID), + emqttc:subscribe(C1, <<"TopicA">>, qos0), + emqttc:publish(C1, <<"TopicA">>, <<"Payload">>), + expect_publishes(<<"TopicA">>, [<<"Payload">>]), + + %% there's one connection + assert_connection_count(Config, 10, 2, 1), + + %% connect to the same node (A or 0) + {ok, MRef2, _C2} = connect_to_node(Config, 0, ID), + + %% C1 is disconnected + await_disconnection(MRef1), + + %% connect to a different node (C or 2) + {ok, _, C3} = connect_to_node(Config, 2, ID), + assert_connection_count(Config, 10, 2, 1), + + %% C2 is disconnected + await_disconnection(MRef2), + + emqttc:disconnect(C3). + +connection_id_tracking_on_nodedown(Config) -> + Server = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename), + {ok, MRef, C} = connect_to_node(Config, 0, <<"simpleClient">>), + emqttc:subscribe(C, <<"TopicA">>, qos0), + emqttc:publish(C, <<"TopicA">>, <<"Payload">>), + expect_publishes(<<"TopicA">>, [<<"Payload">>]), + assert_connection_count(Config, 10, 2, 1), + ok = rabbit_ct_broker_helpers:stop_node(Config, Server), + await_disconnection(MRef), + assert_connection_count(Config, 10, 2, 0), + ok. + +connection_id_tracking_with_decommissioned_node(Config) -> + Server = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename), + {ok, MRef, C} = connect_to_node(Config, 0, <<"simpleClient">>), + emqttc:subscribe(C, <<"TopicA">>, qos0), + emqttc:publish(C, <<"TopicA">>, <<"Payload">>), + expect_publishes(<<"TopicA">>, [<<"Payload">>]), + + assert_connection_count(Config, 10, 2, 1), + {ok, _} = rabbit_ct_broker_helpers:rabbitmqctl(Config, 0, ["decommission_mqtt_node", Server]), + await_disconnection(MRef), + assert_connection_count(Config, 10, 2, 0), + ok. + +%% +%% Helpers +%% + +assert_connection_count(_Config, 0, _, _) -> + ct:fail("failed to complete rabbit_mqtt_collector:list/0"); +assert_connection_count(Config, Retries, NodeId, NumElements) -> + List = rabbit_ct_broker_helpers:rpc(Config, NodeId, rabbit_mqtt_collector, list, []), + case length(List) == NumElements of + true -> + ok; + false -> + timer:sleep(200), + assert_connection_count(Config, Retries-1, NodeId, NumElements) + end. + + + +connect_to_node(Config, Node, ClientID) -> + Port = rabbit_ct_broker_helpers:get_node_config(Config, Node, tcp_port_mqtt), + {ok, C} = connect(Port, ClientID), + MRef = erlang:monitor(process, C), + {ok, MRef, C}. 
+ +connect(Port, ClientID) -> + {ok, C} = emqttc:start_link([{host, "localhost"}, + {port, Port}, + {client_id, ClientID}, + {proto_ver, 3}, + {logger, info}, + {puback_timeout, 1}]), + unlink(C), + {ok, C}. + +await_disconnection(Ref) -> + receive + {'DOWN', Ref, _, _, _} -> ok + after 30000 -> exit(missing_down_message) + end. + +expect_publishes(_Topic, []) -> ok; +expect_publishes(Topic, [Payload|Rest]) -> + receive + {publish, Topic, Payload} -> expect_publishes(Topic, Rest) + after 5000 -> + throw({publish_not_delivered, Payload}) + end. diff --git a/deps/rabbitmq_mqtt/test/command_SUITE.erl b/deps/rabbitmq_mqtt/test/command_SUITE.erl new file mode 100644 index 0000000000..a15c3789f7 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/command_SUITE.erl @@ -0,0 +1,158 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. + + +-module(command_SUITE). +-compile([export_all]). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-include_lib("amqp_client/include/amqp_client.hrl"). +-include("rabbit_mqtt.hrl"). + + +-define(COMMAND, 'Elixir.RabbitMQ.CLI.Ctl.Commands.ListMqttConnectionsCommand'). + +all() -> + [ + {group, non_parallel_tests} + ]. + +groups() -> + [ + {non_parallel_tests, [], [ + merge_defaults, + run + ]} + ]. + +init_per_suite(Config) -> + rabbit_ct_helpers:log_environment(), + Config1 = rabbit_ct_helpers:set_config(Config, [ + {rmq_nodename_suffix, ?MODULE}, + {rmq_extra_tcp_ports, [tcp_port_mqtt_extra, + tcp_port_mqtt_tls_extra]}, + {rmq_nodes_clustered, true}, + {rmq_nodes_count, 3} + ]), + rabbit_ct_helpers:run_setup_steps(Config1, + rabbit_ct_broker_helpers:setup_steps() ++ + rabbit_ct_client_helpers:setup_steps()). + +end_per_suite(Config) -> + rabbit_ct_helpers:run_teardown_steps(Config, + rabbit_ct_client_helpers:teardown_steps() ++ + rabbit_ct_broker_helpers:teardown_steps()). + +init_per_group(_, Config) -> + Config. + +end_per_group(_, Config) -> + Config. + +init_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:testcase_started(Config, Testcase). + +end_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:testcase_finished(Config, Testcase). + +merge_defaults(_Config) -> + {[<<"client_id">>, <<"conn_name">>], #{verbose := false}} = + ?COMMAND:merge_defaults([], #{}), + + {[<<"other_key">>], #{verbose := true}} = + ?COMMAND:merge_defaults([<<"other_key">>], #{verbose => true}), + + {[<<"other_key">>], #{verbose := false}} = + ?COMMAND:merge_defaults([<<"other_key">>], #{verbose => false}). 
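merge_defaults/2 above shows that the connection listing command returns client_id and conn_name by default. The run/1 test case that follows drives the same Elixir command module directly from Erlang; from a shell the equivalent invocation would be along the lines of the following (hedged: the CLI command name is inferred from the module name, it is not shown in this diff):

# hedged example; info item names follow ?INFO_ITEMS
rabbitmqctl list_mqtt_connections client_id user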
+ + +run(Config) -> + + Node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename), + Opts = #{node => Node, timeout => 10000, verbose => false}, + + %% No connections + [] = 'Elixir.Enum':to_list(?COMMAND:run([], Opts)), + + P = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt), + {ok, _} = emqttc:start_link([{host, "localhost"}, + {port, P}, + {client_id, <<"simpleClient">>}, + {proto_ver, 3}, + {logger, info}, + {puback_timeout, 1}]), + ct:sleep(100), + + [[{client_id, <<"simpleClient">>}]] = + 'Elixir.Enum':to_list(?COMMAND:run([<<"client_id">>], Opts)), + + + {ok, _} = emqttc:start_link([{host, "localhost"}, + {port, P}, + {client_id, <<"simpleClient1">>}, + {proto_ver, 3}, + {logger, info}, + {username, <<"guest">>}, + {password, <<"guest">>}, + {puback_timeout, 1}]), + ct:sleep(200), + + [[{client_id, <<"simpleClient">>}, {user, <<"guest">>}], + [{client_id, <<"simpleClient1">>}, {user, <<"guest">>}]] = + lists:sort( + 'Elixir.Enum':to_list(?COMMAND:run([<<"client_id">>, <<"user">>], + Opts))), + + Port = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_amqp), + start_amqp_connection(network, Node, Port), + + %% There are still just two connections + [[{client_id, <<"simpleClient">>}], + [{client_id, <<"simpleClient1">>}]] = + lists:sort('Elixir.Enum':to_list(?COMMAND:run([<<"client_id">>], Opts))), + + start_amqp_connection(direct, Node, Port), + ct:sleep(200), + + %% Still two MQTT connections, one direct AMQP 0-9-1 connection + [[{client_id, <<"simpleClient">>}], + [{client_id, <<"simpleClient1">>}]] = + lists:sort('Elixir.Enum':to_list(?COMMAND:run([<<"client_id">>], Opts))), + + %% Verbose returns all keys + Infos = lists:map(fun(El) -> atom_to_binary(El, utf8) end, ?INFO_ITEMS), + AllKeys1 = 'Elixir.Enum':to_list(?COMMAND:run(Infos, Opts)), + AllKeys2 = 'Elixir.Enum':to_list(?COMMAND:run([], Opts#{verbose => true})), + + %% There are two connections + [FirstPL, _] = AllKeys1, + [SecondPL, _] = AllKeys2, + + First = maps:from_list(lists:usort(FirstPL)), + Second = maps:from_list(lists:usort(SecondPL)), + + %% Keys are INFO_ITEMS + KeysCount = length(?INFO_ITEMS), + ?assert(KeysCount =:= maps:size(First)), + ?assert(KeysCount =:= maps:size(Second)), + + Keys = maps:keys(First), + + [] = Keys -- ?INFO_ITEMS, + [] = ?INFO_ITEMS -- Keys. + + +start_amqp_connection(Type, Node, Port) -> + amqp_connection:start(amqp_params(Type, Node, Port)). + +amqp_params(network, _, Port) -> + #amqp_params_network{port = Port}; +amqp_params(direct, Node, _) -> + #amqp_params_direct{node = Node}. + + + diff --git a/deps/rabbitmq_mqtt/test/config_schema_SUITE.erl b/deps/rabbitmq_mqtt/test/config_schema_SUITE.erl new file mode 100644 index 0000000000..c760148cad --- /dev/null +++ b/deps/rabbitmq_mqtt/test/config_schema_SUITE.erl @@ -0,0 +1,55 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2016-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +-module(config_schema_SUITE). + +-compile(export_all). + +all() -> + [ + run_snippets + ]. + +%% ------------------------------------------------------------------- +%% Testsuite setup/teardown. 
+%% ------------------------------------------------------------------- + +init_per_suite(Config) -> + rabbit_ct_helpers:log_environment(), + Config1 = rabbit_ct_helpers:run_setup_steps(Config), + rabbit_ct_config_schema:init_schemas(rabbitmq_mqtt, Config1). + + +end_per_suite(Config) -> + rabbit_ct_helpers:run_teardown_steps(Config). + +init_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:testcase_started(Config, Testcase), + Config1 = rabbit_ct_helpers:set_config(Config, [ + {rmq_nodename_suffix, Testcase} + ]), + rabbit_ct_helpers:run_steps(Config1, + rabbit_ct_broker_helpers:setup_steps() ++ + rabbit_ct_client_helpers:setup_steps()). + +end_per_testcase(Testcase, Config) -> + Config1 = rabbit_ct_helpers:run_steps(Config, + rabbit_ct_client_helpers:teardown_steps() ++ + rabbit_ct_broker_helpers:teardown_steps()), + rabbit_ct_helpers:testcase_finished(Config1, Testcase). + +%% ------------------------------------------------------------------- +%% Testcases. +%% ------------------------------------------------------------------- + +run_snippets(Config) -> + ok = rabbit_ct_broker_helpers:rpc(Config, 0, + ?MODULE, run_snippets1, [Config]). + +run_snippets1(Config) -> + rabbit_ct_config_schema:run_snippets(Config). + diff --git a/deps/rabbitmq_mqtt/test/config_schema_SUITE_data/certs/cacert.pem b/deps/rabbitmq_mqtt/test/config_schema_SUITE_data/certs/cacert.pem new file mode 100644 index 0000000000..eaf6b67806 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/config_schema_SUITE_data/certs/cacert.pem @@ -0,0 +1 @@ +I'm not a certificate diff --git a/deps/rabbitmq_mqtt/test/config_schema_SUITE_data/certs/cert.pem b/deps/rabbitmq_mqtt/test/config_schema_SUITE_data/certs/cert.pem new file mode 100644 index 0000000000..eaf6b67806 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/config_schema_SUITE_data/certs/cert.pem @@ -0,0 +1 @@ +I'm not a certificate diff --git a/deps/rabbitmq_mqtt/test/config_schema_SUITE_data/certs/key.pem b/deps/rabbitmq_mqtt/test/config_schema_SUITE_data/certs/key.pem new file mode 100644 index 0000000000..eaf6b67806 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/config_schema_SUITE_data/certs/key.pem @@ -0,0 +1 @@ +I'm not a certificate diff --git a/deps/rabbitmq_mqtt/test/config_schema_SUITE_data/rabbitmq_mqtt.snippets b/deps/rabbitmq_mqtt/test/config_schema_SUITE_data/rabbitmq_mqtt.snippets new file mode 100644 index 0000000000..032cce01f9 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/config_schema_SUITE_data/rabbitmq_mqtt.snippets @@ -0,0 +1,144 @@ +[{defaults, + "listeners.tcp.default = 5672 + mqtt.default_user = guest + mqtt.default_pass = guest + mqtt.allow_anonymous = true + mqtt.vhost = / + mqtt.exchange = amq.topic + mqtt.subscription_ttl = 1800000 + mqtt.prefetch = 10 + mqtt.sparkplug = true + mqtt.listeners.ssl = none +## Default MQTT with TLS port is 8883 +# mqtt.listeners.ssl.default = 8883 + mqtt.listeners.tcp.default = 1883 + mqtt.tcp_listen_options.backlog = 128 + mqtt.tcp_listen_options.nodelay = true + mqtt.proxy_protocol = false", + [{rabbit,[{tcp_listeners,[5672]}]}, + {rabbitmq_mqtt, + [{default_user,<<"guest">>}, + {default_pass,<<"guest">>}, + {allow_anonymous,true}, + {vhost,<<"/">>}, + {exchange,<<"amq.topic">>}, + {subscription_ttl,1800000}, + {prefetch,10}, + {sparkplug,true}, + {ssl_listeners,[]}, + {tcp_listeners,[1883]}, + {tcp_listen_options,[{backlog,128},{nodelay,true}]}, + {proxy_protocol,false}]}], + [rabbitmq_mqtt]}, + + {listener_tcp_options, + "mqtt.listeners.tcp.1 = 127.0.0.1:61613 + mqtt.listeners.tcp.2 = ::1:61613 + + 
mqtt.tcp_listen_options.backlog = 2048 + mqtt.tcp_listen_options.recbuf = 8192 + mqtt.tcp_listen_options.sndbuf = 8192 + + mqtt.tcp_listen_options.keepalive = true + mqtt.tcp_listen_options.nodelay = true + + mqtt.tcp_listen_options.exit_on_close = true + + mqtt.tcp_listen_options.send_timeout = 120 +", + [{rabbitmq_mqtt,[ + {tcp_listeners,[ + {"127.0.0.1",61613}, + {"::1",61613} + ]} + , {tcp_listen_options, [ + {backlog, 2048}, + {exit_on_close, true}, + + {recbuf, 8192}, + {sndbuf, 8192}, + + {send_timeout, 120}, + + {keepalive, true}, + {nodelay, true} + ]} + ]}], + [rabbitmq_mqtt]}, + + + {ssl, + "ssl_options.cacertfile = test/config_schema_SUITE_data/certs/cacert.pem + ssl_options.certfile = test/config_schema_SUITE_data/certs/cert.pem + ssl_options.keyfile = test/config_schema_SUITE_data/certs/key.pem + ssl_options.verify = verify_peer + ssl_options.fail_if_no_peer_cert = true + + mqtt.listeners.ssl.default = 8883 + mqtt.listeners.tcp.default = 1883", + [{rabbit, + [{ssl_options, + [{cacertfile,"test/config_schema_SUITE_data/certs/cacert.pem"}, + {certfile,"test/config_schema_SUITE_data/certs/cert.pem"}, + {keyfile,"test/config_schema_SUITE_data/certs/key.pem"}, + {verify,verify_peer}, + {fail_if_no_peer_cert,true}]}]}, + {rabbitmq_mqtt,[{ssl_listeners,[8883]},{tcp_listeners,[1883]}]}], + [rabbitmq_mqtt]}, + {ssl_cert_login, + "mqtt.ssl_cert_login = true", + [{rabbitmq_mqtt,[{ssl_cert_login,true}]}], + [rabbitmq_mqtt]}, + {ssl_cert_login_from, + "ssl_cert_login_from = common_name", + [{rabbit,[{ssl_cert_login_from,common_name}]}], + [rabbitmq_mqtt]}, + {proxy_protocol, + "listeners.tcp.default = 5672 + mqtt.default_user = guest + mqtt.default_pass = guest + mqtt.allow_anonymous = true + mqtt.vhost = / + mqtt.exchange = amq.topic + mqtt.subscription_ttl = undefined + mqtt.prefetch = 10 + mqtt.proxy_protocol = true", + [{rabbit,[{tcp_listeners,[5672]}]}, + {rabbitmq_mqtt, + [{default_user,<<"guest">>}, + {default_pass,<<"guest">>}, + {allow_anonymous,true}, + {vhost,<<"/">>}, + {exchange,<<"amq.topic">>}, + {subscription_ttl,undefined}, + {prefetch,10}, + {proxy_protocol,true}]}], + [rabbitmq_mqtt]}, + {prefetch_retained_msg_store, + "mqtt.default_user = guest + mqtt.default_pass = guest + mqtt.allow_anonymous = true + mqtt.vhost = / + mqtt.exchange = amq.topic + mqtt.subscription_ttl = 1800000 + mqtt.prefetch = 10 +## use DETS (disk-based) store for retained messages + mqtt.retained_message_store = rabbit_mqtt_retained_msg_store_dets +## only used by DETS store + mqtt.retained_message_store_dets_sync_interval = 2000 + + mqtt.listeners.ssl = none + mqtt.listeners.tcp.default = 1883", + [{rabbitmq_mqtt, + [{default_user,<<"guest">>}, + {default_pass,<<"guest">>}, + {allow_anonymous,true}, + {vhost,<<"/">>}, + {exchange,<<"amq.topic">>}, + {subscription_ttl,1800000}, + {prefetch,10}, + {retained_message_store,rabbit_mqtt_retained_msg_store_dets}, + {retained_message_store_dets_sync_interval,2000}, + {ssl_listeners,[]}, + {tcp_listeners,[1883]}]}], + [rabbitmq_mqtt]}]. diff --git a/deps/rabbitmq_mqtt/test/java_SUITE.erl b/deps/rabbitmq_mqtt/test/java_SUITE.erl new file mode 100644 index 0000000000..34ec8dac19 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE.erl @@ -0,0 +1,127 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. 
+%% + +-module(java_SUITE). +-compile([export_all]). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("eunit/include/eunit.hrl"). + +-define(BASE_CONF_RABBIT, {rabbit, [{ssl_options, [{fail_if_no_peer_cert, false}]}]}). +-define(BASE_CONF_MQTT, + {rabbitmq_mqtt, [ + {ssl_cert_login, true}, + {allow_anonymous, false}, + {sparkplug, true}, + {tcp_listeners, []}, + {ssl_listeners, []} + ]}). + +all() -> + [ + {group, non_parallel_tests} + ]. + +groups() -> + [ + {non_parallel_tests, [], [ + java + ]} + ]. + +suite() -> + [{timetrap, {seconds, 600}}]. + +%% ------------------------------------------------------------------- +%% Testsuite setup/teardown. +%% ------------------------------------------------------------------- + +merge_app_env(Config) -> + {ok, Ssl} = q(Config, [erlang_node_config, rabbit, ssl_options]), + Ssl1 = lists:keyreplace(fail_if_no_peer_cert, 1, Ssl, {fail_if_no_peer_cert, false}), + Config1 = rabbit_ct_helpers:merge_app_env(Config, {rabbit, [{ssl_options, Ssl1}]}), + rabbit_ct_helpers:merge_app_env(Config1, ?BASE_CONF_MQTT). + +init_per_suite(Config) -> + rabbit_ct_helpers:log_environment(), + Config1 = rabbit_ct_helpers:set_config(Config, [ + {rmq_nodename_suffix, ?MODULE}, + {rmq_certspwd, "bunnychow"}, + {rmq_nodes_clustered, true}, + {rmq_nodes_count, 3} + ]), + rabbit_ct_helpers:run_setup_steps(Config1, + [ fun merge_app_env/1 ] ++ + rabbit_ct_broker_helpers:setup_steps() ++ + rabbit_ct_client_helpers:setup_steps()). + +end_per_suite(Config) -> + rabbit_ct_helpers:run_teardown_steps(Config, + rabbit_ct_client_helpers:teardown_steps() ++ + rabbit_ct_broker_helpers:teardown_steps()). + +init_per_group(_, Config) -> + Config. + +end_per_group(_, Config) -> + Config. + +init_per_testcase(Testcase, Config) -> + CertsDir = ?config(rmq_certsdir, Config), + CertFile = filename:join([CertsDir, "client", "cert.pem"]), + {ok, CertBin} = file:read_file(CertFile), + [{'Certificate', Cert, not_encrypted}] = public_key:pem_decode(CertBin), + UserBin = rabbit_ct_broker_helpers:rpc(Config, 0, + rabbit_ssl, + peer_cert_auth_name, + [Cert]), + User = binary_to_list(UserBin), + {ok,_} = rabbit_ct_broker_helpers:rabbitmqctl(Config, 0, ["add_user", User, ""]), + {ok, _} = rabbit_ct_broker_helpers:rabbitmqctl(Config, 0, ["set_permissions", "-p", "/", User, ".*", ".*", ".*"]), + {ok, _} = rabbit_ct_broker_helpers:rabbitmqctl(Config, 0, + ["set_topic_permissions", "-p", "/", "guest", "amq.topic", + % Write permission + "test-topic|test-retained-topic|{username}.{client_id}.a|^sp[AB]v\\d+___\\d+", + % Read permission + "test-topic|test-retained-topic|last-will|{username}.{client_id}.a|^sp[AB]v\\d+___\\d+"]), + rabbit_ct_helpers:testcase_started(Config, Testcase). + +end_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:testcase_finished(Config, Testcase). 
+ + +%% ------------------------------------------------------------------- +%% Testsuite cases +%% ------------------------------------------------------------------- + +java(Config) -> + CertsDir = rabbit_ct_helpers:get_config(Config, rmq_certsdir), + MqttPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt), + MqttPort2 = rabbit_ct_broker_helpers:get_node_config(Config, 1, tcp_port_mqtt), + MqttPort3 = rabbit_ct_broker_helpers:get_node_config(Config, 2, tcp_port_mqtt), + MqttSslPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt_tls), + AmqpPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_amqp), + os:putenv("SSL_CERTS_DIR", CertsDir), + os:putenv("MQTT_SSL_PORT", erlang:integer_to_list(MqttSslPort)), + os:putenv("MQTT_PORT", erlang:integer_to_list(MqttPort)), + os:putenv("MQTT_PORT_2", erlang:integer_to_list(MqttPort2)), + os:putenv("MQTT_PORT_3", erlang:integer_to_list(MqttPort3)), + os:putenv("AMQP_PORT", erlang:integer_to_list(AmqpPort)), + DataDir = rabbit_ct_helpers:get_config(Config, data_dir), + MakeResult = rabbit_ct_helpers:make(Config, DataDir, ["tests"]), + {ok, _} = MakeResult. + +rpc(Config, M, F, A) -> + rabbit_ct_broker_helpers:rpc(Config, 0, M, F, A). + +q(P, [K | Rem]) -> + case proplists:get_value(K, P) of + undefined -> undefined; + V -> q(V, Rem) + end; +q(P, []) -> {ok, P}. + diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/.gitignore b/deps/rabbitmq_mqtt/test/java_SUITE_data/.gitignore new file mode 100644 index 0000000000..4c70cdb707 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/.gitignore @@ -0,0 +1,3 @@ +/build/ +/lib/ +/target/ diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/.mvn/wrapper/MavenWrapperDownloader.java b/deps/rabbitmq_mqtt/test/java_SUITE_data/.mvn/wrapper/MavenWrapperDownloader.java new file mode 100755 index 0000000000..2e394d5b34 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/.mvn/wrapper/MavenWrapperDownloader.java @@ -0,0 +1,110 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +*/ + +import java.net.*; +import java.io.*; +import java.nio.channels.*; +import java.util.Properties; + +public class MavenWrapperDownloader { + + /** + * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. + */ + private static final String DEFAULT_DOWNLOAD_URL = + "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"; + + /** + * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to + * use instead of the default one. + */ + private static final String MAVEN_WRAPPER_PROPERTIES_PATH = + ".mvn/wrapper/maven-wrapper.properties"; + + /** + * Path where the maven-wrapper.jar will be saved to. 
+ */ + private static final String MAVEN_WRAPPER_JAR_PATH = + ".mvn/wrapper/maven-wrapper.jar"; + + /** + * Name of the property which should be used to override the default download url for the wrapper. + */ + private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; + + public static void main(String args[]) { + System.out.println("- Downloader started"); + File baseDirectory = new File(args[0]); + System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); + + // If the maven-wrapper.properties exists, read it and check if it contains a custom + // wrapperUrl parameter. + File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); + String url = DEFAULT_DOWNLOAD_URL; + if(mavenWrapperPropertyFile.exists()) { + FileInputStream mavenWrapperPropertyFileInputStream = null; + try { + mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); + Properties mavenWrapperProperties = new Properties(); + mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); + url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); + } catch (IOException e) { + System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); + } finally { + try { + if(mavenWrapperPropertyFileInputStream != null) { + mavenWrapperPropertyFileInputStream.close(); + } + } catch (IOException e) { + // Ignore ... + } + } + } + System.out.println("- Downloading from: : " + url); + + File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); + if(!outputFile.getParentFile().exists()) { + if(!outputFile.getParentFile().mkdirs()) { + System.out.println( + "- ERROR creating output direcrory '" + outputFile.getParentFile().getAbsolutePath() + "'"); + } + } + System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); + try { + downloadFileFromURL(url, outputFile); + System.out.println("Done"); + System.exit(0); + } catch (Throwable e) { + System.out.println("- Error downloading"); + e.printStackTrace(); + System.exit(1); + } + } + + private static void downloadFileFromURL(String urlString, File destination) throws Exception { + URL website = new URL(urlString); + ReadableByteChannel rbc; + rbc = Channels.newChannel(website.openStream()); + FileOutputStream fos = new FileOutputStream(destination); + fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); + fos.close(); + rbc.close(); + } + +} diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/.mvn/wrapper/maven-wrapper.jar b/deps/rabbitmq_mqtt/test/java_SUITE_data/.mvn/wrapper/maven-wrapper.jar Binary files differnew file mode 100755 index 0000000000..01e6799737 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/.mvn/wrapper/maven-wrapper.jar diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/.mvn/wrapper/maven-wrapper.properties b/deps/rabbitmq_mqtt/test/java_SUITE_data/.mvn/wrapper/maven-wrapper.properties new file mode 100755 index 0000000000..00d32aab1d --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1 @@ +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip
\ No newline at end of file diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/Makefile b/deps/rabbitmq_mqtt/test/java_SUITE_data/Makefile new file mode 100644 index 0000000000..e2f9748eb2 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/Makefile @@ -0,0 +1,27 @@ +export PATH :=$(CURDIR):$(PATH) +HOSTNAME := $(shell hostname) +MVN_FLAGS += -Ddeps.dir="$(abspath $(DEPS_DIR))" \ + -Dhostname=$(HOSTNAME) \ + -Dcerts.dir=$(SSL_CERTS_DIR) \ + -Dmqtt.ssl.port=$(MQTT_SSL_PORT) \ + -Dmqtt.port=$(MQTT_PORT) \ + -Dmqtt.port.2=$(MQTT_PORT_2) \ + -Dmqtt.port.3=$(MQTT_PORT_3) \ + -Damqp.port=$(AMQP_PORT) + +.PHONY: deps tests clean distclean + +deps: + mkdir -p lib + @mvnw dependency:copy-dependencies -DoutputDirectory=lib + +tests: + # Note: to run a single test + # @mvnw -q $(MVN_FLAGS) -Dtest=MqttTest#subscribeMultiple test + @mvnw -q $(MVN_FLAGS) test + +clean: + @mvnw clean + +distclean: clean + rm -f lib/*.jar diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/mvnw b/deps/rabbitmq_mqtt/test/java_SUITE_data/mvnw new file mode 100755 index 0000000000..8b9da3b8b6 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/mvnw @@ -0,0 +1,286 @@ +#!/bin/sh +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Maven2 Start Up Batch script +# +# Required ENV vars: +# ------------------ +# JAVA_HOME - location of a JDK home dir +# +# Optional ENV vars +# ----------------- +# M2_HOME - location of maven2's installed home dir +# MAVEN_OPTS - parameters passed to the Java VM when running Maven +# e.g. to debug Maven itself, use +# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +# MAVEN_SKIP_RC - flag to disable loading of mavenrc files +# ---------------------------------------------------------------------------- + +if [ -z "$MAVEN_SKIP_RC" ] ; then + + if [ -f /etc/mavenrc ] ; then + . /etc/mavenrc + fi + + if [ -f "$HOME/.mavenrc" ] ; then + . "$HOME/.mavenrc" + fi + +fi + +# OS specific support. $var _must_ be set to either true or false. 
+cygwin=false; +darwin=false; +mingw=false +case "`uname`" in + CYGWIN*) cygwin=true ;; + MINGW*) mingw=true;; + Darwin*) darwin=true + # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home + # See https://developer.apple.com/library/mac/qa/qa1170/_index.html + if [ -z "$JAVA_HOME" ]; then + if [ -x "/usr/libexec/java_home" ]; then + export JAVA_HOME="`/usr/libexec/java_home`" + else + export JAVA_HOME="/Library/Java/Home" + fi + fi + ;; +esac + +if [ -z "$JAVA_HOME" ] ; then + if [ -r /etc/gentoo-release ] ; then + JAVA_HOME=`java-config --jre-home` + fi +fi + +if [ -z "$M2_HOME" ] ; then + ## resolve links - $0 may be a link to maven's home + PRG="$0" + + # need this for relative symlinks + while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG="`dirname "$PRG"`/$link" + fi + done + + saveddir=`pwd` + + M2_HOME=`dirname "$PRG"`/.. + + # make it fully qualified + M2_HOME=`cd "$M2_HOME" && pwd` + + cd "$saveddir" + # echo Using m2 at $M2_HOME +fi + +# For Cygwin, ensure paths are in UNIX format before anything is touched +if $cygwin ; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --unix "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --unix "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --unix "$CLASSPATH"` +fi + +# For Mingw, ensure paths are in UNIX format before anything is touched +if $mingw ; then + [ -n "$M2_HOME" ] && + M2_HOME="`(cd "$M2_HOME"; pwd)`" + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" + # TODO classpath? +fi + +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + if $darwin ; then + javaHome="`dirname \"$javaExecutable\"`" + javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" + else + javaExecutable="`readlink -f \"$javaExecutable\"`" + fi + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "$JAVACMD" ] ; then + if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." >&2 + echo " We cannot execute $JAVACMD" >&2 + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." 
+fi + +CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher + +# traverses directory structure from process work directory to filesystem root +# first directory with .mvn subdirectory is considered project base directory +find_maven_basedir() { + + if [ -z "$1" ] + then + echo "Path not specified to find_maven_basedir" + return 1 + fi + + basedir="$1" + wdir="$1" + while [ "$wdir" != '/' ] ; do + if [ -d "$wdir"/.mvn ] ; then + basedir=$wdir + break + fi + # workaround for JBEAP-8937 (on Solaris 10/Sparc) + if [ -d "${wdir}" ]; then + wdir=`cd "$wdir/.."; pwd` + fi + # end of workaround + done + echo "${basedir}" +} + +# concatenates all lines of a file +concat_lines() { + if [ -f "$1" ]; then + echo "$(tr -s '\n' ' ' < "$1")" + fi +} + +BASE_DIR=`find_maven_basedir "$(pwd)"` +if [ -z "$BASE_DIR" ]; then + exit 1; +fi + +########################################################################################## +# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +# This allows using the maven wrapper in projects that prohibit checking in binary data. +########################################################################################## +if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found .mvn/wrapper/maven-wrapper.jar" + fi +else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." + fi + jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" + while IFS="=" read key value; do + case "$key" in (wrapperUrl) jarUrl="$value"; break ;; + esac + done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" + if [ "$MVNW_VERBOSE" = true ]; then + echo "Downloading from: $jarUrl" + fi + wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" + + if command -v wget > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found wget ... using wget" + fi + wget "$jarUrl" -O "$wrapperJarPath" + elif command -v curl > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found curl ... using curl" + fi + curl -o "$wrapperJarPath" "$jarUrl" + else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Falling back to using Java to download" + fi + javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" + if [ -e "$javaClass" ]; then + if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Compiling MavenWrapperDownloader.java ..." + fi + # Compiling the Java class + ("$JAVA_HOME/bin/javac" "$javaClass") + fi + if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + # Running the downloader + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Running MavenWrapperDownloader.java ..." 
+ fi + ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") + fi + fi + fi +fi +########################################################################################## +# End of extension +########################################################################################## + +export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} +if [ "$MVNW_VERBOSE" = true ]; then + echo $MAVEN_PROJECTBASEDIR +fi +MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" + +# For Cygwin, switch paths to Windows format before running java +if $cygwin; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --path --windows "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --windows "$CLASSPATH"` + [ -n "$MAVEN_PROJECTBASEDIR" ] && + MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` +fi + +WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +exec "$JAVACMD" \ + $MAVEN_OPTS \ + -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ + "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ + ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/mvnw.cmd b/deps/rabbitmq_mqtt/test/java_SUITE_data/mvnw.cmd new file mode 100755 index 0000000000..a5284c7939 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/mvnw.cmd @@ -0,0 +1,161 @@ +@REM ----------------------------------------------------------------------------
+@REM Licensed to the Apache Software Foundation (ASF) under one
+@REM or more contributor license agreements. See the NOTICE file
+@REM distributed with this work for additional information
+@REM regarding copyright ownership. The ASF licenses this file
+@REM to you under the Apache License, Version 2.0 (the
+@REM "License"); you may not use this file except in compliance
+@REM with the License. You may obtain a copy of the License at
+@REM
+@REM https://www.apache.org/licenses/LICENSE-2.0
+@REM
+@REM Unless required by applicable law or agreed to in writing,
+@REM software distributed under the License is distributed on an
+@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+@REM KIND, either express or implied. See the License for the
+@REM specific language governing permissions and limitations
+@REM under the License.
+@REM ----------------------------------------------------------------------------
+
+@REM ----------------------------------------------------------------------------
+@REM Maven2 Start Up Batch script
+@REM
+@REM Required ENV vars:
+@REM JAVA_HOME - location of a JDK home dir
+@REM
+@REM Optional ENV vars
+@REM M2_HOME - location of maven2's installed home dir
+@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
+@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
+@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
+@REM e.g. to debug Maven itself, use
+@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
+@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
+@REM ----------------------------------------------------------------------------
+
+@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
+@echo off
+@REM set title of command window
+title %0
+@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
+@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
+
+@REM set %HOME% to equivalent of $HOME
+if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
+
+@REM Execute a user defined script before this one
+if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
+@REM check for pre script, once with legacy .bat ending and once with .cmd ending
+if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
+if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
+:skipRcPre
+
+@setlocal
+
+set ERROR_CODE=0
+
+@REM To isolate internal variables from possible post scripts, we use another setlocal
+@setlocal
+
+@REM ==== START VALIDATION ====
+if not "%JAVA_HOME%" == "" goto OkJHome
+
+echo.
+echo Error: JAVA_HOME not found in your environment. >&2
+echo Please set the JAVA_HOME variable in your environment to match the >&2
+echo location of your Java installation. >&2
+echo.
+goto error
+
+:OkJHome
+if exist "%JAVA_HOME%\bin\java.exe" goto init
+
+echo.
+echo Error: JAVA_HOME is set to an invalid directory. >&2
+echo JAVA_HOME = "%JAVA_HOME%" >&2
+echo Please set the JAVA_HOME variable in your environment to match the >&2
+echo location of your Java installation. >&2
+echo.
+goto error
+
+@REM ==== END VALIDATION ====
+
+:init
+
+@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
+@REM Fallback to current working directory if not found.
+
+set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
+IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
+
+set EXEC_DIR=%CD%
+set WDIR=%EXEC_DIR%
+:findBaseDir
+IF EXIST "%WDIR%"\.mvn goto baseDirFound
+cd ..
+IF "%WDIR%"=="%CD%" goto baseDirNotFound
+set WDIR=%CD%
+goto findBaseDir
+
+:baseDirFound
+set MAVEN_PROJECTBASEDIR=%WDIR%
+cd "%EXEC_DIR%"
+goto endDetectBaseDir
+
+:baseDirNotFound
+set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
+cd "%EXEC_DIR%"
+
+:endDetectBaseDir
+
+IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
+
+@setlocal EnableExtensions EnableDelayedExpansion
+for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
+@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
+
+:endReadAdditionalConfig
+
+SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
+set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
+set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
+
+set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"
+FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO (
+ IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
+)
+
+@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
+@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
+if exist %WRAPPER_JAR% (
+ echo Found %WRAPPER_JAR%
+) else (
+ echo Couldn't find %WRAPPER_JAR%, downloading it ...
+ echo Downloading from: %DOWNLOAD_URL%
+ powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"
+ echo Finished downloading %WRAPPER_JAR%
+)
+@REM End of extension
+
+%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
+if ERRORLEVEL 1 goto error
+goto end
+
+:error
+set ERROR_CODE=1
+
+:end
+@endlocal & set ERROR_CODE=%ERROR_CODE%
+
+if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
+@REM check for post script, once with legacy .bat ending and once with .cmd ending
+if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
+if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
+:skipRcPost
+
+@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
+if "%MAVEN_BATCH_PAUSE%" == "on" pause
+
+if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
+
+exit /B %ERROR_CODE%
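The wrapper scripts above only bootstrap Maven; the broker coordinates reach the JUnit tests through the -D system properties assembled in the java_SUITE_data Makefile (hostname, certs.dir, mqtt.port, mqtt.port.2, mqtt.port.3, mqtt.ssl.port, amqp.port), which java_SUITE.erl exports as environment variables before invoking make. A minimal sketch of how an additional test class could resolve the same properties, assuming the property names used by the Makefile; the class name and the fallback defaults are illustrative and not part of this patch:

    // Illustrative sketch only: mirrors how MqttTest reads the -D properties
    // passed via MVN_FLAGS in the Makefile; the defaults are assumptions for local runs.
    final class TestBrokerAddresses {

        private TestBrokerAddresses() {}

        static String host() {
            // "hostname" is populated from $(shell hostname) in the Makefile
            return System.getProperty("hostname", "localhost");
        }

        static int mqttPort() {
            return Integer.parseInt(System.getProperty("mqtt.port", "1883"));
        }

        static int mqttTlsPort() {
            return Integer.parseInt(System.getProperty("mqtt.ssl.port", "8883"));
        }

        static int amqpPort() {
            return Integer.parseInt(System.getProperty("amqp.port", "5672"));
        }

        static String brokerUri() {
            return "tcp://" + host() + ":" + mqttPort();
        }
    }

With the wrapper in place, a single test can be run the way the Makefile comment suggests, for example ./mvnw -q $(MVN_FLAGS) -Dtest=MqttTest#subscribeQos0 test (any test method name from MqttTest works in place of subscribeQos0).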
diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/pom.xml b/deps/rabbitmq_mqtt/test/java_SUITE_data/pom.xml new file mode 100644 index 0000000000..b27b58c172 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/pom.xml @@ -0,0 +1,137 @@ +<?xml version="1.0"?> +<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" + xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> + <modelVersion>4.0.0</modelVersion> + + <groupId>com.rabbitmq</groupId> + <artifactId>amqp-client-mqtt</artifactId> + <version>3.8.0-SNAPSHOT</version> + <packaging>jar</packaging> + + <name>RabbitMQ MQTT plugin dependencies list</name> + <description>Fetches test dependencies only.</description> + <url>https://www.rabbitmq.com</url> + + <dependencies> + <dependency> + <groupId>org.eclipse.paho</groupId> + <artifactId>org.eclipse.paho.client.mqttv3</artifactId> + <version>[1.2.1,)</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>com.rabbitmq</groupId> + <artifactId>amqp-client</artifactId> + <version>5.7.3</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.junit.jupiter</groupId> + <artifactId>junit-jupiter</artifactId> + <version>5.5.2</version> + <scope>test</scope> + </dependency> + </dependencies> + + <properties> + <test-keystore.ca>${project.build.directory}/ca.keystore</test-keystore.ca> + <test-keystore.password>bunnychow</test-keystore.password> + <groovy-scripts.dir>${basedir}/src/test/scripts</groovy-scripts.dir> + </properties> + + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-surefire-plugin</artifactId> + <version>2.22.2</version> + <configuration> + <environmentVariables> + <DEPS_DIR>${deps.dir}</DEPS_DIR> + </environmentVariables> + <systemPropertyVariables> + <hostname>${hostname}</hostname> + <certs.dir>${certs.dir}</certs.dir> + <mqtt.ssl.port>${mqtt.ssl.port}</mqtt.ssl.port> + <mqtt.port>${mqtt.port}</mqtt.port> + <mqtt.port.2>${mqtt.port.2}</mqtt.port.2> + <amqp.port>${amqp.port}</amqp.port> + + <test-keystore.ca>${test-keystore.ca}</test-keystore.ca> + <test-keystore.password>${test-keystore.password}</test-keystore.password> + <test-client-cert.path>${certs.dir}/client/keycert.p12</test-client-cert.path> + <test-client-cert.password>bunnychow</test-client-cert.password> + + </systemPropertyVariables> + <!-- + needed because of bug in OpenJDK 8 u181 on Debian distros + see https://stackoverflow.com/questions/53010200/maven-surefire-could-not-find-forkedbooter-class + --> + <argLine>-Djdk.net.URLClassPath.disableClassPathURLCheck=true</argLine> + </configuration> + </plugin> + <plugin> + <groupId>org.codehaus.gmaven</groupId> + <artifactId>groovy-maven-plugin</artifactId> + <version>2.1.1</version> + <dependencies> + <dependency> + <groupId>org.codehaus.groovy</groupId> + <artifactId>groovy-all</artifactId> + <version>2.4.17</version> + </dependency> + </dependencies> + <executions> + <execution> + <phase>generate-test-resources</phase> + <id>remove-old-test-keystores</id> + <goals> + <goal>execute</goal> + </goals> + <configuration> + <source> + ${groovy-scripts.dir}/remove_old_test_keystores.groovy + </source> + </configuration> + </execution> + </executions> + </plugin> + + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>keytool-maven-plugin</artifactId> + <version>1.5</version> + <executions> + <execution> + <id>generate-test-ca-keystore</id> + 
<phase>generate-test-resources</phase> + <goals> + <goal>importCertificate</goal> + </goals> + <configuration> + <file>${certs.dir}/testca/cacert.pem</file> + <keystore>${test-keystore.ca}</keystore> + <storepass>${test-keystore.password}</storepass> + <noprompt>true</noprompt> + <alias>server1</alias> + </configuration> + </execution> + </executions> + </plugin> + + <plugin> + <artifactId>maven-compiler-plugin</artifactId> + <version>3.8.1</version> + <configuration> + <source>1.8</source> + <target>1.8</target> + <compilerArgs> + <arg>-Xlint:deprecation</arg> + <arg>-Xlint:unchecked</arg> + </compilerArgs> + </configuration> + </plugin> + + </plugins> + </build> +</project> diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test.config b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test.config new file mode 100644 index 0000000000..3d6bafff86 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test.config @@ -0,0 +1,14 @@ +[{rabbitmq_mqtt, [ + {ssl_cert_login, true}, + {allow_anonymous, true}, + {tcp_listeners, [1883]}, + {ssl_listeners, [8883]} + ]}, + {rabbit, [{ssl_options, [{cacertfile,"%%CERTS_DIR%%/testca/cacert.pem"}, + {certfile,"%%CERTS_DIR%%/server/cert.pem"}, + {keyfile,"%%CERTS_DIR%%/server/key.pem"}, + {verify,verify_peer}, + {fail_if_no_peer_cert,false} + ]} + ]} +]. diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/MqttTest.java b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/MqttTest.java new file mode 100644 index 0000000000..24c4a0be14 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/MqttTest.java @@ -0,0 +1,1030 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at https://mozilla.org/MPL/2.0/. +// +// Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. 
+// + +package com.rabbitmq.mqtt.test; + +import com.rabbitmq.client.*; +import org.eclipse.paho.client.mqttv3.*; +import org.eclipse.paho.client.mqttv3.internal.NetworkModule; +import org.eclipse.paho.client.mqttv3.internal.TCPNetworkModule; +import org.eclipse.paho.client.mqttv3.internal.wire.MqttPingReq; +import org.eclipse.paho.client.mqttv3.internal.wire.MqttWireMessage; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; + +import javax.net.SocketFactory; +import java.io.*; +import java.net.InetAddress; +import java.net.Socket; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.*; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BooleanSupplier; + +import static org.junit.jupiter.api.Assertions.*; + +/*** + * MQTT v3.1 tests + * + */ + +public class MqttTest implements MqttCallback { + + private static final Duration EXPECT_TIMEOUT = Duration.ofSeconds(10); + + private final String host = "localhost"; + private final String brokerUrl = "tcp://" + host + ":" + getPort(); + private final String brokerThreeUrl = "tcp://" + host + ":" + getThirdPort(); + private volatile List<MqttMessage> receivedMessages; + + private final byte[] payload = "payload".getBytes(); + private final String topic = "test-topic"; + private final String retainedTopic = "test-retained-topic"; + private int testDelay = 2000; + + private volatile long lastReceipt; + private volatile boolean expectConnectionFailure; + private volatile boolean failOnDelivery = false; + + private Connection conn; + private Channel ch; + + private static int getPort() { + Object port = System.getProperty("mqtt.port", "1883"); + assertNotNull(port); + return Integer.parseInt(port.toString()); + } + + private static int getThirdPort() { + Object port = System.getProperty("mqtt.port.3", "1883"); + assertNotNull(port); + return Integer.parseInt(port.toString()); + } + + private static int getAmqpPort() { + Object port = System.getProperty("amqp.port", "5672"); + assertNotNull(port); + return Integer.parseInt(port.toString()); + } + + // override the 10s limit + private class TestMqttConnectOptions extends MqttConnectOptions { + private int keepAliveInterval = 60; + private final String user_name = "guest"; + private final String password = "guest"; + + public TestMqttConnectOptions() { + super.setUserName(user_name); + super.setPassword(password.toCharArray()); + super.setCleanSession(true); + super.setKeepAliveInterval(60); + // PublishMultiple overwhelms Paho defaults + super.setMaxInflight(15000); + } + + @Override + public void setKeepAliveInterval(int keepAliveInterval) { + this.keepAliveInterval = keepAliveInterval; + } + + @Override + public int getKeepAliveInterval() { + return this.keepAliveInterval; + } + } + + private MqttClient newClient(TestInfo testInfo) throws MqttException { + return newClient(clientId(testInfo)); + } + + private MqttClient newClient(String client_id) throws MqttException { + return newClient(brokerUrl, client_id); + } + + private MqttClient newClient(String uri, TestInfo testInfo) throws MqttException { + return newClient(uri, clientId(testInfo)); + } + + private MqttClient newClient(String uri, String client_id) throws MqttException { + return new MqttClient(uri, 
client_id, null); + } + + private MqttClient newConnectedClient(TestInfo testInfo, MqttConnectOptions conOpt) throws MqttException { + return newConnectedClient(clientId(testInfo), conOpt); + } + + private MqttClient newConnectedClient(String client_id, MqttConnectOptions conOpt) throws MqttException { + MqttClient client = newClient(brokerUrl, client_id); + client.connect(conOpt); + return client; + } + + private static String clientId(TestInfo info) { + return "test-" + info.getTestMethod().get().getName(); + } + + private void disconnect(MqttClient client) { + try { + if (client.isConnected()) { + client.disconnect(5000); + } + } catch (Exception ignored) {} + } + + @BeforeEach + public void setUp() { + receivedMessages = Collections.synchronizedList(new ArrayList<>()); + expectConnectionFailure = false; + } + + @AfterEach + public void tearDown() { + // clean any sticky sessions + receivedMessages.clear(); + } + + private void setUpAmqp() throws IOException, TimeoutException { + int port = getAmqpPort(); + ConnectionFactory cf = new ConnectionFactory(); + cf.setHost(host); + cf.setPort(port); + conn = cf.newConnection(); + ch = conn.createChannel(); + } + + private void tearDownAmqp() throws IOException { + if (conn.isOpen()) { + conn.close(); + } + } + + @Test + public void connectFirst() throws MqttException, IOException { + NetworkModule networkModule = new TCPNetworkModule(SocketFactory.getDefault(), host, getPort(), ""); + networkModule.start(); + DataInputStream in = new DataInputStream(networkModule.getInputStream()); + OutputStream out = networkModule.getOutputStream(); + + MqttWireMessage message = new MqttPingReq(); + + try { + // ---8<--- + // Copy/pasted from write() in MqttOutputStream.java. + byte[] bytes = message.getHeader(); + byte[] pl = message.getPayload(); + out.write(bytes,0,bytes.length); + + int offset = 0; + int chunckSize = 1024; + while (offset < pl.length) { + int length = Math.min(chunckSize, pl.length - offset); + out.write(pl, offset, length); + offset += chunckSize; + } + // ---8<--- + + // ---8<--- + // Copy/pasted from flush() in MqttOutputStream.java. + out.flush(); + // ---8<--- + + // ---8<--- + // Copy/pasted from readMqttWireMessage() in MqttInputStream.java. 
+ ByteArrayOutputStream bais = new ByteArrayOutputStream(); + byte first = in.readByte(); + // ---8<--- + + fail("Error expected if CONNECT is not first packet"); + } catch (IOException ignored) {} + } + + @Test public void invalidUser(TestInfo info) throws MqttException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + client_opts.setUserName("invalid-user"); + MqttClient client = newClient(info); + try { + client.connect(client_opts); + fail("Authentication failure expected"); + } catch (MqttException ex) { + assertEquals(MqttException.REASON_CODE_FAILED_AUTHENTICATION, ex.getReasonCode()); + } finally { + if (client.isConnected()) { + disconnect(client); + } + } + } + + // rabbitmq/rabbitmq-mqtt#37: QoS 1, clean session = false + @Test public void qos1AndCleanSessionUnset() + throws MqttException, IOException, TimeoutException, InterruptedException { + testQueuePropertiesWithCleanSessionUnset("qos1-no-clean-session", 1, true, false); + } + + protected void testQueuePropertiesWithCleanSessionSet(String cid, int qos, boolean durable, boolean autoDelete) + throws IOException, MqttException, TimeoutException, InterruptedException { + testQueuePropertiesWithCleanSession(true, cid, qos, durable, autoDelete); + } + + protected void testQueuePropertiesWithCleanSessionUnset(String cid, int qos, boolean durable, boolean autoDelete) + throws IOException, MqttException, TimeoutException, InterruptedException { + testQueuePropertiesWithCleanSession(false, cid, qos, durable, autoDelete); + } + + protected void testQueuePropertiesWithCleanSession(boolean cleanSession, String cid, int qos, + boolean durable, boolean autoDelete) + throws MqttException, IOException, TimeoutException { + MqttClient c = newClient(brokerUrl, cid); + MqttConnectOptions opts = new TestMqttConnectOptions(); + opts.setUserName("guest"); + opts.setPassword("guest".toCharArray()); + opts.setCleanSession(cleanSession); + c.connect(opts); + + setUpAmqp(); + Channel tmpCh = conn.createChannel(); + + String q = "mqtt-subscription-" + cid + "qos" + qos; + + c.subscribe(topic, qos); + // there is no server-sent notification about subscription + // success so we inject a delay + waitForTestDelay(); + + // ensure the queue is declared with the arguments we expect + // e.g. 
mqtt-subscription-client-3aqos0 + try { + // first ensure the queue exists + tmpCh.queueDeclarePassive(q); + // then assert on properties + Map<String, Object> args = new HashMap<>(); + args.put("x-expires", 86400000); + tmpCh.queueDeclare(q, durable, autoDelete, false, args); + } finally { + if (c.isConnected()) { + c.disconnect(3000); + } + + Channel tmpCh2 = conn.createChannel(); + tmpCh2.queueDelete(q); + tmpCh2.close(); + tearDownAmqp(); + } + } + + @Test public void invalidPassword(TestInfo info) throws MqttException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + client_opts.setUserName("invalid-user"); + client_opts.setPassword("invalid-password".toCharArray()); + MqttClient client = newClient(info); + try { + client.connect(client_opts); + fail("Authentication failure expected"); + } catch (MqttException ex) { + assertEquals(MqttException.REASON_CODE_FAILED_AUTHENTICATION, ex.getReasonCode()); + } finally { + if (client.isConnected()) { + disconnect(client); + } + } + } + + @Test public void emptyPassword(TestInfo info) throws MqttException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + client_opts.setPassword("".toCharArray()); + + MqttClient client = newClient(info); + try { + client.connect(client_opts); + fail("Authentication failure expected"); + } catch (MqttException ex) { + assertEquals(MqttException.REASON_CODE_FAILED_AUTHENTICATION, ex.getReasonCode()); + } + } + + + @Test public void subscribeQos0(TestInfo info) throws MqttException, InterruptedException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newClient(info); + client.connect(client_opts); + client.setCallback(this); + client.subscribe(topic, 0); + + publish(client, topic, 0, payload); + waitAtMost(() -> receivedMessagesSize() == 1); + assertArrayEquals(receivedMessages.get(0).getPayload(), payload); + assertEquals(0, receivedMessages.get(0).getQos()); + disconnect(client); + } + + @Test public void subscribeUnsubscribe(TestInfo info) throws MqttException, InterruptedException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newClient(info); + client.connect(client_opts); + client.setCallback(this); + client.subscribe(topic, 0); + + publish(client, topic, 1, payload); + + waitAtMost(() -> receivedMessagesSize() == 1); + assertArrayEquals(receivedMessages.get(0).getPayload(), payload); + assertEquals(0, receivedMessages.get(0).getQos()); + + client.unsubscribe(topic); + publish(client, topic, 0, payload); + waitAtMost(() -> receivedMessagesSize() == 1); + disconnect(client); + } + + @Test public void subscribeQos1(TestInfo info) throws MqttException, InterruptedException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newClient(info); + client.connect(client_opts); + client.setCallback(this); + client.subscribe(topic, 1); + + publish(client, topic, 0, payload); + publish(client, topic, 1, payload); + publish(client, topic, 2, payload); + + waitAtMost(() -> receivedMessagesSize() == 3); + + MqttMessage msg1 = receivedMessages.get(0); + MqttMessage msg2 = receivedMessages.get(1); + MqttMessage msg3 = receivedMessages.get(1); + + assertArrayEquals(msg1.getPayload(), payload); + assertEquals(0, msg1.getQos()); + + assertArrayEquals(msg2.getPayload(), payload); + assertEquals(1, msg2.getQos()); + + // Downgraded QoS 2 to QoS 1 + assertArrayEquals(msg3.getPayload(), payload); + assertEquals(1, msg3.getQos()); + + disconnect(client); + } + + @Test public 
void subscribeReceivesRetainedMessagesWithMatchingQoS(TestInfo info) + throws MqttException, InterruptedException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newClient(info); + client.connect(client_opts); + client.setCallback(this); + clearRetained(client, retainedTopic); + client.subscribe(retainedTopic, 1); + + publishRetained(client, retainedTopic, 1, "retain 1".getBytes(StandardCharsets.UTF_8)); + publishRetained(client, retainedTopic, 1, "retain 2".getBytes(StandardCharsets.UTF_8)); + + waitAtMost(() -> receivedMessagesSize() == 2); + MqttMessage lastMsg = receivedMessages.get(1); + + client.unsubscribe(retainedTopic); + receivedMessages.clear(); + client.subscribe(retainedTopic, 1); + waitAtMost(() -> receivedMessagesSize() == 1); + final MqttMessage retainedMsg = receivedMessages.get(0); + assertEquals(new String(lastMsg.getPayload()), + new String(retainedMsg.getPayload())); + + disconnect(client); + } + + @Test public void subscribeReceivesRetainedMessagesWithDowngradedQoS(TestInfo info) + throws MqttException, InterruptedException { + MqttConnectOptions clientOpts = new TestMqttConnectOptions(); + MqttClient client = newConnectedClient(info, clientOpts); + client.setCallback(this); + clearRetained(client, retainedTopic); + client.subscribe(retainedTopic, 1); + + publishRetained(client, retainedTopic, 1, "retain 1".getBytes(StandardCharsets.UTF_8)); + + waitAtMost(() -> receivedMessagesSize() == 1); + MqttMessage lastMsg = receivedMessages.get(0); + + client.unsubscribe(retainedTopic); + receivedMessages.clear(); + final int subscribeQoS = 0; + client.subscribe(retainedTopic, subscribeQoS); + + waitAtMost(() -> receivedMessagesSize() == 1); + final MqttMessage retainedMsg = receivedMessages.get(0); + assertEquals(new String(lastMsg.getPayload()), + new String(retainedMsg.getPayload())); + assertEquals(subscribeQoS, retainedMsg.getQos()); + + disconnect(client); + } + + @Test public void publishWithEmptyMessageClearsRetained(TestInfo info) + throws MqttException, InterruptedException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newConnectedClient(info, client_opts); + client.setCallback(this); + clearRetained(client, retainedTopic); + client.subscribe(retainedTopic, 1); + + publishRetained(client, retainedTopic, 1, "retain 1".getBytes(StandardCharsets.UTF_8)); + publishRetained(client, retainedTopic, 1, "retain 2".getBytes(StandardCharsets.UTF_8)); + + waitAtMost(() -> receivedMessagesSize() == 2); + client.unsubscribe(retainedTopic); + receivedMessages.clear(); + + clearRetained(client, retainedTopic); + client.subscribe(retainedTopic, 1); + waitAtMost(() -> receivedMessagesSize() == 0); + + disconnect(client); + } + + @Test public void topics(TestInfo info) throws MqttException, InterruptedException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newConnectedClient(info, client_opts); + client.setCallback(this); + client.subscribe("/+/test-topic/#"); + String[] cases = new String[]{"/pre/test-topic2", "/test-topic", "/a/test-topic/b/c/d", "/frob/test-topic"}; + List<String> expected = Arrays.asList("/a/test-topic/b/c/d", "/frob/test-topic"); + for(String example : cases){ + publish(client, example, 0, example.getBytes()); + } + waitAtMost(() -> receivedMessagesSize() == expected.size()); + for (MqttMessage m : receivedMessages){ + expected.contains(new String(m.getPayload())); + } + disconnect(client); + } + + @Test public void 
sparkplugTopics(TestInfo info) throws MqttException, IOException, InterruptedException, TimeoutException { + final String amqp091Topic = "spBv1___0.MACLab.DDATA.Opto22.CLX"; + final String sparkplugTopic = "spBv1.0/MACLab/+/Opto22/CLX"; + + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newConnectedClient(info, client_opts); + client.setCallback(this); + client.subscribe(sparkplugTopic); + + setUpAmqp(); + ch.basicPublish("amq.topic", amqp091Topic, MessageProperties.MINIMAL_BASIC, payload); + tearDownAmqp(); + + waitAtMost(() -> receivedMessagesSize() == 1); + disconnect(client); + } + + @Test public void nonCleanSession(TestInfo info) throws MqttException, InterruptedException { + String clientIdBase = clientId(info); + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + client_opts.setCleanSession(false); + MqttClient client = newConnectedClient(clientIdBase + "-1", client_opts); + client.subscribe(topic, 1); + client.disconnect(); + + MqttClient client2 = newConnectedClient(clientIdBase + "-2", client_opts); + publish(client2, topic, 1, payload); + client2.disconnect(); + + client.setCallback(this); + client.connect(client_opts); + + waitAtMost(() -> receivedMessagesSize() == 1); + assertArrayEquals(receivedMessages.get(0).getPayload(), payload); + disconnect(client); + } + + @Test public void sessionRedelivery(TestInfo info) throws MqttException, InterruptedException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + client_opts.setCleanSession(false); + MqttClient client = newConnectedClient(info, client_opts); + client.subscribe(topic, 1); + disconnect(client); + + MqttClient client2 = newConnectedClient(info, client_opts); + publish(client2, topic, 1, payload); + disconnect(client2); + + failOnDelivery = true; + + // Connection should fail. Messages will be redelivered. + client.setCallback(this); + client.connect(client_opts); + + // Message has been delivered but connection has failed. 
+ waitAtMost(() -> receivedMessagesSize() == 1); + assertArrayEquals(receivedMessages.get(0).getPayload(), payload); + + assertFalse(client.isConnected()); + + receivedMessages.clear(); + failOnDelivery = false; + + client.setCallback(this); + client.connect(client_opts); + + // Message has been redelivered after session resume + waitAtMost(() -> receivedMessagesSize() == 1); + assertArrayEquals(receivedMessages.get(0).getPayload(), payload); + assertTrue(client.isConnected()); + disconnect(client); + + receivedMessages.clear(); + + client.setCallback(this); + waitAtMost(() -> client.isConnected() == false); + client.connect(client_opts); + + // This time messaage are acknowledged and won't be redelivered + waitAtMost(() -> receivedMessagesSize() == 0); + assertEquals(0, receivedMessages.size()); + + disconnect(client); + } + + @Test public void cleanSession(TestInfo info) throws MqttException, InterruptedException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + client_opts.setCleanSession(false); + MqttClient client = newConnectedClient(info, client_opts); + client.subscribe(topic, 1); + client.disconnect(); + + MqttClient client2 = newConnectedClient(info, client_opts); + publish(client2, topic, 1, payload); + disconnect(client2); + + client_opts.setCleanSession(true); + client.connect(client_opts); + client.setCallback(this); + client.subscribe(topic, 1); + + waitAtMost(() -> receivedMessagesSize() == 0); + client.unsubscribe(topic); + disconnect(client); + } + + @Test public void multipleClientIds(TestInfo info) throws MqttException, InterruptedException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newConnectedClient(info, client_opts); + // uses duplicate client ID + MqttClient client2 = newConnectedClient(info, client_opts); + // the older connection with this client ID will be closed + waitAtMost(() -> client.isConnected() == false); + disconnect(client2); + } + + @Test public void multipleClusterClientIds(TestInfo info) throws MqttException, InterruptedException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newConnectedClient(info, client_opts); + MqttClient client3 = newClient(brokerThreeUrl, info); + client3.connect(client_opts); + waitAtMost(() -> client.isConnected() == false); + disconnect(client3); + } + + @Test public void ping(TestInfo info) throws MqttException, InterruptedException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + client_opts.setKeepAliveInterval(1); + MqttClient client = newConnectedClient(info, client_opts); + waitAtMost(() -> client.isConnected()); + disconnect(client); + } + + @Test public void will(TestInfo info) throws MqttException, InterruptedException, IOException { + String clientIdBase = clientId(info); + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client2 = newConnectedClient(clientIdBase + "-2", client_opts); + client2.subscribe(topic); + client2.setCallback(this); + + final SocketFactory factory = SocketFactory.getDefault(); + final ArrayList<Socket> sockets = new ArrayList<>(); + SocketFactory testFactory = new SocketFactory() { + public Socket createSocket(String s, int i) throws IOException { + Socket sock = factory.createSocket(s, i); + sockets.add(sock); + return sock; + } + public Socket createSocket(String s, int i, InetAddress a, int i1) { + return null; + } + public Socket createSocket(InetAddress a, int i) { + return null; + } + public Socket createSocket(InetAddress a, int 
i, InetAddress a1, int i1) { + return null; + } + @Override + public Socket createSocket() { + Socket sock = new Socket(); + sockets.add(sock); + return sock; + } + }; + + MqttClient client = newClient(clientIdBase + "-1"); + MqttTopic willTopic = client.getTopic(topic); + + MqttConnectOptions opts = new TestMqttConnectOptions(); + opts.setSocketFactory(testFactory); + opts.setWill(willTopic, payload, 0, false); + opts.setCleanSession(false); + + client.connect(opts); + + assertTrue(sockets.size() >= 1); + expectConnectionFailure = true; + sockets.get(0).close(); + + waitAtMost(() -> receivedMessagesSize() == 1); + assertArrayEquals(receivedMessages.get(0).getPayload(), payload); + client2.unsubscribe(topic); + disconnect(client2); + } + + @Test public void willIsRetained(TestInfo info) throws MqttException, InterruptedException, IOException { + String clientIdBase = clientId(info); + MqttConnectOptions client2_opts = new TestMqttConnectOptions(); + client2_opts.setCleanSession(true); + MqttClient client2 = newConnectedClient(clientIdBase + "-2", client2_opts); + client2.setCallback(this); + + clearRetained(client2, retainedTopic); + client2.subscribe(retainedTopic, 1); + disconnect(client2); + + final SocketFactory factory = SocketFactory.getDefault(); + final ArrayList<Socket> sockets = new ArrayList<>(); + SocketFactory testFactory = new SocketFactory() { + public Socket createSocket(String s, int i) throws IOException { + Socket sock = factory.createSocket(s, i); + sockets.add(sock); + return sock; + } + public Socket createSocket(String s, int i, InetAddress a, int i1) { + return null; + } + public Socket createSocket(InetAddress a, int i) { + return null; + } + public Socket createSocket(InetAddress a, int i, InetAddress a1, int i1) { + return null; + } + @Override + public Socket createSocket() { + Socket sock = new Socket(); + sockets.add(sock); + return sock; + } + }; + + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + + MqttClient client = newClient(clientIdBase + "-1"); + MqttTopic willTopic = client.getTopic(retainedTopic); + byte[] willPayload = "willpayload".getBytes(); + + client_opts.setSocketFactory(testFactory); + client_opts.setWill(willTopic, willPayload, 1, true); + + client.connect(client_opts); + + assertEquals(1, sockets.size()); + sockets.get(0).close(); + + // let last will propagate after disconnection + waitForTestDelay(); + + client2.connect(client2_opts); + client2.setCallback(this); + client2.subscribe(retainedTopic, 1); + + waitAtMost(() -> receivedMessagesSize() == 1); + assertArrayEquals(receivedMessages.get(0).getPayload(), willPayload); + client2.unsubscribe(topic); + disconnect(client2); + } + + @Test public void subscribeMultiple(TestInfo info) throws MqttException { + String clientIdBase = clientId(info); + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newConnectedClient(clientIdBase + "-1", client_opts); + publish(client, "/test-topic/1", 1, "msq1-qos1".getBytes()); + + MqttClient client2 = newConnectedClient(clientIdBase + "-2", client_opts); + client2.setCallback(this); + client2.subscribe("/test-topic/#"); + client2.subscribe("/test-topic/#"); + + publish(client, "/test-topic/2", 0, "msq2-qos0".getBytes()); + publish(client, "/test-topic/3", 1, "msq3-qos1".getBytes()); + publish(client, "/test-topic/4", 2, "msq3-qos2".getBytes()); + publish(client, topic, 0, "msq4-qos0".getBytes()); + publish(client, topic, 1, "msq4-qos1".getBytes()); + + + assertEquals(3, receivedMessages.size()); + 
disconnect(client); + disconnect(client2); + } + + @Test public void publishMultiple() throws MqttException, InterruptedException { + int pubCount = 50; + for (int subQos=0; subQos <= 2; subQos++){ + for (int pubQos=0; pubQos <= 2; pubQos++){ + // avoid reusing the client in this test as a shared + // client cannot handle connection churn very well. MK. + String cid = "test-sub-qos-" + subQos + "-pub-qos-" + pubQos; + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newClient(brokerUrl, cid); + client.connect(client_opts); + client.subscribe(topic, subQos); + client.setCallback(this); + long start = System.currentTimeMillis(); + for (int i=0; i<pubCount; i++){ + publish(client, topic, pubQos, payload); + } + + waitAtMost(() -> receivedMessagesSize() == pubCount); + System.out.println("publish QOS" + pubQos + " subscribe QOS" + subQos + + ", " + pubCount + " msgs took " + + (lastReceipt - start)/1000.0 + "sec"); + client.disconnect(5000); + receivedMessages.clear(); + } + } + } + + @Test public void topicAuthorisationPublish(TestInfo info) throws Exception { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newConnectedClient(info, client_opts); + client.setCallback(this); + client.subscribe("some/test-topic"); + publish(client, "some/test-topic", 1, "content".getBytes()); + waitAtMost(() -> receivedMessagesSize() == 1); + assertTrue(client.isConnected()); + try { + publish(client, "forbidden-topic", 1, "content".getBytes()); + fail("Publishing on a forbidden topic, an exception should have been thrown"); + client.disconnect(); + } catch(Exception e) { + // OK + } + } + + @Test public void topicAuthorisationSubscribe(TestInfo info) throws Exception { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newConnectedClient(info, client_opts); + client.setCallback(this); + client.subscribe("some/test-topic"); + try { + client.subscribe("forbidden-topic"); + fail("Subscribing to a forbidden topic, an exception should have been thrown"); + client.disconnect(); + } catch(Exception e) { + // OK + e.printStackTrace(); + } + } + + @Test public void lastWillDowngradesQoS2(TestInfo info) throws Exception { + String lastWillTopic = "test-topic-will-downgrades-qos"; + + MqttConnectOptions client2Opts = new TestMqttConnectOptions(); + MqttClient client2 = newConnectedClient(info, client2Opts); + client2.subscribe(lastWillTopic); + client2.setCallback(this); + + final SocketFactory factory = SocketFactory.getDefault(); + final ArrayList<Socket> sockets = new ArrayList<>(); + SocketFactory testFactory = new SocketFactory() { + public Socket createSocket(String s, int i) throws IOException { + Socket sock = factory.createSocket(s, i); + sockets.add(sock); + return sock; + } + public Socket createSocket(String s, int i, InetAddress a, int i1) { + return null; + } + public Socket createSocket(InetAddress a, int i) { + return null; + } + public Socket createSocket(InetAddress a, int i, InetAddress a1, int i1) { + return null; + } + @Override + public Socket createSocket() { + Socket sock = new Socket(); + sockets.add(sock); + return sock; + } + }; + + MqttConnectOptions clientOpts = new TestMqttConnectOptions(); + + MqttClient client = newClient("test-topic-will-downgrades-qos"); + clientOpts.setSocketFactory(testFactory); + MqttTopic willTopic = client.getTopic(lastWillTopic); + clientOpts.setWill(willTopic, payload, 2, false); + clientOpts.setCleanSession(false); + client.connect(clientOpts); + + 
waitAtMost(() -> sockets.size() == 1); + expectConnectionFailure = true; + sockets.get(0).close(); + + // let some time after disconnection + waitAtMost(() -> receivedMessagesSize() == 1); + assertEquals(1, receivedMessages.size()); + disconnect(client2); + } + + @Test public void lastWillNotSentOnRestrictedTopic(TestInfo info) throws Exception { + MqttConnectOptions client2_opts = new TestMqttConnectOptions(); + + MqttClient client2 = newConnectedClient(info, client2_opts); + // topic authorized for subscription, restricted for publishing + String lastWillTopic = "last-will"; + client2.subscribe(lastWillTopic); + client2.setCallback(this); + + final SocketFactory factory = SocketFactory.getDefault(); + final ArrayList<Socket> sockets = new ArrayList<>(); + SocketFactory testFactory = new SocketFactory() { + public Socket createSocket(String s, int i) throws IOException { + Socket sock = factory.createSocket(s, i); + sockets.add(sock); + return sock; + } + public Socket createSocket(String s, int i, InetAddress a, int i1) { + return null; + } + public Socket createSocket(InetAddress a, int i) { + return null; + } + public Socket createSocket(InetAddress a, int i, InetAddress a1, int i1) { + return null; + } + @Override + public Socket createSocket() { + Socket sock = new Socket(); + sockets.add(sock); + return sock; + } + }; + + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + + MqttClient client = newClient("last-will-not-sent-on-restricted-topic"); + client_opts.setSocketFactory(testFactory); + MqttTopic willTopic = client.getTopic(lastWillTopic); + client_opts.setWill(willTopic, payload, 0, false); + client_opts.setCleanSession(false); + client.connect(client_opts); + + assertEquals(1, sockets.size()); + expectConnectionFailure = true; + sockets.get(0).close(); + + // let some time after disconnection + waitForTestDelay(); + assertEquals(0, receivedMessages.size()); + disconnect(client2); + } + + @Test public void topicAuthorisationVariableExpansion(TestInfo info) throws Exception { + final String client_id = clientId(info); + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newConnectedClient(client_id, client_opts); + client.setCallback(this); + String topicWithExpandedVariables = "guest/" + client_id + "/a"; + client.subscribe(topicWithExpandedVariables); + publish(client, topicWithExpandedVariables, 1, "content".getBytes()); + waitAtMost(() -> receivedMessagesSize() == 1); + assertTrue(client.isConnected()); + try { + publish(client, "guest/WrongClientId/a", 1, "content".getBytes()); + fail("Publishing on a forbidden topic, an exception should have been thrown"); + client.disconnect(); + } catch(Exception e) { + // OK + } + } + + @Test public void interopM2A(TestInfo info) throws MqttException, IOException, InterruptedException, TimeoutException { + setUpAmqp(); + String queue = ch.queueDeclare().getQueue(); + ch.queueBind(queue, "amq.topic", topic); + + byte[] interopPayload = "interop-body".getBytes(); + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newConnectedClient(info, client_opts); + publish(client, topic, 1, interopPayload); + disconnect(client); + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference<byte[]> messageBody = new AtomicReference<>(); + ch.basicConsume(queue, true, new DefaultConsumer(ch) { + @Override + public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException { + 
messageBody.set(body); + latch.countDown(); + } + }); + assertTrue(latch.await(EXPECT_TIMEOUT.toMillis(), TimeUnit.MILLISECONDS)); + assertEquals(new String(interopPayload), new String(messageBody.get())); + assertNull(ch.basicGet(queue, true)); + tearDownAmqp(); + } + + @Test public void interopA2M(TestInfo info) throws MqttException, IOException, InterruptedException, TimeoutException { + MqttConnectOptions client_opts = new TestMqttConnectOptions(); + MqttClient client = newConnectedClient(info, client_opts); + client.setCallback(this); + client.subscribe(topic, 1); + + setUpAmqp(); + ch.basicPublish("amq.topic", topic, MessageProperties.MINIMAL_BASIC, payload); + tearDownAmqp(); + + waitAtMost(() -> receivedMessagesSize() == 1); + client.disconnect(); + } + + private void publish(MqttClient client, String topicName, int qos, byte[] payload) throws MqttException { + publish(client, topicName, qos, payload, false); + } + + private void publish(MqttClient client, String topicName, int qos, byte[] payload, boolean retained) throws MqttException { + MqttTopic topic = client.getTopic(topicName); + MqttMessage message = new MqttMessage(payload); + message.setQos(qos); + message.setRetained(retained); + MqttDeliveryToken token = topic.publish(message); + token.waitForCompletion(); + } + + private void publishRetained(MqttClient client, String topicName, int qos, byte[] payload) throws MqttException { + publish(client, topicName, qos, payload, true); + } + + private void clearRetained(MqttClient client, String topicName) throws MqttException { + publishRetained(client, topicName, 1, "".getBytes()); + } + + public void connectionLost(Throwable cause) { + if (!expectConnectionFailure) + fail("Connection unexpectedly lost"); + } + + public void messageArrived(String topic, MqttMessage message) throws Exception { + lastReceipt = System.currentTimeMillis(); + receivedMessages.add(message); + if (failOnDelivery) { + throw new Exception("unexpected delivery on topic " + topic); + } + } + + public void deliveryComplete(IMqttDeliveryToken token) { + } + + private Integer receivedMessagesSize() { + return receivedMessages.size(); + } + + private void waitForTestDelay() { + try { + Thread.sleep(testDelay); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + public static void waitAtMost(BooleanSupplier condition) throws InterruptedException { + if (condition.getAsBoolean()) { + return; + } + int waitTime = 100; + int waitedTime = 0; + long timeoutInMs = EXPECT_TIMEOUT.toMillis(); + while (waitedTime <= timeoutInMs) { + Thread.sleep(waitTime); + if (condition.getAsBoolean()) { + return; + } + waitedTime += waitTime; + } + fail("Waited " + EXPECT_TIMEOUT.get(ChronoUnit.SECONDS) + " second(s), condition never got true"); + } +} diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/rabbit-test.sh b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/rabbit-test.sh new file mode 100755 index 0000000000..cba9bcd493 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/rabbit-test.sh @@ -0,0 +1,8 @@ +#!/bin/sh +CTL=$1 +USER="O=client,CN=$(hostname)" + +# Test direct connections +$CTL add_user "$USER" '' +$CTL set_permissions -p / "$USER" ".*" ".*" ".*" +$CTL set_topic_permissions -p / "$USER" "amq.topic" "test-topic|test-retained-topic|.*topic.*" "test-topic|test-retained-topic|.*topic.*|last-will" diff --git 
a/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/setup-rabbit-test.sh b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/setup-rabbit-test.sh new file mode 100644 index 0000000000..2e2282ee07 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/setup-rabbit-test.sh @@ -0,0 +1,2 @@ +#!/bin/sh -e +sh -e `dirname $0`/rabbit-test.sh "$DEPS_DIR/rabbit/scripts/rabbitmqctl -n $RABBITMQ_NODENAME" diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/tls/MqttSSLTest.java b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/tls/MqttSSLTest.java new file mode 100644 index 0000000000..2ea4c7a638 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/tls/MqttSSLTest.java @@ -0,0 +1,157 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at https://mozilla.org/MPL/2.0/. +// +// Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +// + +package com.rabbitmq.mqtt.test.tls; + +import org.eclipse.paho.client.mqttv3.*; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.*; + + +/** + * MQTT v3.1 tests + * + */ + +public class MqttSSLTest implements MqttCallback { + + private final String brokerUrl = "ssl://" + getHost() + ":" + getPort(); + private String clientId; + private String clientId2; + private MqttClient client; + private MqttClient client2; + private MqttConnectOptions conOpt; + + private volatile List<MqttMessage> receivedMessages; + private volatile boolean expectConnectionFailure; + + private static String getPort() { + Object port = System.getProperty("mqtt.ssl.port"); + assertNotNull(port); + return port.toString(); + } + + private static String getHost() { + Object host = System.getProperty("hostname"); + assertNotNull(host); + return host.toString(); + } + + // override 10s limit + private class MyConnOpts extends MqttConnectOptions { + private int keepAliveInterval = 60; + + @Override + public void setKeepAliveInterval(int keepAliveInterval) { + this.keepAliveInterval = keepAliveInterval; + } + + @Override + public int getKeepAliveInterval() { + return keepAliveInterval; + } + } + + + @BeforeEach + public void setUp() throws MqttException, IOException { + clientId = getClass().getSimpleName() + ((int) (10000 * Math.random())); + clientId2 = clientId + "-2"; + client = new MqttClient(brokerUrl, clientId, null); + client2 = new MqttClient(brokerUrl, clientId2, null); + conOpt = new MyConnOpts(); + conOpt.setSocketFactory(MutualAuth.getSSLContextWithoutCert().getSocketFactory()); + setConOpts(conOpt); + receivedMessages = Collections.synchronizedList(new ArrayList<MqttMessage>()); + expectConnectionFailure = false; + } + + @AfterEach + public void tearDown() throws MqttException { + // clean any sticky sessions + setConOpts(conOpt); + client = new MqttClient(brokerUrl, clientId, null); + try { + client.connect(conOpt); + client.disconnect(); + } catch (Exception ignored) { + } + + client2 = new MqttClient(brokerUrl, clientId2, null); + try { + client2.connect(conOpt); + client2.disconnect(); + } catch (Exception 
ignored) { + } + } + + + private void setConOpts(MqttConnectOptions conOpts) { + conOpts.setCleanSession(true); + conOpts.setKeepAliveInterval(60); + } + + @Test + public void certLogin() throws MqttException { + try { + conOpt.setSocketFactory(MutualAuth.getSSLContextWithClientCert().getSocketFactory()); + client.connect(conOpt); + } catch (Exception e) { + e.printStackTrace(); + fail("Exception: " + e.getMessage()); + } + } + + + @Test public void invalidUser() throws MqttException { + conOpt.setUserName("invalid-user"); + try { + client.connect(conOpt); + fail("Authentication failure expected"); + } catch (MqttException ex) { + assertEquals(MqttException.REASON_CODE_FAILED_AUTHENTICATION, ex.getReasonCode()); + } catch (Exception e) { + e.printStackTrace(); + fail("Exception: " + e.getMessage()); + } + } + + @Test public void invalidPassword() throws MqttException { + conOpt.setUserName("invalid-user"); + conOpt.setPassword("invalid-password".toCharArray()); + try { + client.connect(conOpt); + fail("Authentication failure expected"); + } catch (MqttException ex) { + assertEquals(MqttException.REASON_CODE_FAILED_AUTHENTICATION, ex.getReasonCode()); + } catch (Exception e) { + e.printStackTrace(); + fail("Exception: " + e.getMessage()); + } + } + + + public void connectionLost(Throwable cause) { + if (!expectConnectionFailure) + fail("Connection unexpectedly lost"); + } + + public void messageArrived(String topic, MqttMessage message) throws Exception { + receivedMessages.add(message); + } + + public void deliveryComplete(IMqttDeliveryToken token) { + } +} diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/tls/MutualAuth.java b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/tls/MutualAuth.java new file mode 100644 index 0000000000..081cae4052 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/java/com/rabbitmq/mqtt/test/tls/MutualAuth.java @@ -0,0 +1,89 @@ +package com.rabbitmq.mqtt.test.tls; + +import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManagerFactory; +import java.io.IOException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.cert.CertificateException; +import java.util.Arrays; +import java.util.List; +import java.io.FileInputStream; + + +public class MutualAuth { + + private MutualAuth() { + + } + + private static String getStringProperty(String propertyName) throws IllegalArgumentException { + Object value = System.getProperty(propertyName); + if (value == null) throw new IllegalArgumentException("Property: " + propertyName + " not found"); + return value.toString(); + } + + private static TrustManagerFactory getServerTrustManagerFactory() throws NoSuchAlgorithmException, CertificateException, IOException, KeyStoreException { + String keystorePath = System.getProperty("test-keystore.ca"); + char[] trustPhrase = getStringProperty("test-keystore.password").toCharArray(); + MutualAuth dummy = new MutualAuth(); + + // Server TrustStore + KeyStore tks = KeyStore.getInstance("JKS"); + tks.load(new FileInputStream(keystorePath), trustPhrase); + + TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509"); + tmf.init(tks); + + return tmf; + } + + public static SSLContext getSSLContextWithClientCert() throws IOException { + + char[] clientPhrase = getStringProperty("test-client-cert.password").toCharArray(); + + String p12Path = 
System.getProperty("test-client-cert.path"); + + MutualAuth dummy = new MutualAuth(); + try { + SSLContext sslContext = getVanillaSSLContext(); + // Client Keystore + KeyStore ks = KeyStore.getInstance("PKCS12"); + ks.load(new FileInputStream(p12Path), clientPhrase); + KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509"); + kmf.init(ks, clientPhrase); + + sslContext.init(kmf.getKeyManagers(), getServerTrustManagerFactory().getTrustManagers(), null); + return sslContext; + } catch (Exception e) { + throw new IOException(e); + } + + } + + private static SSLContext getVanillaSSLContext() throws NoSuchAlgorithmException { + SSLContext result = null; + List<String> xs = Arrays.asList("TLSv1.2", "TLSv1.1", "TLSv1"); + for(String x : xs) { + try { + return SSLContext.getInstance(x); + } catch (NoSuchAlgorithmException nae) { + // keep trying + } + } + throw new NoSuchAlgorithmException("Could not obtain an SSLContext for TLS 1.0-1.2"); + } + + public static SSLContext getSSLContextWithoutCert() throws IOException { + try { + SSLContext sslContext = getVanillaSSLContext(); + sslContext.init(null, getServerTrustManagerFactory().getTrustManagers(), null); + return sslContext; + } catch (Exception e) { + throw new IOException(e); + } + } + +} diff --git a/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/scripts/remove_old_test_keystores.groovy b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/scripts/remove_old_test_keystores.groovy new file mode 100644 index 0000000000..6864a41e29 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/java_SUITE_data/src/test/scripts/remove_old_test_keystores.groovy @@ -0,0 +1,10 @@ +def dir = new File(project.build.directory) + +dir.mkdir() + +// This pattern starts with `.*`. This is normally useless and even +// inefficient but the matching doesn't work without it... +def pattern = ~/.*\.keystore$/ +dir.eachFileMatch(pattern) { file -> + file.delete() +} diff --git a/deps/rabbitmq_mqtt/test/mqtt_machine_SUITE.erl b/deps/rabbitmq_mqtt/test/mqtt_machine_SUITE.erl new file mode 100644 index 0000000000..abdc3506dc --- /dev/null +++ b/deps/rabbitmq_mqtt/test/mqtt_machine_SUITE.erl @@ -0,0 +1,73 @@ +-module(mqtt_machine_SUITE). + +-compile(export_all). + +-export([ + ]). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-include("mqtt_machine.hrl"). + +%%%=================================================================== +%%% Common Test callbacks +%%%=================================================================== + +all() -> + [ + {group, tests} + ]. + + +all_tests() -> + [ + basics + ]. + +groups() -> + [ + {tests, [], all_tests()} + ]. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_Group, Config) -> + Config. + +end_per_group(_Group, _Config) -> + ok. + +init_per_testcase(_TestCase, Config) -> + Config. + +end_per_testcase(_TestCase, _Config) -> + ok. 
+ +%%%=================================================================== +%%% Test cases +%%%=================================================================== + +basics(_Config) -> + S0 = mqtt_machine:init(#{}), + ClientId = <<"id1">>, + {S1, ok, _} = mqtt_machine:apply(meta(1), {register, ClientId, self()}, S0), + ?assertMatch(#machine_state{client_ids = Ids} when map_size(Ids) == 1, S1), + {S2, ok, _} = mqtt_machine:apply(meta(2), {register, ClientId, self()}, S1), + ?assertMatch(#machine_state{client_ids = Ids} when map_size(Ids) == 1, S2), + {S3, ok, _} = mqtt_machine:apply(meta(3), {down, self(), noproc}, S2), + ?assertMatch(#machine_state{client_ids = Ids} when map_size(Ids) == 0, S3), + {S4, ok, _} = mqtt_machine:apply(meta(3), {unregister, ClientId, self()}, S2), + ?assertMatch(#machine_state{client_ids = Ids} when map_size(Ids) == 0, S4), + + ok. + +%% Utility + +meta(Idx) -> + #{index => Idx, + term => 1, + ts => erlang:system_time(millisecond)}. diff --git a/deps/rabbitmq_mqtt/test/processor_SUITE.erl b/deps/rabbitmq_mqtt/test/processor_SUITE.erl new file mode 100644 index 0000000000..e38a1d5318 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/processor_SUITE.erl @@ -0,0 +1,211 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. + + +-module(processor_SUITE). +-compile([export_all]). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-include_lib("amqp_client/include/amqp_client.hrl"). + +all() -> + [ + {group, non_parallel_tests} + ]. + +groups() -> + [ + {non_parallel_tests, [], [ + ignores_colons_in_username_if_option_set, + interprets_colons_in_username_if_option_not_set, + get_vhosts_from_global_runtime_parameter, + get_vhost, + add_client_id_to_adapter_info + ]} + ]. + +suite() -> + [{timetrap, {seconds, 60}}]. + +init_per_suite(Config) -> + ok = application:load(rabbitmq_mqtt), + Config. +end_per_suite(Config) -> + ok = application:unload(rabbitmq_mqtt), + Config. +init_per_group(_, Config) -> Config. +end_per_group(_, Config) -> Config. +init_per_testcase(get_vhost, Config) -> + mnesia:start(), + mnesia:create_table(rabbit_runtime_parameters, [ + {attributes, record_info(fields, runtime_parameters)}, + {record_name, runtime_parameters}]), + Config; +init_per_testcase(_, Config) -> Config. +end_per_testcase(get_vhost, Config) -> + mnesia:stop(), + Config; +end_per_testcase(_, Config) -> Config. + +ignore_colons(B) -> application:set_env(rabbitmq_mqtt, ignore_colons_in_username, B). + +ignores_colons_in_username_if_option_set(_Config) -> + ignore_colons(true), + ?assertEqual({rabbit_mqtt_util:env(vhost), <<"a:b:c">>}, + rabbit_mqtt_processor:get_vhost_username(<<"a:b:c">>)). + +interprets_colons_in_username_if_option_not_set(_Config) -> + ignore_colons(false), + ?assertEqual({<<"a:b">>, <<"c">>}, + rabbit_mqtt_processor:get_vhost_username(<<"a:b:c">>)). 
+ +get_vhosts_from_global_runtime_parameter(_Config) -> + MappingParameter = [ + {<<"O=client,CN=dummy1">>, <<"vhost1">>}, + {<<"O=client,CN=dummy2">>, <<"vhost2">>} + ], + <<"vhost1">> = rabbit_mqtt_processor:get_vhost_from_user_mapping(<<"O=client,CN=dummy1">>, MappingParameter), + <<"vhost2">> = rabbit_mqtt_processor:get_vhost_from_user_mapping(<<"O=client,CN=dummy2">>, MappingParameter), + undefined = rabbit_mqtt_processor:get_vhost_from_user_mapping(<<"O=client,CN=dummy3">>, MappingParameter), + undefined = rabbit_mqtt_processor:get_vhost_from_user_mapping(<<"O=client,CN=dummy3">>, not_found). + +get_vhost(_Config) -> + clear_vhost_global_parameters(), + + %% not a certificate user, no cert/vhost mapping, no vhost in user + %% should use default vhost + {_, {<<"/">>, <<"guest">>}} = rabbit_mqtt_processor:get_vhost(<<"guest">>, none, 1883), + {_, {<<"/">>, <<"guest">>}} = rabbit_mqtt_processor:get_vhost(<<"guest">>, undefined, 1883), + clear_vhost_global_parameters(), + + %% not a certificate user, no cert/vhost mapping, vhost in user + %% should use vhost in user + {_, {<<"somevhost">>, <<"guest">>}} = rabbit_mqtt_processor:get_vhost(<<"somevhost:guest">>, none, 1883), + clear_vhost_global_parameters(), + + %% certificate user, no cert/vhost mapping + %% should use default vhost + {_, {<<"/">>, <<"guest">>}} = rabbit_mqtt_processor:get_vhost(<<"guest">>, <<"O=client,CN=dummy">>, 1883), + clear_vhost_global_parameters(), + + %% certificate user, cert/vhost mapping with global runtime parameter + %% should use mapping + set_global_parameter(mqtt_default_vhosts, [ + {<<"O=client,CN=dummy">>, <<"somevhost">>}, + {<<"O=client,CN=otheruser">>, <<"othervhost">>} + ]), + {_, {<<"somevhost">>, <<"guest">>}} = rabbit_mqtt_processor:get_vhost(<<"guest">>, <<"O=client,CN=dummy">>, 1883), + clear_vhost_global_parameters(), + + %% certificate user, cert/vhost mapping with global runtime parameter, but no key for the user + %% should use default vhost + set_global_parameter(mqtt_default_vhosts, [{<<"O=client,CN=otheruser">>, <<"somevhost">>}]), + {_, {<<"/">>, <<"guest">>}} = rabbit_mqtt_processor:get_vhost(<<"guest">>, <<"O=client,CN=dummy">>, 1883), + clear_vhost_global_parameters(), + + %% not a certificate user, port/vhost mapping + %% should use mapping + set_global_parameter(mqtt_port_to_vhost_mapping, [ + {<<"1883">>, <<"somevhost">>}, + {<<"1884">>, <<"othervhost">>} + ]), + {_, {<<"somevhost">>, <<"guest">>}} = rabbit_mqtt_processor:get_vhost(<<"guest">>, none, 1883), + clear_vhost_global_parameters(), + + %% not a certificate user, port/vhost mapping, but vhost in username + %% vhost in username should take precedence + set_global_parameter(mqtt_port_to_vhost_mapping, [ + {<<"1883">>, <<"somevhost">>}, + {<<"1884">>, <<"othervhost">>} + ]), + {_, {<<"vhostinusername">>, <<"guest">>}} = rabbit_mqtt_processor:get_vhost(<<"vhostinusername:guest">>, none, 1883), + clear_vhost_global_parameters(), + + %% not a certificate user, port/vhost mapping, but no mapping for this port + %% should use default vhost + set_global_parameter(mqtt_port_to_vhost_mapping, [ + {<<"1884">>, <<"othervhost">>} + ]), + {_, {<<"/">>, <<"guest">>}} = rabbit_mqtt_processor:get_vhost(<<"guest">>, none, 1883), + clear_vhost_global_parameters(), + + %% certificate user, port/vhost parameter, mapping, no cert/vhost mapping + %% should use port/vhost mapping + set_global_parameter(mqtt_port_to_vhost_mapping, [ + {<<"1883">>, <<"somevhost">>}, + {<<"1884">>, <<"othervhost">>} + ]), + {_, {<<"somevhost">>, <<"guest">>}} = 
rabbit_mqtt_processor:get_vhost(<<"guest">>, <<"O=client,CN=dummy">>, 1883), + clear_vhost_global_parameters(), + + %% certificate user, port/vhost parameter but no mapping, cert/vhost mapping + %% should use cert/vhost mapping + set_global_parameter(mqtt_default_vhosts, [ + {<<"O=client,CN=dummy">>, <<"somevhost">>}, + {<<"O=client,CN=otheruser">>, <<"othervhost">>} + ]), + set_global_parameter(mqtt_port_to_vhost_mapping, [ + {<<"1884">>, <<"othervhost">>} + ]), + {_, {<<"somevhost">>, <<"guest">>}} = rabbit_mqtt_processor:get_vhost(<<"guest">>, <<"O=client,CN=dummy">>, 1883), + clear_vhost_global_parameters(), + + %% certificate user, port/vhost parameter, cert/vhost parameter + %% cert/vhost parameter takes precedence + set_global_parameter(mqtt_default_vhosts, [ + {<<"O=client,CN=dummy">>, <<"cert-somevhost">>}, + {<<"O=client,CN=otheruser">>, <<"othervhost">>} + ]), + set_global_parameter(mqtt_port_to_vhost_mapping, [ + {<<"1883">>, <<"port-vhost">>}, + {<<"1884">>, <<"othervhost">>} + ]), + {_, {<<"cert-somevhost">>, <<"guest">>}} = rabbit_mqtt_processor:get_vhost(<<"guest">>, <<"O=client,CN=dummy">>, 1883), + clear_vhost_global_parameters(), + + %% certificate user, no port/vhost or cert/vhost mapping, vhost in username + %% should use vhost in username + {_, {<<"vhostinusername">>, <<"guest">>}} = rabbit_mqtt_processor:get_vhost(<<"vhostinusername:guest">>, <<"O=client,CN=dummy">>, 1883), + + %% not a certificate user, port/vhost parameter, cert/vhost parameter + %% port/vhost mapping is used, as cert/vhost should not be used + set_global_parameter(mqtt_default_vhosts, [ + {<<"O=cert">>, <<"cert-somevhost">>}, + {<<"O=client,CN=otheruser">>, <<"othervhost">>} + ]), + set_global_parameter(mqtt_port_to_vhost_mapping, [ + {<<"1883">>, <<"port-vhost">>}, + {<<"1884">>, <<"othervhost">>} + ]), + {_, {<<"port-vhost">>, <<"guest">>}} = rabbit_mqtt_processor:get_vhost(<<"guest">>, none, 1883), + clear_vhost_global_parameters(), + ok. + +add_client_id_to_adapter_info(_Config) -> + TestFun = fun(AdapterInfo) -> + Info0 = rabbit_mqtt_processor:add_client_id_to_adapter_info(<<"my-client-id">>, AdapterInfo), + AdditionalInfo0 = Info0#amqp_adapter_info.additional_info, + ?assertEqual(#{<<"client_id">> => <<"my-client-id">>}, proplists:get_value(variable_map, AdditionalInfo0)), + ClientProperties = proplists:get_value(client_properties, AdditionalInfo0), + ?assertEqual([{client_id,longstr,<<"my-client-id">>}], ClientProperties) + end, + lists:foreach(TestFun, [#amqp_adapter_info{}, #amqp_adapter_info{additional_info = [{client_properties, []}]}]), + ok. + +set_global_parameter(Key, Term) -> + InsertParameterFun = fun () -> + mnesia:write(rabbit_runtime_parameters, #runtime_parameters{key = Key, value = Term}, write) + end, + + {atomic, ok} = mnesia:transaction(InsertParameterFun). + +clear_vhost_global_parameters() -> + DeleteParameterFun = fun () -> + ok = mnesia:delete(rabbit_runtime_parameters, mqtt_default_vhosts, write), + ok = mnesia:delete(rabbit_runtime_parameters, mqtt_port_to_vhost_mapping, write) + end, + {atomic, ok} = mnesia:transaction(DeleteParameterFun). diff --git a/deps/rabbitmq_mqtt/test/proxy_protocol_SUITE.erl b/deps/rabbitmq_mqtt/test/proxy_protocol_SUITE.erl new file mode 100644 index 0000000000..5403de23d3 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/proxy_protocol_SUITE.erl @@ -0,0 +1,125 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. 
If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% +-module(proxy_protocol_SUITE). +-compile([export_all]). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("eunit/include/eunit.hrl"). + +-define(TIMEOUT, 5000). + +all() -> + [ + {group, non_parallel_tests} + ]. + +groups() -> + [ + {non_parallel_tests, [], [ + proxy_protocol, + proxy_protocol_tls + ]} + ]. + +init_per_suite(Config) -> + rabbit_ct_helpers:log_environment(), + Suffix = rabbit_ct_helpers:testcase_absname(Config, "", "-"), + Config1 = rabbit_ct_helpers:set_config(Config, [ + {rmq_nodename_suffix, Suffix}, + {rmq_certspwd, "bunnychow"}, + {rabbitmq_ct_tls_verify, verify_none} + ]), + MqttConfig = mqtt_config(), + rabbit_ct_helpers:run_setup_steps(Config1, + [ fun(Conf) -> merge_app_env(MqttConfig, Conf) end ] ++ + rabbit_ct_broker_helpers:setup_steps() ++ + rabbit_ct_client_helpers:setup_steps()). + +mqtt_config() -> + {rabbitmq_mqtt, [ + {proxy_protocol, true}, + {ssl_cert_login, true}, + {allow_anonymous, true}]}. + +end_per_suite(Config) -> + rabbit_ct_helpers:run_teardown_steps(Config, + rabbit_ct_client_helpers:teardown_steps() ++ + rabbit_ct_broker_helpers:teardown_steps()). + +init_per_group(_, Config) -> Config. +end_per_group(_, Config) -> Config. + +init_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:testcase_started(Config, Testcase). + +end_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:testcase_finished(Config, Testcase). + +proxy_protocol(Config) -> + Port = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt), + {ok, Socket} = gen_tcp:connect({127,0,0,1}, Port, + [binary, {active, false}, {packet, raw}]), + ok = inet:send(Socket, "PROXY TCP4 192.168.1.1 192.168.1.2 80 81\r\n"), + ok = inet:send(Socket, mqtt_3_1_1_connect_frame()), + {ok, _Packet} = gen_tcp:recv(Socket, 0, ?TIMEOUT), + ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0, + ?MODULE, connection_name, []), + match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]), + gen_tcp:close(Socket), + ok. + +proxy_protocol_tls(Config) -> + app_utils:start_applications([asn1, crypto, public_key, ssl]), + Port = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt_tls), + {ok, Socket} = gen_tcp:connect({127,0,0,1}, Port, + [binary, {active, false}, {packet, raw}]), + ok = inet:send(Socket, "PROXY TCP4 192.168.1.1 192.168.1.2 80 81\r\n"), + {ok, SslSocket} = ssl:connect(Socket, [], ?TIMEOUT), + ok = ssl:send(SslSocket, mqtt_3_1_1_connect_frame()), + {ok, _Packet} = ssl:recv(SslSocket, 0, ?TIMEOUT), + ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0, + ?MODULE, connection_name, []), + match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]), + gen_tcp:close(Socket), + ok. + +connection_name() -> + Connections = ets:tab2list(connection_created), + {_Key, Values} = lists:nth(1, Connections), + {_, Name} = lists:keyfind(name, 1, Values), + Name. + +merge_app_env(MqttConfig, Config) -> + rabbit_ct_helpers:merge_app_env(Config, MqttConfig). + +mqtt_3_1_1_connect_frame() -> + <<16, + 24, + 0, + 4, + 77, + 81, + 84, + 84, + 4, + 2, + 0, + 60, + 0, + 12, + 84, + 101, + 115, + 116, + 67, + 111, + 110, + 115, + 117, + 109, + 101, + 114>>. 
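The hard-coded binary above is easier to follow with the MQTT 3.1.1 framing spelled out. The annotated copy below is an illustrative aid only (it is not part of proxy_protocol_SUITE); the byte values are taken verbatim from mqtt_3_1_1_connect_frame/0 above, and the field names follow the MQTT 3.1.1 packet format.

%% Annotated sketch of the same CONNECT frame; evaluating this expression
%% yields exactly the binary the suite sends after the PROXY protocol header.
mqtt_3_1_1_connect_frame_annotated() ->
    <<16,                       %% fixed header: packet type 1 (CONNECT) in the high nibble, flags 0
      24,                       %% remaining length: 24 bytes follow
      0, 4, "MQTT",             %% protocol name: length 4, then 77,81,84,84 = "MQTT"
      4,                        %% protocol level 4 (MQTT 3.1.1)
      2,                        %% connect flags: clean session only
      0, 60,                    %% keep alive: 60 seconds
      0, 12, "TestConsumer">>.  %% payload: client id length 12, then "TestConsumer"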
diff --git a/deps/rabbitmq_mqtt/test/rabbit_auth_backend_mqtt_mock.erl b/deps/rabbitmq_mqtt/test/rabbit_auth_backend_mqtt_mock.erl new file mode 100644 index 0000000000..5272138c6b --- /dev/null +++ b/deps/rabbitmq_mqtt/test/rabbit_auth_backend_mqtt_mock.erl @@ -0,0 +1,45 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2019-2020 VMware, Inc. or its affiliates. All rights reserved. +%% + +%% A mock authn/authz that records information during calls. For testing purposes only. + +-module(rabbit_auth_backend_mqtt_mock). +-include_lib("rabbit_common/include/rabbit.hrl"). + +-behaviour(rabbit_authn_backend). +-behaviour(rabbit_authz_backend). + +-export([user_login_authentication/2, user_login_authorization/2, + check_vhost_access/3, check_resource_access/4, check_topic_access/4, + state_can_expire/0, + get/1]). + +user_login_authentication(_, AuthProps) -> + ets:new(?MODULE, [set, public, named_table]), + ets:insert(?MODULE, {authentication, AuthProps}), + {ok, #auth_user{username = <<"dummy">>, + tags = [], + impl = none}}. + +user_login_authorization(_, _) -> + io:format("login authorization"), + {ok, does_not_matter}. + +check_vhost_access(#auth_user{}, _VHostPath, AuthzData) -> + ets:insert(?MODULE, {vhost_access, AuthzData}), + true. +check_resource_access(#auth_user{}, #resource{}, _Permission, AuthzContext) -> + ets:insert(?MODULE, {resource_access, AuthzContext}), + true. +check_topic_access(#auth_user{}, #resource{}, _Permission, TopicContext) -> + ets:insert(?MODULE, {topic_access, TopicContext}), + true. + +state_can_expire() -> false. + +get(K) -> + ets:lookup(?MODULE, K). diff --git a/deps/rabbitmq_mqtt/test/rabbitmq_mqtt.app b/deps/rabbitmq_mqtt/test/rabbitmq_mqtt.app new file mode 100644 index 0000000000..c4083ec5fc --- /dev/null +++ b/deps/rabbitmq_mqtt/test/rabbitmq_mqtt.app @@ -0,0 +1,19 @@ +{application, rabbitmq_mqtt, + [{description, "RabbitMQ MQTT Adapter"}, + {vsn, "%%VSN%%"}, + {modules, []}, + {registered, []}, + {mod, {rabbit_mqtt, []}}, + {env, [{default_user, "guest_user"}, + {default_pass, "guest_pass"}, + {ssl_cert_login,false}, + {allow_anonymous, true}, + {vhost, "/"}, + {exchange, "amq.topic"}, + {subscription_ttl, 1800000}, % 30 min + {prefetch, 10}, + {ssl_listeners, []}, + {tcp_listeners, [1883]}, + {tcp_listen_options, [{backlog, 128}, + {nodelay, true}]}]}, + {applications, [kernel, stdlib, rabbit, amqp_client]}]}. diff --git a/deps/rabbitmq_mqtt/test/reader_SUITE.erl b/deps/rabbitmq_mqtt/test/reader_SUITE.erl new file mode 100644 index 0000000000..b94fdb5920 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/reader_SUITE.erl @@ -0,0 +1,166 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% +-module(reader_SUITE). +-compile([export_all]). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("eunit/include/eunit.hrl"). + +all() -> + [ + {group, non_parallel_tests} + ]. + +groups() -> + [ + {non_parallel_tests, [], [ + block, + handle_invalid_frames, + stats + ]} + ]. + +suite() -> + [{timetrap, {seconds, 60}}]. + +%% ------------------------------------------------------------------- +%% Testsuite setup/teardown. 
+%% ------------------------------------------------------------------- + +merge_app_env(Config) -> + rabbit_ct_helpers:merge_app_env(Config, + {rabbit, [ + {collect_statistics, basic}, + {collect_statistics_interval, 100} + ]}). + +init_per_suite(Config) -> + rabbit_ct_helpers:log_environment(), + Config1 = rabbit_ct_helpers:set_config(Config, [ + {rmq_nodename_suffix, ?MODULE}, + {rmq_extra_tcp_ports, [tcp_port_mqtt_extra, + tcp_port_mqtt_tls_extra]} + ]), + rabbit_ct_helpers:run_setup_steps(Config1, + [ fun merge_app_env/1 ] ++ + rabbit_ct_broker_helpers:setup_steps() ++ + rabbit_ct_client_helpers:setup_steps()). + +end_per_suite(Config) -> + rabbit_ct_helpers:run_teardown_steps(Config, + rabbit_ct_client_helpers:teardown_steps() ++ + rabbit_ct_broker_helpers:teardown_steps()). + +init_per_group(_, Config) -> + Config. + +end_per_group(_, Config) -> + Config. + +init_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:testcase_started(Config, Testcase). + +end_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:testcase_finished(Config, Testcase). + + +%% ------------------------------------------------------------------- +%% Testsuite cases +%% ------------------------------------------------------------------- + +block(Config) -> + P = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt), + {ok, C} = emqttc:start_link([{host, "localhost"}, + {port, P}, + {client_id, <<"simpleClient">>}, + {proto_ver, 3}, + {logger, info}, + {puback_timeout, 1}]), + %% Only here to ensure the connection is really up + emqttc:subscribe(C, <<"TopicA">>, qos0), + emqttc:publish(C, <<"TopicA">>, <<"Payload">>), + expect_publishes(<<"TopicA">>, [<<"Payload">>]), + emqttc:unsubscribe(C, [<<"TopicA">>]), + + emqttc:subscribe(C, <<"Topic1">>, qos0), + + %% Not blocked + {ok, _} = emqttc:sync_publish(C, <<"Topic1">>, <<"Not blocked yet">>, + [{qos, 1}]), + + ok = rpc(Config, vm_memory_monitor, set_vm_memory_high_watermark, [0.00000001]), + ok = rpc(Config, rabbit_alarm, set_alarm, [{{resource_limit, memory, node()}, []}]), + + %% Let it block + timer:sleep(100), + %% Blocked, but still will publish + {error, ack_timeout} = emqttc:sync_publish(C, <<"Topic1">>, <<"Now blocked">>, + [{qos, 1}]), + + %% Blocked + {error, ack_timeout} = emqttc:sync_publish(C, <<"Topic1">>, + <<"Blocked">>, [{qos, 1}]), + + rpc(Config, vm_memory_monitor, set_vm_memory_high_watermark, [0.4]), + rpc(Config, rabbit_alarm, clear_alarm, [{resource_limit, memory, node()}]), + + %% Let alarms clear + timer:sleep(1000), + + expect_publishes(<<"Topic1">>, [<<"Not blocked yet">>, + <<"Now blocked">>, + <<"Blocked">>]), + + emqttc:disconnect(C). + +handle_invalid_frames(Config) -> + N = rpc(Config, ets, info, [connection_metrics, size]), + P = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt), + {ok, C} = gen_tcp:connect("localhost", P, []), + Bin = <<"GET / HTTP/1.1\r\nHost: www.rabbitmq.com\r\nUser-Agent: curl/7.43.0\r\nAccept: */*">>, + gen_tcp:send(C, Bin), + gen_tcp:close(C), + %% No new stats entries should be inserted as connection never got to initialize + N = rpc(Config, ets, info, [connection_metrics, size]). 
+ +stats(Config) -> + P = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt), + %% CMN = rpc(Config, ets, info, [connection_metrics, size]), + %% CCMN = rpc(Config, ets, info, [connection_coarse_metrics, size]), + {ok, C} = emqttc:start_link([{host, "localhost"}, + {port, P}, + {client_id, <<"simpleClient">>}, + {proto_ver, 3}, + {logger, info}, + {puback_timeout, 1}]), + %% Ensure that there are some stats + emqttc:subscribe(C, <<"TopicA">>, qos0), + emqttc:publish(C, <<"TopicA">>, <<"Payload">>), + expect_publishes(<<"TopicA">>, [<<"Payload">>]), + emqttc:unsubscribe(C, [<<"TopicA">>]), + timer:sleep(1000), %% Wait for stats to be emitted, which it does every 100ms + %% Retrieve the connection Pid + [{_, Reader}] = rpc(Config, rabbit_mqtt_collector, list, []), + [{_, Pid}] = rpc(Config, rabbit_mqtt_reader, info, [Reader, [connection]]), + %% Verify the content of the metrics, garbage_collection must be present + [{Pid, Props}] = rpc(Config, ets, lookup, [connection_metrics, Pid]), + true = proplists:is_defined(garbage_collection, Props), + %% If the coarse entry is present, stats were successfully emitted + [{Pid, _, _, _, _}] = rpc(Config, ets, lookup, + [connection_coarse_metrics, Pid]), + emqttc:disconnect(C). + +expect_publishes(_Topic, []) -> ok; +expect_publishes(Topic, [Payload|Rest]) -> + receive + {publish, Topic, Payload} -> expect_publishes(Topic, Rest) + after 5000 -> + throw({publish_not_delivered, Payload}) + end. + +rpc(Config, M, F, A) -> + rabbit_ct_broker_helpers:rpc(Config, 0, M, F, A). diff --git a/deps/rabbitmq_mqtt/test/retainer_SUITE.erl b/deps/rabbitmq_mqtt/test/retainer_SUITE.erl new file mode 100644 index 0000000000..22b72a8d87 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/retainer_SUITE.erl @@ -0,0 +1,144 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. +%% +-module(retainer_SUITE). +-compile([export_all]). + +-include_lib("common_test/include/ct.hrl"). + +all() -> + [ + {group, non_parallel_tests} + ]. + +groups() -> + [ + {non_parallel_tests, [], [ + coerce_configuration_data, + should_translate_amqp2mqtt_on_publish, + should_translate_amqp2mqtt_on_retention, + should_translate_amqp2mqtt_on_retention_search + ]} + ]. + +suite() -> + [{timetrap, {seconds, 600}}]. + +%% ------------------------------------------------------------------- +%% Testsuite setup/teardown. +%% ------------------------------------------------------------------- + +init_per_suite(Config) -> + rabbit_ct_helpers:log_environment(), + Config1 = rabbit_ct_helpers:set_config(Config, [ + {rmq_nodename_suffix, ?MODULE}, + {rmq_extra_tcp_ports, [tcp_port_mqtt_extra, + tcp_port_mqtt_tls_extra]} + ]), + % see https://github.com/rabbitmq/rabbitmq-mqtt/issues/86 + RabbitConfig = {rabbit, [ + {default_user, "guest"}, + {default_pass, "guest"}, + {default_vhost, "/"}, + {default_permissions, [".*", ".*", ".*"]} + ]}, + rabbit_ct_helpers:run_setup_steps(Config1, + [ fun(Conf) -> merge_app_env(RabbitConfig, Conf) end ] ++ + rabbit_ct_broker_helpers:setup_steps() ++ + rabbit_ct_client_helpers:setup_steps()). + +merge_app_env(MqttConfig, Config) -> + rabbit_ct_helpers:merge_app_env(Config, MqttConfig). 
+ +end_per_suite(Config) -> + rabbit_ct_helpers:run_teardown_steps(Config, + rabbit_ct_client_helpers:teardown_steps() ++ + rabbit_ct_broker_helpers:teardown_steps()). + +init_per_group(_, Config) -> + Config. + +end_per_group(_, Config) -> + Config. + +init_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:testcase_started(Config, Testcase). + +end_per_testcase(Testcase, Config) -> + rabbit_ct_helpers:testcase_finished(Config, Testcase). + + +%% ------------------------------------------------------------------- +%% Testsuite cases +%% ------------------------------------------------------------------- + +coerce_configuration_data(Config) -> + P = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt), + {ok, C} = emqttc:start_link(connection_opts(P)), + + emqttc:subscribe(C, <<"TopicA">>, qos0), + emqttc:publish(C, <<"TopicA">>, <<"Payload">>), + expect_publishes(<<"TopicA">>, [<<"Payload">>]), + + emqttc:disconnect(C), + ok. + +%% ------------------------------------------------------------------- +%% When a client is subscribed to TopicA/Device.Field and another +%% client publishes to TopicA/Device.Field the client should be +%% sent messages for the translated topic (TopicA/Device/Field) +%% ------------------------------------------------------------------- +should_translate_amqp2mqtt_on_publish(Config) -> + P = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt), + {ok, C} = emqttc:start_link(connection_opts(P)), + %% there's an active consumer + emqttc:subscribe(C, <<"TopicA/Device.Field">>, qos1), + emqttc:publish(C, <<"TopicA/Device.Field">>, <<"Payload">>, [{retain, true}]), + expect_publishes(<<"TopicA/Device/Field">>, [<<"Payload">>]), + emqttc:disconnect(C). + +%% ------------------------------------------------------------------- +%% If a client publishes a retained message to TopicA/Device.Field and another +%% client subscribes to TopicA/Device.Field the client should be +%% sent the retained message for the translated topic (TopicA/Device/Field) +%% ------------------------------------------------------------------- +should_translate_amqp2mqtt_on_retention(Config) -> + P = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt), + {ok, C} = emqttc:start_link(connection_opts(P)), + %% publish with retain = true before a consumer comes around + emqttc:publish(C, <<"TopicA/Device.Field">>, <<"Payload">>, [{retain, true}]), + emqttc:subscribe(C, <<"TopicA/Device.Field">>, qos1), + expect_publishes(<<"TopicA/Device/Field">>, [<<"Payload">>]), + emqttc:disconnect(C). + +%% ------------------------------------------------------------------- +%% If a client publishes a retained message to TopicA/Device.Field and another +%% client subscribes to TopicA/Device/Field the client should be +%% sent the retained message for the translated topic (TopicA/Device/Field) +%% ------------------------------------------------------------------- +should_translate_amqp2mqtt_on_retention_search(Config) -> + P = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt), + {ok, C} = emqttc:start_link(connection_opts(P)), + emqttc:publish(C, <<"TopicA/Device.Field">>, <<"Payload">>, [{retain, true}]), + emqttc:subscribe(C, <<"TopicA/Device/Field">>, qos1), + expect_publishes(<<"TopicA/Device/Field">>, [<<"Payload">>]), + emqttc:disconnect(C). + +connection_opts(Port) -> + [{host, "localhost"}, + {port, Port}, + {client_id, <<"simpleClientRetainer">>}, + {proto_ver,3}, + {logger, info}, + {puback_timeout, 1}]. 
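All three should_translate_amqp2mqtt_* cases above exercise the same round trip: MQTT topics are rewritten into AMQP routing keys on the way in ("/" to "." and "+" to "*", the mapping util_SUITE below also verifies) and routing keys are rewritten back on the way out ("." to "/"), so a literal "." in a published MQTT topic reaches subscribers as a "/". A minimal sketch of that round trip, assuming the translation funs returned by rabbit_mqtt_util:get_topic_translation_funs/0 behave as util_SUITE expects; the helper name and the expected values in the comments are illustrative, not part of the suite.

%% Illustrative only: how "TopicA/Device.Field" ends up delivered on "TopicA/Device/Field".
translation_round_trip() ->
    {ok, {mqtt2amqp_fun, Mqtt2Amqp}, {amqp2mqtt_fun, Amqp2Mqtt}} =
        rabbit_mqtt_util:get_topic_translation_funs(),
    RoutingKey = Mqtt2Amqp("TopicA/Device.Field"),  %% expected: <<"TopicA.Device.Field">>
    Delivered  = Amqp2Mqtt(RoutingKey),             %% expected: <<"TopicA/Device/Field">>
    {RoutingKey, Delivered}.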
+ + expect_publishes(_Topic, []) -> ok; + expect_publishes(Topic, [Payload | Rest]) -> + receive + {publish, Topic, Payload} -> expect_publishes(Topic, Rest) + after 1500 -> + throw({publish_not_delivered, Payload}) + end. diff --git a/deps/rabbitmq_mqtt/test/util_SUITE.erl b/deps/rabbitmq_mqtt/test/util_SUITE.erl new file mode 100644 index 0000000000..6694498595 --- /dev/null +++ b/deps/rabbitmq_mqtt/test/util_SUITE.erl @@ -0,0 +1,80 @@ +%% This Source Code Form is subject to the terms of the Mozilla Public +%% License, v. 2.0. If a copy of the MPL was not distributed with this +%% file, You can obtain one at https://mozilla.org/MPL/2.0/. +%% +%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved. + +-module(util_SUITE). +-compile([export_all]). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("eunit/include/eunit.hrl"). + +all() -> + [ + {group, util_tests} + ]. + +groups() -> + [ + {util_tests, [parallel], [ + coerce_exchange, + coerce_vhost, + coerce_default_user, + coerce_default_pass, + mqtt_amqp_topic_translation + ] + } + ]. + +suite() -> + [{timetrap, {seconds, 60}}]. + +init_per_suite(Config) -> + ok = application:load(rabbitmq_mqtt), + Config. +end_per_suite(Config) -> + ok = application:unload(rabbitmq_mqtt), + Config. +init_per_group(_, Config) -> Config. +end_per_group(_, Config) -> Config. +init_per_testcase(_, Config) -> Config. +end_per_testcase(_, Config) -> Config. + +coerce_exchange(_) -> + ?assertEqual(<<"amq.topic">>, rabbit_mqtt_util:env(exchange)). + +coerce_vhost(_) -> + ?assertEqual(<<"/">>, rabbit_mqtt_util:env(vhost)). + +coerce_default_user(_) -> + ?assertEqual(<<"guest_user">>, rabbit_mqtt_util:env(default_user)). + +coerce_default_pass(_) -> + ?assertEqual(<<"guest_pass">>, rabbit_mqtt_util:env(default_pass)). + +mqtt_amqp_topic_translation(_) -> + ok = application:set_env(rabbitmq_mqtt, sparkplug, true), + {ok, {mqtt2amqp_fun, Mqtt2AmqpFun}, {amqp2mqtt_fun, Amqp2MqttFun}} = + rabbit_mqtt_util:get_topic_translation_funs(), + + T0 = "/foo/bar/+/baz", + T0_As_Amqp = <<".foo.bar.*.baz">>, + T0_As_Mqtt = <<"/foo/bar/+/baz">>, + ?assertEqual(T0_As_Amqp, Mqtt2AmqpFun(T0)), + ?assertEqual(T0_As_Mqtt, Amqp2MqttFun(T0_As_Amqp)), + + T1 = "spAv1.0/foo/bar/+/baz", + T1_As_Amqp = <<"spAv1___0.foo.bar.*.baz">>, + T1_As_Mqtt = <<"spAv1.0/foo/bar/+/baz">>, + ?assertEqual(T1_As_Amqp, Mqtt2AmqpFun(T1)), + ?assertEqual(T1_As_Mqtt, Amqp2MqttFun(T1_As_Amqp)), + + T2 = "spBv2.90/foo/bar/+/baz", + T2_As_Amqp = <<"spBv2___90.foo.bar.*.baz">>, + T2_As_Mqtt = <<"spBv2.90/foo/bar/+/baz">>, + ?assertEqual(T2_As_Amqp, Mqtt2AmqpFun(T2)), + ?assertEqual(T2_As_Mqtt, Amqp2MqttFun(T2_As_Amqp)), + + ok = application:unset_env(rabbitmq_mqtt, sparkplug), + ok. |