Diffstat (limited to 'deps/rabbitmq_stomp')
-rw-r--r--  deps/rabbitmq_stomp/.gitignore  29
-rw-r--r--  deps/rabbitmq_stomp/.travis.yml  61
-rw-r--r--  deps/rabbitmq_stomp/CODE_OF_CONDUCT.md  44
-rw-r--r--  deps/rabbitmq_stomp/CONTRIBUTING.md  99
-rw-r--r--  deps/rabbitmq_stomp/LICENSE  4
-rw-r--r--  deps/rabbitmq_stomp/LICENSE-MPL-RabbitMQ  373
-rw-r--r--  deps/rabbitmq_stomp/Makefile  46
-rw-r--r--  deps/rabbitmq_stomp/NOTES  71
-rw-r--r--  deps/rabbitmq_stomp/README.md  18
-rw-r--r--  deps/rabbitmq_stomp/erlang.mk  7808
-rwxr-xr-x  deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_recv.pl  13
-rwxr-xr-x  deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_rpc_client.pl  14
-rwxr-xr-x  deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_rpc_service.pl  21
-rwxr-xr-x  deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_send.pl  9
-rwxr-xr-x  deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_send_many.pl  11
-rwxr-xr-x  deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_slow_recv.pl  14
-rw-r--r--  deps/rabbitmq_stomp/examples/ruby/amq-sender.rb  10
-rw-r--r--  deps/rabbitmq_stomp/examples/ruby/cb-receiver.rb  8
-rw-r--r--  deps/rabbitmq_stomp/examples/ruby/cb-sender.rb  6
-rw-r--r--  deps/rabbitmq_stomp/examples/ruby/cb-slow-receiver.rb  13
-rw-r--r--  deps/rabbitmq_stomp/examples/ruby/exchange-receiver.rb  15
-rw-r--r--  deps/rabbitmq_stomp/examples/ruby/exchange-sender.rb  12
-rw-r--r--  deps/rabbitmq_stomp/examples/ruby/persistent-receiver.rb  11
-rw-r--r--  deps/rabbitmq_stomp/examples/ruby/persistent-sender.rb  13
-rw-r--r--  deps/rabbitmq_stomp/examples/ruby/temp-queue-client.rb  9
-rw-r--r--  deps/rabbitmq_stomp/examples/ruby/temp-queue-service.rb  15
-rw-r--r--  deps/rabbitmq_stomp/examples/ruby/topic-broadcast-receiver.rb  11
-rw-r--r--  deps/rabbitmq_stomp/examples/ruby/topic-broadcast-with-unsubscribe.rb  13
-rw-r--r--  deps/rabbitmq_stomp/examples/ruby/topic-sender.rb  7
-rw-r--r--  deps/rabbitmq_stomp/include/rabbit_stomp.hrl  42
-rw-r--r--  deps/rabbitmq_stomp/include/rabbit_stomp_frame.hrl  8
-rw-r--r--  deps/rabbitmq_stomp/include/rabbit_stomp_headers.hrl  73
-rw-r--r--  deps/rabbitmq_stomp/priv/schema/rabbitmq_stomp.schema  237
-rw-r--r--  deps/rabbitmq_stomp/rabbitmq-components.mk  359
-rw-r--r--  deps/rabbitmq_stomp/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStompConnectionsCommand.erl  95
-rw-r--r--  deps/rabbitmq_stomp/src/rabbit_stomp.erl  131
-rw-r--r--  deps/rabbitmq_stomp/src/rabbit_stomp_client_sup.erl  50
-rw-r--r--  deps/rabbitmq_stomp/src/rabbit_stomp_connection_info.erl  25
-rw-r--r--  deps/rabbitmq_stomp/src/rabbit_stomp_frame.erl  266
-rw-r--r--  deps/rabbitmq_stomp/src/rabbit_stomp_internal_event_handler.erl  46
-rw-r--r--  deps/rabbitmq_stomp/src/rabbit_stomp_processor.erl  1220
-rw-r--r--  deps/rabbitmq_stomp/src/rabbit_stomp_reader.erl  465
-rw-r--r--  deps/rabbitmq_stomp/src/rabbit_stomp_sup.erl  83
-rw-r--r--  deps/rabbitmq_stomp/src/rabbit_stomp_util.erl  418
-rw-r--r--  deps/rabbitmq_stomp/test/amqqueue_SUITE.erl  319
-rw-r--r--  deps/rabbitmq_stomp/test/command_SUITE.erl  127
-rw-r--r--  deps/rabbitmq_stomp/test/config_schema_SUITE.erl  55
-rw-r--r--  deps/rabbitmq_stomp/test/config_schema_SUITE_data/certs/cacert.pem  1
-rw-r--r--  deps/rabbitmq_stomp/test/config_schema_SUITE_data/certs/cert.pem  1
-rw-r--r--  deps/rabbitmq_stomp/test/config_schema_SUITE_data/certs/key.pem  1
-rw-r--r--  deps/rabbitmq_stomp/test/config_schema_SUITE_data/rabbitmq_stomp.snippets  97
-rw-r--r--  deps/rabbitmq_stomp/test/connections_SUITE.erl  160
-rw-r--r--  deps/rabbitmq_stomp/test/frame_SUITE.erl  191
-rw-r--r--  deps/rabbitmq_stomp/test/proxy_protocol_SUITE.erl  104
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE.erl  72
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/deps/pika/Makefile  27
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/deps/stomppy/Makefile  27
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/ack.py  252
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/amqp_headers.py  42
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/base.py  259
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/connect_options.py  51
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/destinations.py  536
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/errors.py  101
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/lifecycle.py  187
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/parsing.py  331
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/queue_properties.py  87
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/redelivered.py  40
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/reliability.py  41
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/ssl_lifecycle.py  81
-rwxr-xr-x  deps/rabbitmq_stomp/test/python_SUITE_data/src/test.py  21
-rwxr-xr-x  deps/rabbitmq_stomp/test/python_SUITE_data/src/test_connect_options.py  15
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/test_runner.py  26
-rwxr-xr-x  deps/rabbitmq_stomp/test/python_SUITE_data/src/test_ssl.py  17
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/test_util.py  52
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/topic_permissions.py  52
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/transactions.py  61
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_name.py  71
-rw-r--r--  deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_type_quorum.py  62
-rw-r--r--  deps/rabbitmq_stomp/test/src/rabbit_stomp_client.erl  75
-rw-r--r--  deps/rabbitmq_stomp/test/src/rabbit_stomp_publish_test.erl  80
-rw-r--r--  deps/rabbitmq_stomp/test/src/test.config  13
-rw-r--r--  deps/rabbitmq_stomp/test/topic_SUITE.erl  170
-rw-r--r--  deps/rabbitmq_stomp/test/util_SUITE.erl  242
83 files changed, 16443 insertions, 0 deletions
diff --git a/deps/rabbitmq_stomp/.gitignore b/deps/rabbitmq_stomp/.gitignore
new file mode 100644
index 0000000000..14dbfd18d3
--- /dev/null
+++ b/deps/rabbitmq_stomp/.gitignore
@@ -0,0 +1,29 @@
+.sw?
+.*.sw?
+*.beam
+/.erlang.mk/
+/cover/
+/debug/
+/deps/
+/doc/
+/ebin/
+/escript/
+/escript.lock
+/logs/
+/plugins/
+/plugins.lock
+/sbin/
+/sbin.lock
+/xrefr
+
+rabbitmq_stomp.d
+
+# Python testsuite.
+.python-version
+*.pyc
+test/python_SUITE_data/deps/pika/pika/
+test/python_SUITE_data/deps/pika/pika-*/
+test/python_SUITE_data/deps/stomppy/stomppy/
+test/python_SUITE_data/deps/stomppy/stomppy-git/
+
+test/config_schema_SUITE_data/schema/
diff --git a/deps/rabbitmq_stomp/.travis.yml b/deps/rabbitmq_stomp/.travis.yml
new file mode 100644
index 0000000000..e2fc1041e7
--- /dev/null
+++ b/deps/rabbitmq_stomp/.travis.yml
@@ -0,0 +1,61 @@
+# vim:sw=2:et:
+
+os: linux
+dist: xenial
+language: elixir
+notifications:
+ email:
+ recipients:
+ - alerts@rabbitmq.com
+ on_success: never
+ on_failure: always
+addons:
+ apt:
+ packages:
+ - awscli
+cache:
+ apt: true
+env:
+ global:
+ - secure: oLN5hBjMeKvT365DSoNLPPIZ9Bf9gxEgP3EJCZgPgKVvsE+4DhosdwYPxo1mNA2mq+6soizNGiW5LlD92UZonNgptl7UDwmVFWSHawEopYz67zFbcohEeHnKFr5bAapGgttdAHkfWH5nxv90O6OfEva0QBXkQb8O/hOdmYsVYOs=
+ - secure: efpmC/exFPHVbK4peAI4hAi7WKb5eUPgqhax95iDF54aVbt6SuO4h/t4gC2eiKU9el4YEccmapHfJyQ5FZSEw+aWS0wAXpmXlbIc8rxKuWbESeqvGKTcDmILfcLJYXt/B3pNzynRQCPUJkYo946j18+kfzB+cBHm7TV021hnt9w=
+
+ # $base_rmq_ref is used by rabbitmq-components.mk to select the
+ # appropriate branch for dependencies.
+ - base_rmq_ref=master
+
+elixir:
+ - '1.9'
+otp_release:
+ - '21.3'
+ - '22.2'
+
+install:
+ # This project being an Erlang one (we just set language to Elixir
+ # to ensure it is installed), we don't want Travis to run mix(1)
+ # automatically as it will break.
+ skip
+
+script:
+ # $current_rmq_ref is also used by rabbitmq-components.mk to select
+ # the appropriate branch for dependencies.
+ - make check-rabbitmq-components.mk
+ current_rmq_ref="${TRAVIS_PULL_REQUEST_BRANCH:-${TRAVIS_BRANCH}}"
+ - make xref
+ current_rmq_ref="${TRAVIS_PULL_REQUEST_BRANCH:-${TRAVIS_BRANCH}}"
+ - make tests
+ current_rmq_ref="${TRAVIS_PULL_REQUEST_BRANCH:-${TRAVIS_BRANCH}}"
+
+after_failure:
+ - |
+ cd "$TRAVIS_BUILD_DIR"
+ if test -d logs && test "$AWS_ACCESS_KEY_ID" && test "$AWS_SECRET_ACCESS_KEY"; then
+ archive_name="$(basename "$TRAVIS_REPO_SLUG")-$TRAVIS_JOB_NUMBER"
+
+ tar -c --transform "s/^logs/${archive_name}/" -f - logs | \
+ xz > "${archive_name}.tar.xz"
+
+ aws s3 cp "${archive_name}.tar.xz" s3://server-release-pipeline/travis-ci-logs/ \
+ --region eu-west-1 \
+ --acl public-read
+ fi
diff --git a/deps/rabbitmq_stomp/CODE_OF_CONDUCT.md b/deps/rabbitmq_stomp/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000..08697906fd
--- /dev/null
+++ b/deps/rabbitmq_stomp/CODE_OF_CONDUCT.md
@@ -0,0 +1,44 @@
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project, and in the interest of fostering an open
+and welcoming community, we pledge to respect all people who contribute through reporting
+issues, posting feature requests, updating documentation, submitting pull requests or
+patches, and other activities.
+
+We are committed to making participation in this project a harassment-free experience for
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
+religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+ * The use of sexualized language or imagery
+ * Personal attacks
+ * Trolling or insulting/derogatory comments
+ * Public or private harassment
+ * Publishing other's private information, such as physical or electronic addresses,
+ without explicit permission
+ * Other unethical or unprofessional conduct
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
+that they deem inappropriate, threatening, offensive, or harmful.
+
+By adopting this Code of Conduct, project maintainers commit themselves to fairly and
+consistently applying these principles to every aspect of managing this project. Project
+maintainers who do not follow or enforce the Code of Conduct may be permanently removed
+from the project team.
+
+This Code of Conduct applies both within project spaces and in public spaces when an
+individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
+contacting a project maintainer at [info@rabbitmq.com](mailto:info@rabbitmq.com). All complaints will
+be reviewed and investigated and will result in a response that is deemed necessary and
+appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
+with regard to the reporter of an incident.
+
+This Code of Conduct is adapted from the
+[Contributor Covenant](https://contributor-covenant.org), version 1.3.0, available at
+[contributor-covenant.org/version/1/3/0/](https://contributor-covenant.org/version/1/3/0/)
diff --git a/deps/rabbitmq_stomp/CONTRIBUTING.md b/deps/rabbitmq_stomp/CONTRIBUTING.md
new file mode 100644
index 0000000000..592e7ced57
--- /dev/null
+++ b/deps/rabbitmq_stomp/CONTRIBUTING.md
@@ -0,0 +1,99 @@
+Thank you for using RabbitMQ and for taking the time to contribute to the project.
+This document has two main parts:
+
+ * when and how to file GitHub issues for RabbitMQ projects
+ * how to submit pull requests
+
+They intend to save you and RabbitMQ maintainers some time, so please
+take a moment to read through them.
+
+## Overview
+
+### GitHub issues
+
+The RabbitMQ team uses GitHub issues for _specific actionable items_ that
+engineers can work on. This assumes the following:
+
+* GitHub issues are not used for questions, investigations, root cause
+ analysis, discussions of potential issues, etc (as defined by this team)
+* Enough information is provided by the reporter for maintainers to work with
+
+The team receives many questions through various venues every single
+day. Frequently, these questions do not include the necessary details
+the team needs to begin useful work. GitHub issues can very quickly
+turn into something impossible to navigate and make sense
+of. Because of this, questions, investigations, root cause analysis,
+and discussions of potential features are all considered to be
+[mailing list][rmq-users] material. If you are unsure where to begin,
+the [RabbitMQ users mailing list][rmq-users] is the right place.
+
+Getting all the details necessary to reproduce an issue, make a
+conclusion or even form a hypothesis about what's happening can take a
+fair amount of time. Please help others help you by providing a way to
+reproduce the behavior you're observing, or at least sharing as much
+relevant information as possible on the [RabbitMQ users mailing
+list][rmq-users].
+
+Please provide versions of the software used:
+
+ * RabbitMQ server
+ * Erlang
+ * Operating system version (and distribution, if applicable)
+ * All client libraries used
+ * RabbitMQ plugins (if applicable)
+
+The following information greatly helps in investigating and reproducing issues:
+
+ * RabbitMQ server logs
+ * A code example or terminal transcript that can be used to reproduce
+ * Full exception stack traces (a single line message is not enough!)
+ * `rabbitmqctl report` and `rabbitmqctl environment` output
+ * Other relevant details about the environment and workload, e.g. a traffic capture
+ * Feel free to edit out hostnames and other potentially sensitive information.
+
+To make collecting much of this and other environment information easier, use
+the [`rabbitmq-collect-env`][rmq-collect-env] script. It will produce an archive with
+server logs, operating system logs, output of certain diagnostics commands and so on.
+Please note that **no effort is made to scrub any information that may be sensitive**.
+
+### Pull Requests
+
+RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions.
+Pull requests are the primary place for discussing code changes.
+
+Here's the recommended workflow:
+
+ * [Fork the repository][github-fork] or repositories you plan on contributing to. If multiple
+ repositories are involved in addressing the same issue, please use the same branch name
+ in each repository
+ * Create a branch with a descriptive name in the relevant repositories
+ * Make your changes, run tests (usually with `make tests`), commit with a
+ [descriptive message][git-commit-msgs], push to your fork
+ * Submit pull requests with an explanation of what has been changed and **why**
+ * Submit a filled out and signed [Contributor Agreement][ca-agreement] if needed (see below)
+ * Be patient. We will get to your pull request eventually
+
+If what you are going to work on is a substantial change, please first
+ask the core team for their opinion on the [RabbitMQ users mailing list][rmq-users].
+
+## Code of Conduct
+
+See [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
+
+## Contributor Agreement
+
+If you want to contribute a non-trivial change, please submit a signed
+copy of our [Contributor Agreement][ca-agreement] around the time you
+submit your pull request. This will make it much easier (in some
+cases, possible) for the RabbitMQ team at Pivotal to merge your
+contribution.
+
+## Where to Ask Questions
+
+If something isn't clear, feel free to ask on our [mailing list][rmq-users].
+
+[rmq-collect-env]: https://github.com/rabbitmq/support-tools/blob/master/scripts/rabbitmq-collect-env
+[git-commit-msgs]: https://chris.beams.io/posts/git-commit/
+[rmq-users]: https://groups.google.com/forum/#!forum/rabbitmq-users
+[ca-agreement]: https://cla.pivotal.io/sign/rabbitmq
+[github-fork]: https://help.github.com/articles/fork-a-repo/
diff --git a/deps/rabbitmq_stomp/LICENSE b/deps/rabbitmq_stomp/LICENSE
new file mode 100644
index 0000000000..f2da65d175
--- /dev/null
+++ b/deps/rabbitmq_stomp/LICENSE
@@ -0,0 +1,4 @@
+This package is licensed under the MPL 2.0. For the MPL 2.0, please see LICENSE-MPL-RabbitMQ.
+
+If you have any questions regarding licensing, please contact us at
+info@rabbitmq.com.
diff --git a/deps/rabbitmq_stomp/LICENSE-MPL-RabbitMQ b/deps/rabbitmq_stomp/LICENSE-MPL-RabbitMQ
new file mode 100644
index 0000000000..14e2f777f6
--- /dev/null
+++ b/deps/rabbitmq_stomp/LICENSE-MPL-RabbitMQ
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+ means each individual or legal entity that creates, contributes to
+ the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+ means the combination of the Contributions of others (if any) used
+ by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+ means Source Code Form to which the initial Contributor has attached
+ the notice in Exhibit A, the Executable Form of such Source Code
+ Form, and Modifications of such Source Code Form, in each case
+ including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ (a) that the initial Contributor has attached the notice described
+ in Exhibit B to the Covered Software; or
+
+ (b) that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the
+ terms of a Secondary License.
+
+1.6. "Executable Form"
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+ means a work that combines Covered Software with other material, in
+ a separate file or files, that is not Covered Software.
+
+1.8. "License"
+ means this document.
+
+1.9. "Licensable"
+ means having the right to grant, to the maximum extent possible,
+ whether at the time of the initial grant or subsequently, any and
+ all of the rights conveyed by this License.
+
+1.10. "Modifications"
+ means any of the following:
+
+ (a) any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered
+ Software; or
+
+ (b) any new file in Source Code Form that contains any Covered
+ Software.
+
+1.11. "Patent Claims" of a Contributor
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the
+ License, by the making, using, selling, offering for sale, having
+ made, import, or transfer of either its Contributions or its
+ Contributor Version.
+
+1.12. "Secondary License"
+ means either the GNU General Public License, Version 2.0, the GNU
+ Lesser General Public License, Version 2.1, the GNU Affero General
+ Public License, Version 3.0, or any later versions of those
+ licenses.
+
+1.13. "Source Code Form"
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that
+ controls, is controlled by, or is under common control with You. For
+ purposes of this definition, "control" means (a) the power, direct
+ or indirect, to cause the direction or management of such entity,
+ whether by contract or otherwise, or (b) ownership of more than
+ fifty percent (50%) of the outstanding shares or beneficial
+ ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+ for sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+ or
+
+(b) for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+ Form, as described in Section 3.1, and You must inform recipients of
+ the Executable Form how they can obtain a copy of such Source Code
+ Form by reasonable means in a timely manner, at a charge no more
+ than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter
+ the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+* *
+* 6. Disclaimer of Warranty *
+* ------------------------- *
+* *
+* Covered Software is provided under this License on an "as is" *
+* basis, without warranty of any kind, either expressed, implied, or *
+* statutory, including, without limitation, warranties that the *
+* Covered Software is free of defects, merchantable, fit for a *
+* particular purpose or non-infringing. The entire risk as to the *
+* quality and performance of the Covered Software is with You. *
+* Should any Covered Software prove defective in any respect, You *
+* (not any Contributor) assume the cost of any necessary servicing, *
+* repair, or correction. This disclaimer of warranty constitutes an *
+* essential part of this License. No use of any Covered Software is *
+* authorized under this License except under this disclaimer. *
+* *
+************************************************************************
+
+************************************************************************
+* *
+* 7. Limitation of Liability *
+* -------------------------- *
+* *
+* Under no circumstances and under no legal theory, whether tort *
+* (including negligence), contract, or otherwise, shall any *
+* Contributor, or anyone who distributes Covered Software as *
+* permitted above, be liable to You for any direct, indirect, *
+* special, incidental, or consequential damages of any character *
+* including, without limitation, damages for lost profits, loss of *
+* goodwill, work stoppage, computer failure or malfunction, or any *
+* and all other commercial damages or losses, even if such party *
+* shall have been informed of the possibility of such damages. This *
+* limitation of liability shall not apply to liability for death or *
+* personal injury resulting from such party's negligence to the *
+* extent applicable law prohibits such limitation. Some *
+* jurisdictions do not allow the exclusion or limitation of *
+* incidental or consequential damages, so this exclusion and *
+* limitation may not apply to You. *
+* *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+ This Source Code Form is "Incompatible With Secondary Licenses", as
+ defined by the Mozilla Public License, v. 2.0.
diff --git a/deps/rabbitmq_stomp/Makefile b/deps/rabbitmq_stomp/Makefile
new file mode 100644
index 0000000000..a8a3e57c90
--- /dev/null
+++ b/deps/rabbitmq_stomp/Makefile
@@ -0,0 +1,46 @@
+PROJECT = rabbitmq_stomp
+PROJECT_DESCRIPTION = RabbitMQ STOMP plugin
+PROJECT_MOD = rabbit_stomp
+
+define PROJECT_ENV
+[
+ {default_user,
+ [{login, <<"guest">>},
+ {passcode, <<"guest">>}]},
+ {default_vhost, <<"/">>},
+ {default_topic_exchange, <<"amq.topic">>},
+ {default_nack_requeue, true},
+ {ssl_cert_login, false},
+ {implicit_connect, false},
+ {tcp_listeners, [61613]},
+ {ssl_listeners, []},
+ {num_tcp_acceptors, 10},
+ {num_ssl_acceptors, 10},
+ {tcp_listen_options, [{backlog, 128},
+ {nodelay, true}]},
+ %% see rabbitmq/rabbitmq-stomp#39
+ {trailing_lf, true},
+ %% see rabbitmq/rabbitmq-stomp#57
+ {hide_server_info, false},
+ {proxy_protocol, false}
+ ]
+endef
+
+define PROJECT_APP_EXTRA_KEYS
+ {broker_version_requirements, []}
+endef
+
+DEPS = ranch rabbit_common rabbit amqp_client
+TEST_DEPS = rabbitmq_ct_helpers rabbitmq_ct_client_helpers
+
+DEP_EARLY_PLUGINS = rabbit_common/mk/rabbitmq-early-plugin.mk
+DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk
+
+# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be
+# reviewed and merged.
+
+ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git
+ERLANG_MK_COMMIT = rabbitmq-tmp
+
+include rabbitmq-components.mk
+include erlang.mk
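
The PROJECT_ENV block above declares the plugin's default application environment: a guest/guest default user, the standard STOMP TCP listener on port 61613, and so on. As a minimal illustrative sketch (not part of this changeset), the same keys can be overridden per node through an Erlang-term configuration file; the listener port and credentials below are assumed placeholder values rather than recommendations:

    %% Illustrative advanced.config entry; port and credentials are placeholders.
    [
      {rabbitmq_stomp, [
        %% listen on a non-default port instead of 61613
        {tcp_listeners, [61614]},
        %% replace the guest/guest default user
        {default_user, [{login,    <<"stomp">>},
                        {passcode, <<"s3cr3t">>}]},
        %% still require clients to send an explicit CONNECT frame
        {implicit_connect, false}
      ]}
    ].
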
diff --git a/deps/rabbitmq_stomp/NOTES b/deps/rabbitmq_stomp/NOTES
new file mode 100644
index 0000000000..b4a9f02df4
--- /dev/null
+++ b/deps/rabbitmq_stomp/NOTES
@@ -0,0 +1,71 @@
+Comments from Sean Treadway, 2 June 2008, on the rabbitmq-discuss list:
+
+ - On naming, extensibility, and headers:
+
+ "STOMP looked like it was MQ agnostic and extensible while keeping
+ the core headers well defined (ack=client, message_id, etc...),
+ but my application was not MQ agnostic. Plus I saw some of the
+ ActiveMQ headers weren't available or necessary in RabbitMQ.
+
+ "Keeping the AMQP naming is the best way to piggy back on the AMQP
+ documentation. For those that need simple, transient queues, the
+ existing STOMP documentation would be sufficient."
+
+ ...
+
+ "I only have experience with RabbitMQ, so I'm fine with exposing
+ AMQP rather than try to come to some agreement over the extension
+ names of standard STOMP headers."
+
+ - On queue deletion over STOMP:
+
+ "Here, I would stick with the verbs defined in STOMP and extend the
+ verbs with headers. One possibility is to use UNSUBSCRIBE
+ messages to change the queue properties before sending the
+ 'basic.cancel' method. Another possibility is to change queue
+ properties on a SUBSCRIBE message. Neither seem nice to me. Third
+ option is to do nothing, and delete the queues outside of the
+ STOMP protocol"
+
+Comments from Darien Kindlund, 11 February 2009, on the rabbitmq-discuss list:
+
+ - On testing of connection establishment:
+
+ "[O]nce I switched each perl process over to re-using their
+ existing STOMP connection, things worked much, much better. As
+ such, I'm continuing development. In your unit testing, you may
+ want to include rapid connect/disconnect behavior or otherwise
+ explicitly warn developers to avoid this scenario."
+
+Comments from Novak Joe, 11 September 2008, on the rabbitmq-discuss list:
+
+ - On broadcast send:
+
+ "That said, I think it would also be useful to add to the STOMP
+ wiki page an additional note on broadcast SEND. In particular I
+ found that in order to send a message to a broadcast exchange it
+ needs look something like:
+
+ ---------------------------------
+ SEND
+ destination:x.mytopic
+ exchange:amq.topic
+
+ my message
+ \x00
+ --------------------------------
+
+ "However my initial newb intuition was that it should look more like:
+
+ ---------------------------------
+ SEND
+ destination:
+ exchange:amq.topic
+ routing_key:x.mytopic
+
+ my message
+ \x00
+ --------------------------------
+
+ "The ruby examples cleared this up but not before I experienced a
+ bit of confusion on the subject."
diff --git a/deps/rabbitmq_stomp/README.md b/deps/rabbitmq_stomp/README.md
new file mode 100644
index 0000000000..922793ba66
--- /dev/null
+++ b/deps/rabbitmq_stomp/README.md
@@ -0,0 +1,18 @@
+# RabbitMQ STOMP adapter
+
+The STOMP adapter is included in the RabbitMQ distribution. To enable
+it, use [rabbitmq-plugins](https://www.rabbitmq.com/man/rabbitmq-plugins.1.man.html):
+
+ rabbitmq-plugins enable rabbitmq_stomp
+
+## Supported STOMP Versions
+
+1.0 through 1.2.
+
+## Documentation
+
+[RabbitMQ STOMP plugin documentation](https://www.rabbitmq.com/stomp.html).
+
+## Continuous Integration
+
+[![Build Status](https://travis-ci.org/rabbitmq/rabbitmq-stomp.svg?branch=master)](https://travis-ci.org/rabbitmq/rabbitmq-stomp)
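
Because STOMP is a plain-text protocol, a quick way to confirm the enabled plugin is accepting connections is to speak it over a raw TCP socket. The following is a minimal sketch for an Erlang shell, assuming the default 61613 listener and the guest/guest default user declared in the Makefile earlier in this changeset; the destination and payload are arbitrary:

    %% A STOMP frame is a command line, headers, a blank line, a body, then a NUL byte.
    {ok, Sock} = gen_tcp:connect("localhost", 61613, [binary, {active, false}]),
    ok = gen_tcp:send(Sock, <<"CONNECT\nlogin:guest\npasscode:guest\n\n", 0>>),
    {ok, _Connected} = gen_tcp:recv(Sock, 0),    %% expect a CONNECTED frame back
    %% publish one message to a queue destination, then close the socket
    ok = gen_tcp:send(Sock, <<"SEND\ndestination:/queue/test\n\nhello", 0>>),
    ok = gen_tcp:close(Sock).

The Ruby and Perl scripts under examples/, and the Python suites under test/python_SUITE_data/, exercise the same frames through real STOMP client libraries.
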
diff --git a/deps/rabbitmq_stomp/erlang.mk b/deps/rabbitmq_stomp/erlang.mk
new file mode 100644
index 0000000000..fce4be0b0a
--- /dev/null
+++ b/deps/rabbitmq_stomp/erlang.mk
@@ -0,0 +1,7808 @@
+# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+.PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk
+
+ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST)))
+export ERLANG_MK_FILENAME
+
+ERLANG_MK_VERSION = 2019.07.01-40-geb3e4b0
+ERLANG_MK_WITHOUT =
+
+# Make 3.81 and 3.82 are deprecated.
+
+ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.81)
+$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
+endif
+
+ifeq ($(MAKELEVEL)$(MAKE_VERSION),03.82)
+$(warning Please upgrade to GNU Make 4 or later: https://erlang.mk/guide/installation.html)
+endif
+
+# Core configuration.
+
+PROJECT ?= $(notdir $(CURDIR))
+PROJECT := $(strip $(PROJECT))
+
+PROJECT_VERSION ?= rolling
+PROJECT_MOD ?= $(PROJECT)_app
+PROJECT_ENV ?= []
+
+# Verbosity.
+
+V ?= 0
+
+verbose_0 = @
+verbose_2 = set -x;
+verbose = $(verbose_$(V))
+
+ifeq ($(V),3)
+SHELL := $(SHELL) -x
+endif
+
+gen_verbose_0 = @echo " GEN " $@;
+gen_verbose_2 = set -x;
+gen_verbose = $(gen_verbose_$(V))
+
+gen_verbose_esc_0 = @echo " GEN " $$@;
+gen_verbose_esc_2 = set -x;
+gen_verbose_esc = $(gen_verbose_esc_$(V))
+
+# Temporary files directory.
+
+ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk
+export ERLANG_MK_TMP
+
+# "erl" command.
+
+ERL = erl +A1 -noinput -boot no_dot_erlang
+
+# Platform detection.
+
+ifeq ($(PLATFORM),)
+UNAME_S := $(shell uname -s)
+
+ifeq ($(UNAME_S),Linux)
+PLATFORM = linux
+else ifeq ($(UNAME_S),Darwin)
+PLATFORM = darwin
+else ifeq ($(UNAME_S),SunOS)
+PLATFORM = solaris
+else ifeq ($(UNAME_S),GNU)
+PLATFORM = gnu
+else ifeq ($(UNAME_S),FreeBSD)
+PLATFORM = freebsd
+else ifeq ($(UNAME_S),NetBSD)
+PLATFORM = netbsd
+else ifeq ($(UNAME_S),OpenBSD)
+PLATFORM = openbsd
+else ifeq ($(UNAME_S),DragonFly)
+PLATFORM = dragonfly
+else ifeq ($(shell uname -o),Msys)
+PLATFORM = msys2
+else
+$(error Unable to detect platform. Please open a ticket with the output of uname -a.)
+endif
+
+export PLATFORM
+endif
+
+# Core targets.
+
+all:: deps app rel
+
+# Noop to avoid a Make warning when there's nothing to do.
+rel::
+ $(verbose) :
+
+relup:: deps app
+
+check:: tests
+
+clean:: clean-crashdump
+
+clean-crashdump:
+ifneq ($(wildcard erl_crash.dump),)
+ $(gen_verbose) rm -f erl_crash.dump
+endif
+
+distclean:: clean distclean-tmp
+
+$(ERLANG_MK_TMP):
+ $(verbose) mkdir -p $(ERLANG_MK_TMP)
+
+distclean-tmp:
+ $(gen_verbose) rm -rf $(ERLANG_MK_TMP)
+
+help::
+ $(verbose) printf "%s\n" \
+ "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \
+ "Copyright (c) 2013-2016 Loïc Hoguin <essen@ninenines.eu>" \
+ "" \
+ "Usage: [V=1] $(MAKE) [target]..." \
+ "" \
+ "Core targets:" \
+ " all Run deps, app and rel targets in that order" \
+ " app Compile the project" \
+ " deps Fetch dependencies (if needed) and compile them" \
+ " fetch-deps Fetch dependencies recursively (if needed) without compiling them" \
+ " list-deps List dependencies recursively on stdout" \
+ " search q=... Search for a package in the built-in index" \
+ " rel Build a release for this project, if applicable" \
+ " docs Build the documentation for this project" \
+ " install-docs Install the man pages for this project" \
+ " check Compile and run all tests and analysis for this project" \
+ " tests Run the tests for this project" \
+ " clean Delete temporary and output files from most targets" \
+ " distclean Delete all temporary and output files" \
+ " help Display this help and exit" \
+ " erlang-mk Update erlang.mk to the latest version"
+
+# Core functions.
+
+empty :=
+space := $(empty) $(empty)
+tab := $(empty) $(empty)
+comma := ,
+
+define newline
+
+
+endef
+
+define comma_list
+$(subst $(space),$(comma),$(strip $(1)))
+endef
+
+define escape_dquotes
+$(subst ",\",$1)
+endef
+
+# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy.
+define erlang
+$(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk
+endef
+
+ifeq ($(PLATFORM),msys2)
+core_native_path = $(shell cygpath -m $1)
+else
+core_native_path = $1
+endif
+
+core_http_get = curl -Lf$(if $(filter-out 0,$(V)),,s)o $(call core_native_path,$1) $2
+
+core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1)))
+
+# We skip files that contain spaces because they end up causing issues.
+core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) \( -type l -o -type f \) -name $(subst *,\*,$2) | grep -v " "))
+
+core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1)))))))))))))))))))))))))))
+
+core_ls = $(filter-out $(1),$(shell echo $(1)))
+
+# @todo Use a solution that does not require using perl.
+core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2)
+
+define core_render
+ printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2)
+endef
+
+# Automated update.
+
+ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk
+ERLANG_MK_COMMIT ?=
+ERLANG_MK_BUILD_CONFIG ?= build.config
+ERLANG_MK_BUILD_DIR ?= .erlang.mk.build
+
+erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT)
+erlang-mk:
+ifdef ERLANG_MK_COMMIT
+ $(verbose) git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+ $(verbose) cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT)
+else
+ $(verbose) git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR)
+endif
+ $(verbose) if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi
+ $(gen_verbose) $(MAKE) --no-print-directory -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' UPGRADE=1
+ $(verbose) cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk
+ $(verbose) rm -rf $(ERLANG_MK_BUILD_DIR)
+ $(verbose) rm -rf $(ERLANG_MK_TMP)
+
+# The erlang.mk package index is bundled in the default erlang.mk build.
+# Search for the string "copyright" to skip to the rest of the code.
+
+# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-kerl
+
+KERL_INSTALL_DIR ?= $(HOME)/erlang
+
+ifeq ($(strip $(KERL)),)
+KERL := $(ERLANG_MK_TMP)/kerl/kerl
+endif
+
+KERL_DIR = $(ERLANG_MK_TMP)/kerl
+
+export KERL
+
+KERL_GIT ?= https://github.com/kerl/kerl
+KERL_COMMIT ?= master
+
+KERL_MAKEFLAGS ?=
+
+OTP_GIT ?= https://github.com/erlang/otp
+
+define kerl_otp_target
+$(KERL_INSTALL_DIR)/$(1): $(KERL)
+ $(verbose) if [ ! -d $$@ ]; then \
+ MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1); \
+ $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1); \
+ fi
+endef
+
+define kerl_hipe_target
+$(KERL_INSTALL_DIR)/$1-native: $(KERL)
+ $(verbose) if [ ! -d $$@ ]; then \
+ KERL_CONFIGURE_OPTIONS=--enable-native-libs \
+ MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native; \
+ $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native; \
+ fi
+endef
+
+$(KERL): $(KERL_DIR)
+
+$(KERL_DIR): | $(ERLANG_MK_TMP)
+ $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl
+ $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT)
+ $(verbose) chmod +x $(KERL)
+
+distclean:: distclean-kerl
+
+distclean-kerl:
+ $(gen_verbose) rm -rf $(KERL_DIR)
+
+# Allow users to select which version of Erlang/OTP to use for a project.
+
+ifneq ($(strip $(LATEST_ERLANG_OTP)),)
+# In some environments it is necessary to filter out master.
+ERLANG_OTP := $(notdir $(lastword $(sort\
+ $(filter-out $(KERL_INSTALL_DIR)/master $(KERL_INSTALL_DIR)/OTP_R%,\
+ $(filter-out %-rc1 %-rc2 %-rc3,$(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))))
+endif
+
+ERLANG_OTP ?=
+ERLANG_HIPE ?=
+
+# Use kerl to enforce a specific Erlang/OTP version for a project.
+ifneq ($(strip $(ERLANG_OTP)),)
+export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH)
+SHELL := env PATH=$(PATH) $(SHELL)
+$(eval $(call kerl_otp_target,$(ERLANG_OTP)))
+
+# Build Erlang/OTP only if it doesn't already exist.
+ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),)
+$(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...)
+$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2)
+endif
+
+else
+# Same for a HiPE enabled VM.
+ifneq ($(strip $(ERLANG_HIPE)),)
+export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH)
+SHELL := env PATH=$(PATH) $(SHELL)
+$(eval $(call kerl_hipe_target,$(ERLANG_HIPE)))
+
+# Build Erlang/OTP only if it doesn't already exist.
+ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native)$(BUILD_ERLANG_OTP),)
+$(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...)
+$(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2)
+endif
+
+endif
+endif
+
+PACKAGES += aberth
+pkg_aberth_name = aberth
+pkg_aberth_description = Generic BERT-RPC server in Erlang
+pkg_aberth_homepage = https://github.com/a13x/aberth
+pkg_aberth_fetch = git
+pkg_aberth_repo = https://github.com/a13x/aberth
+pkg_aberth_commit = master
+
+PACKAGES += active
+pkg_active_name = active
+pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running
+pkg_active_homepage = https://github.com/proger/active
+pkg_active_fetch = git
+pkg_active_repo = https://github.com/proger/active
+pkg_active_commit = master
+
+PACKAGES += actordb_core
+pkg_actordb_core_name = actordb_core
+pkg_actordb_core_description = ActorDB main source
+pkg_actordb_core_homepage = http://www.actordb.com/
+pkg_actordb_core_fetch = git
+pkg_actordb_core_repo = https://github.com/biokoda/actordb_core
+pkg_actordb_core_commit = master
+
+PACKAGES += actordb_thrift
+pkg_actordb_thrift_name = actordb_thrift
+pkg_actordb_thrift_description = Thrift API for ActorDB
+pkg_actordb_thrift_homepage = http://www.actordb.com/
+pkg_actordb_thrift_fetch = git
+pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift
+pkg_actordb_thrift_commit = master
+
+PACKAGES += aleppo
+pkg_aleppo_name = aleppo
+pkg_aleppo_description = Alternative Erlang Pre-Processor
+pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo
+pkg_aleppo_fetch = git
+pkg_aleppo_repo = https://github.com/ErlyORM/aleppo
+pkg_aleppo_commit = master
+
+PACKAGES += alog
+pkg_alog_name = alog
+pkg_alog_description = Simply the best logging framework for Erlang
+pkg_alog_homepage = https://github.com/siberian-fast-food/alogger
+pkg_alog_fetch = git
+pkg_alog_repo = https://github.com/siberian-fast-food/alogger
+pkg_alog_commit = master
+
+PACKAGES += amqp_client
+pkg_amqp_client_name = amqp_client
+pkg_amqp_client_description = RabbitMQ Erlang AMQP client
+pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html
+pkg_amqp_client_fetch = git
+pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git
+pkg_amqp_client_commit = master
+
+PACKAGES += annotations
+pkg_annotations_name = annotations
+pkg_annotations_description = Simple code instrumentation utilities
+pkg_annotations_homepage = https://github.com/hyperthunk/annotations
+pkg_annotations_fetch = git
+pkg_annotations_repo = https://github.com/hyperthunk/annotations
+pkg_annotations_commit = master
+
+PACKAGES += antidote
+pkg_antidote_name = antidote
+pkg_antidote_description = Large-scale computation without synchronisation
+pkg_antidote_homepage = https://syncfree.lip6.fr/
+pkg_antidote_fetch = git
+pkg_antidote_repo = https://github.com/SyncFree/antidote
+pkg_antidote_commit = master
+
+PACKAGES += apns
+pkg_apns_name = apns
+pkg_apns_description = Apple Push Notification Server for Erlang
+pkg_apns_homepage = http://inaka.github.com/apns4erl
+pkg_apns_fetch = git
+pkg_apns_repo = https://github.com/inaka/apns4erl
+pkg_apns_commit = master
+
+PACKAGES += asciideck
+pkg_asciideck_name = asciideck
+pkg_asciideck_description = Asciidoc for Erlang.
+pkg_asciideck_homepage = https://ninenines.eu
+pkg_asciideck_fetch = git
+pkg_asciideck_repo = https://github.com/ninenines/asciideck
+pkg_asciideck_commit = master
+
+PACKAGES += azdht
+pkg_azdht_name = azdht
+pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang
+pkg_azdht_homepage = https://github.com/arcusfelis/azdht
+pkg_azdht_fetch = git
+pkg_azdht_repo = https://github.com/arcusfelis/azdht
+pkg_azdht_commit = master
+
+PACKAGES += backoff
+pkg_backoff_name = backoff
+pkg_backoff_description = Simple exponential backoffs in Erlang
+pkg_backoff_homepage = https://github.com/ferd/backoff
+pkg_backoff_fetch = git
+pkg_backoff_repo = https://github.com/ferd/backoff
+pkg_backoff_commit = master
+
+PACKAGES += barrel_tcp
+pkg_barrel_tcp_name = barrel_tcp
+pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang.
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_fetch = git
+pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp
+pkg_barrel_tcp_commit = master
+
+PACKAGES += basho_bench
+pkg_basho_bench_name = basho_bench
+pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for.
+pkg_basho_bench_homepage = https://github.com/basho/basho_bench
+pkg_basho_bench_fetch = git
+pkg_basho_bench_repo = https://github.com/basho/basho_bench
+pkg_basho_bench_commit = master
+
+PACKAGES += bcrypt
+pkg_bcrypt_name = bcrypt
+pkg_bcrypt_description = Bcrypt Erlang / C library
+pkg_bcrypt_homepage = https://github.com/erlangpack/bcrypt
+pkg_bcrypt_fetch = git
+pkg_bcrypt_repo = https://github.com/erlangpack/bcrypt.git
+pkg_bcrypt_commit = master
+
+PACKAGES += beam
+pkg_beam_name = beam
+pkg_beam_description = BEAM emulator written in Erlang
+pkg_beam_homepage = https://github.com/tonyrog/beam
+pkg_beam_fetch = git
+pkg_beam_repo = https://github.com/tonyrog/beam
+pkg_beam_commit = master
+
+PACKAGES += beanstalk
+pkg_beanstalk_name = beanstalk
+pkg_beanstalk_description = An Erlang client for beanstalkd
+pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_fetch = git
+pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk
+pkg_beanstalk_commit = master
+
+PACKAGES += bear
+pkg_bear_name = bear
+pkg_bear_description = a set of statistics functions for erlang
+pkg_bear_homepage = https://github.com/boundary/bear
+pkg_bear_fetch = git
+pkg_bear_repo = https://github.com/boundary/bear
+pkg_bear_commit = master
+
+PACKAGES += bertconf
+pkg_bertconf_name = bertconf
+pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded
+pkg_bertconf_homepage = https://github.com/ferd/bertconf
+pkg_bertconf_fetch = git
+pkg_bertconf_repo = https://github.com/ferd/bertconf
+pkg_bertconf_commit = master
+
+PACKAGES += bifrost
+pkg_bifrost_name = bifrost
+pkg_bifrost_description = Erlang FTP Server Framework
+pkg_bifrost_homepage = https://github.com/thorstadt/bifrost
+pkg_bifrost_fetch = git
+pkg_bifrost_repo = https://github.com/thorstadt/bifrost
+pkg_bifrost_commit = master
+
+PACKAGES += binpp
+pkg_binpp_name = binpp
+pkg_binpp_description = Erlang Binary Pretty Printer
+pkg_binpp_homepage = https://github.com/jtendo/binpp
+pkg_binpp_fetch = git
+pkg_binpp_repo = https://github.com/jtendo/binpp
+pkg_binpp_commit = master
+
+PACKAGES += bisect
+pkg_bisect_name = bisect
+pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang
+pkg_bisect_homepage = https://github.com/knutin/bisect
+pkg_bisect_fetch = git
+pkg_bisect_repo = https://github.com/knutin/bisect
+pkg_bisect_commit = master
+
+PACKAGES += bitcask
+pkg_bitcask_name = bitcask
+pkg_bitcask_description = because you need another key/value storage engine
+pkg_bitcask_homepage = https://github.com/basho/bitcask
+pkg_bitcask_fetch = git
+pkg_bitcask_repo = https://github.com/basho/bitcask
+pkg_bitcask_commit = develop
+
+PACKAGES += bitstore
+pkg_bitstore_name = bitstore
+pkg_bitstore_description = A document based ontology development environment
+pkg_bitstore_homepage = https://github.com/bdionne/bitstore
+pkg_bitstore_fetch = git
+pkg_bitstore_repo = https://github.com/bdionne/bitstore
+pkg_bitstore_commit = master
+
+PACKAGES += bootstrap
+pkg_bootstrap_name = bootstrap
+pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application.
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap
+pkg_bootstrap_fetch = git
+pkg_bootstrap_repo = https://github.com/schlagert/bootstrap
+pkg_bootstrap_commit = master
+
+PACKAGES += boss
+pkg_boss_name = boss
+pkg_boss_description = Erlang web MVC, now featuring Comet
+pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_fetch = git
+pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss
+pkg_boss_commit = master
+
+PACKAGES += boss_db
+pkg_boss_db_name = boss_db
+pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang
+pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db
+pkg_boss_db_fetch = git
+pkg_boss_db_repo = https://github.com/ErlyORM/boss_db
+pkg_boss_db_commit = master
+
+PACKAGES += brod
+pkg_brod_name = brod
+pkg_brod_description = Kafka client in Erlang
+pkg_brod_homepage = https://github.com/klarna/brod
+pkg_brod_fetch = git
+pkg_brod_repo = https://github.com/klarna/brod.git
+pkg_brod_commit = master
+
+PACKAGES += bson
+pkg_bson_name = bson
+pkg_bson_description = BSON documents in Erlang, see bsonspec.org
+pkg_bson_homepage = https://github.com/comtihon/bson-erlang
+pkg_bson_fetch = git
+pkg_bson_repo = https://github.com/comtihon/bson-erlang
+pkg_bson_commit = master
+
+PACKAGES += bullet
+pkg_bullet_name = bullet
+pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy.
+pkg_bullet_homepage = http://ninenines.eu
+pkg_bullet_fetch = git
+pkg_bullet_repo = https://github.com/ninenines/bullet
+pkg_bullet_commit = master
+
+PACKAGES += cache
+pkg_cache_name = cache
+pkg_cache_description = Erlang in-memory cache
+pkg_cache_homepage = https://github.com/fogfish/cache
+pkg_cache_fetch = git
+pkg_cache_repo = https://github.com/fogfish/cache
+pkg_cache_commit = master
+
+PACKAGES += cake
+pkg_cake_name = cake
+pkg_cake_description = Really simple terminal colorization
+pkg_cake_homepage = https://github.com/darach/cake-erl
+pkg_cake_fetch = git
+pkg_cake_repo = https://github.com/darach/cake-erl
+pkg_cake_commit = master
+
+PACKAGES += carotene
+pkg_carotene_name = carotene
+pkg_carotene_description = Real-time server
+pkg_carotene_homepage = https://github.com/carotene/carotene
+pkg_carotene_fetch = git
+pkg_carotene_repo = https://github.com/carotene/carotene
+pkg_carotene_commit = master
+
+PACKAGES += cberl
+pkg_cberl_name = cberl
+pkg_cberl_description = NIF based Erlang bindings for Couchbase
+pkg_cberl_homepage = https://github.com/chitika/cberl
+pkg_cberl_fetch = git
+pkg_cberl_repo = https://github.com/chitika/cberl
+pkg_cberl_commit = master
+
+PACKAGES += cecho
+pkg_cecho_name = cecho
+pkg_cecho_description = An ncurses library for Erlang
+pkg_cecho_homepage = https://github.com/mazenharake/cecho
+pkg_cecho_fetch = git
+pkg_cecho_repo = https://github.com/mazenharake/cecho
+pkg_cecho_commit = master
+
+PACKAGES += cferl
+pkg_cferl_name = cferl
+pkg_cferl_description = Rackspace / OpenStack Cloud Files Erlang Client
+pkg_cferl_homepage = https://github.com/ddossot/cferl
+pkg_cferl_fetch = git
+pkg_cferl_repo = https://github.com/ddossot/cferl
+pkg_cferl_commit = master
+
+PACKAGES += chaos_monkey
+pkg_chaos_monkey_name = chaos_monkey
+pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes.
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_fetch = git
+pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey
+pkg_chaos_monkey_commit = master
+
+PACKAGES += check_node
+pkg_check_node_name = check_node
+pkg_check_node_description = Nagios Scripts for monitoring Riak
+pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios
+pkg_check_node_fetch = git
+pkg_check_node_repo = https://github.com/basho-labs/riak_nagios
+pkg_check_node_commit = master
+
+PACKAGES += chronos
+pkg_chronos_name = chronos
+pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests.
+pkg_chronos_homepage = https://github.com/lehoff/chronos
+pkg_chronos_fetch = git
+pkg_chronos_repo = https://github.com/lehoff/chronos
+pkg_chronos_commit = master
+
+PACKAGES += chumak
+pkg_chumak_name = chumak
+pkg_chumak_description = Pure Erlang implementation of ZeroMQ Message Transport Protocol.
+pkg_chumak_homepage = http://choven.ca
+pkg_chumak_fetch = git
+pkg_chumak_repo = https://github.com/chovencorp/chumak
+pkg_chumak_commit = master
+
+PACKAGES += cl
+pkg_cl_name = cl
+pkg_cl_description = OpenCL binding for Erlang
+pkg_cl_homepage = https://github.com/tonyrog/cl
+pkg_cl_fetch = git
+pkg_cl_repo = https://github.com/tonyrog/cl
+pkg_cl_commit = master
+
+PACKAGES += clique
+pkg_clique_name = clique
+pkg_clique_description = CLI Framework for Erlang
+pkg_clique_homepage = https://github.com/basho/clique
+pkg_clique_fetch = git
+pkg_clique_repo = https://github.com/basho/clique
+pkg_clique_commit = develop
+
+PACKAGES += cloudi_core
+pkg_cloudi_core_name = cloudi_core
+pkg_cloudi_core_description = CloudI internal service runtime
+pkg_cloudi_core_homepage = http://cloudi.org/
+pkg_cloudi_core_fetch = git
+pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core
+pkg_cloudi_core_commit = master
+
+PACKAGES += cloudi_service_api_requests
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests
+pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support)
+pkg_cloudi_service_api_requests_homepage = http://cloudi.org/
+pkg_cloudi_service_api_requests_fetch = git
+pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests
+pkg_cloudi_service_api_requests_commit = master
+
+PACKAGES += cloudi_service_db
+pkg_cloudi_service_db_name = cloudi_service_db
+pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic)
+pkg_cloudi_service_db_homepage = http://cloudi.org/
+pkg_cloudi_service_db_fetch = git
+pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db
+pkg_cloudi_service_db_commit = master
+
+PACKAGES += cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service
+pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_fetch = git
+pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra
+pkg_cloudi_service_db_cassandra_commit = master
+
+PACKAGES += cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service
+pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_cassandra_cql_fetch = git
+pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql
+pkg_cloudi_service_db_cassandra_cql_commit = master
+
+PACKAGES += cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service
+pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/
+pkg_cloudi_service_db_couchdb_fetch = git
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb
+pkg_cloudi_service_db_couchdb_commit = master
+
+PACKAGES += cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service
+pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/
+pkg_cloudi_service_db_elasticsearch_fetch = git
+pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch
+pkg_cloudi_service_db_elasticsearch_commit = master
+
+PACKAGES += cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_description = memcached CloudI Service
+pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/
+pkg_cloudi_service_db_memcached_fetch = git
+pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached
+pkg_cloudi_service_db_memcached_commit = master
+
+PACKAGES += cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_description = MySQL CloudI Service
+pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_mysql_fetch = git
+pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql
+pkg_cloudi_service_db_mysql_commit = master
+
+PACKAGES += cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service
+pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/
+pkg_cloudi_service_db_pgsql_fetch = git
+pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql
+pkg_cloudi_service_db_pgsql_commit = master
+
+PACKAGES += cloudi_service_db_riak
+pkg_cloudi_service_db_riak_name = cloudi_service_db_riak
+pkg_cloudi_service_db_riak_description = Riak CloudI Service
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/
+pkg_cloudi_service_db_riak_fetch = git
+pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak
+pkg_cloudi_service_db_riak_commit = master
+
+PACKAGES += cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service
+pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/
+pkg_cloudi_service_db_tokyotyrant_fetch = git
+pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant
+pkg_cloudi_service_db_tokyotyrant_commit = master
+
+PACKAGES += cloudi_service_filesystem
+pkg_cloudi_service_filesystem_name = cloudi_service_filesystem
+pkg_cloudi_service_filesystem_description = Filesystem CloudI Service
+pkg_cloudi_service_filesystem_homepage = http://cloudi.org/
+pkg_cloudi_service_filesystem_fetch = git
+pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem
+pkg_cloudi_service_filesystem_commit = master
+
+PACKAGES += cloudi_service_http_client
+pkg_cloudi_service_http_client_name = cloudi_service_http_client
+pkg_cloudi_service_http_client_description = HTTP client CloudI Service
+pkg_cloudi_service_http_client_homepage = http://cloudi.org/
+pkg_cloudi_service_http_client_fetch = git
+pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client
+pkg_cloudi_service_http_client_commit = master
+
+PACKAGES += cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service
+pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/
+pkg_cloudi_service_http_cowboy_fetch = git
+pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy
+pkg_cloudi_service_http_cowboy_commit = master
+
+PACKAGES += cloudi_service_http_elli
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli
+pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service
+pkg_cloudi_service_http_elli_homepage = http://cloudi.org/
+pkg_cloudi_service_http_elli_fetch = git
+pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli
+pkg_cloudi_service_http_elli_commit = master
+
+PACKAGES += cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service
+pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/
+pkg_cloudi_service_map_reduce_fetch = git
+pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce
+pkg_cloudi_service_map_reduce_commit = master
+
+PACKAGES += cloudi_service_oauth1
+pkg_cloudi_service_oauth1_name = cloudi_service_oauth1
+pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service
+pkg_cloudi_service_oauth1_homepage = http://cloudi.org/
+pkg_cloudi_service_oauth1_fetch = git
+pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1
+pkg_cloudi_service_oauth1_commit = master
+
+PACKAGES += cloudi_service_queue
+pkg_cloudi_service_queue_name = cloudi_service_queue
+pkg_cloudi_service_queue_description = Persistent Queue Service
+pkg_cloudi_service_queue_homepage = http://cloudi.org/
+pkg_cloudi_service_queue_fetch = git
+pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue
+pkg_cloudi_service_queue_commit = master
+
+PACKAGES += cloudi_service_quorum
+pkg_cloudi_service_quorum_name = cloudi_service_quorum
+pkg_cloudi_service_quorum_description = CloudI Quorum Service
+pkg_cloudi_service_quorum_homepage = http://cloudi.org/
+pkg_cloudi_service_quorum_fetch = git
+pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum
+pkg_cloudi_service_quorum_commit = master
+
+PACKAGES += cloudi_service_router
+pkg_cloudi_service_router_name = cloudi_service_router
+pkg_cloudi_service_router_description = CloudI Router Service
+pkg_cloudi_service_router_homepage = http://cloudi.org/
+pkg_cloudi_service_router_fetch = git
+pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router
+pkg_cloudi_service_router_commit = master
+
+PACKAGES += cloudi_service_tcp
+pkg_cloudi_service_tcp_name = cloudi_service_tcp
+pkg_cloudi_service_tcp_description = TCP CloudI Service
+pkg_cloudi_service_tcp_homepage = http://cloudi.org/
+pkg_cloudi_service_tcp_fetch = git
+pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp
+pkg_cloudi_service_tcp_commit = master
+
+PACKAGES += cloudi_service_timers
+pkg_cloudi_service_timers_name = cloudi_service_timers
+pkg_cloudi_service_timers_description = Timers CloudI Service
+pkg_cloudi_service_timers_homepage = http://cloudi.org/
+pkg_cloudi_service_timers_fetch = git
+pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers
+pkg_cloudi_service_timers_commit = master
+
+PACKAGES += cloudi_service_udp
+pkg_cloudi_service_udp_name = cloudi_service_udp
+pkg_cloudi_service_udp_description = UDP CloudI Service
+pkg_cloudi_service_udp_homepage = http://cloudi.org/
+pkg_cloudi_service_udp_fetch = git
+pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp
+pkg_cloudi_service_udp_commit = master
+
+PACKAGES += cloudi_service_validate
+pkg_cloudi_service_validate_name = cloudi_service_validate
+pkg_cloudi_service_validate_description = CloudI Validate Service
+pkg_cloudi_service_validate_homepage = http://cloudi.org/
+pkg_cloudi_service_validate_fetch = git
+pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate
+pkg_cloudi_service_validate_commit = master
+
+PACKAGES += cloudi_service_zeromq
+pkg_cloudi_service_zeromq_name = cloudi_service_zeromq
+pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service
+pkg_cloudi_service_zeromq_homepage = http://cloudi.org/
+pkg_cloudi_service_zeromq_fetch = git
+pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq
+pkg_cloudi_service_zeromq_commit = master
+
+PACKAGES += cluster_info
+pkg_cluster_info_name = cluster_info
+pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app
+pkg_cluster_info_homepage = https://github.com/basho/cluster_info
+pkg_cluster_info_fetch = git
+pkg_cluster_info_repo = https://github.com/basho/cluster_info
+pkg_cluster_info_commit = master
+
+PACKAGES += color
+pkg_color_name = color
+pkg_color_description = ANSI colors for your Erlang
+pkg_color_homepage = https://github.com/julianduque/erlang-color
+pkg_color_fetch = git
+pkg_color_repo = https://github.com/julianduque/erlang-color
+pkg_color_commit = master
+
+PACKAGES += confetti
+pkg_confetti_name = confetti
+pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids
+pkg_confetti_homepage = https://github.com/jtendo/confetti
+pkg_confetti_fetch = git
+pkg_confetti_repo = https://github.com/jtendo/confetti
+pkg_confetti_commit = master
+
+PACKAGES += couchbeam
+pkg_couchbeam_name = couchbeam
+pkg_couchbeam_description = Apache CouchDB client in Erlang
+pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam
+pkg_couchbeam_fetch = git
+pkg_couchbeam_repo = https://github.com/benoitc/couchbeam
+pkg_couchbeam_commit = master
+
+PACKAGES += covertool
+pkg_covertool_name = covertool
+pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports
+pkg_covertool_homepage = https://github.com/idubrov/covertool
+pkg_covertool_fetch = git
+pkg_covertool_repo = https://github.com/idubrov/covertool
+pkg_covertool_commit = master
+
+PACKAGES += cowboy
+pkg_cowboy_name = cowboy
+pkg_cowboy_description = Small, fast and modular HTTP server.
+pkg_cowboy_homepage = http://ninenines.eu
+pkg_cowboy_fetch = git
+pkg_cowboy_repo = https://github.com/ninenines/cowboy
+pkg_cowboy_commit = 1.0.4
+
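+# Usage sketch (an assumption about the typical erlang.mk workflow, illustrated
+# with the cowboy entry above): the pkg_* variables in this index only supply
+# defaults for the fetch method, repository and commit. A project Makefile
+# normally pulls a package by listing it in DEPS, and may pin or override the
+# source with a dep_* line of the form "dep_NAME = fetch_method repo commit":
+#
+#     DEPS = cowboy
+#     dep_cowboy = git https://github.com/ninenines/cowboy 1.0.4
+#     include erlang.mk
+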
+PACKAGES += cowdb
+pkg_cowdb_name = cowdb
+pkg_cowdb_description = Pure Key/Value database library for Erlang Applications
+pkg_cowdb_homepage = https://github.com/refuge/cowdb
+pkg_cowdb_fetch = git
+pkg_cowdb_repo = https://github.com/refuge/cowdb
+pkg_cowdb_commit = master
+
+PACKAGES += cowlib
+pkg_cowlib_name = cowlib
+pkg_cowlib_description = Support library for manipulating Web protocols.
+pkg_cowlib_homepage = http://ninenines.eu
+pkg_cowlib_fetch = git
+pkg_cowlib_repo = https://github.com/ninenines/cowlib
+pkg_cowlib_commit = 1.0.2
+
+PACKAGES += cpg
+pkg_cpg_name = cpg
+pkg_cpg_description = CloudI Process Groups
+pkg_cpg_homepage = https://github.com/okeuday/cpg
+pkg_cpg_fetch = git
+pkg_cpg_repo = https://github.com/okeuday/cpg
+pkg_cpg_commit = master
+
+PACKAGES += cqerl
+pkg_cqerl_name = cqerl
+pkg_cqerl_description = Native Erlang CQL client for Cassandra
+pkg_cqerl_homepage = https://matehat.github.io/cqerl/
+pkg_cqerl_fetch = git
+pkg_cqerl_repo = https://github.com/matehat/cqerl
+pkg_cqerl_commit = master
+
+PACKAGES += cr
+pkg_cr_name = cr
+pkg_cr_description = Chain Replication
+pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm
+pkg_cr_fetch = git
+pkg_cr_repo = https://github.com/spawnproc/cr
+pkg_cr_commit = master
+
+PACKAGES += cuttlefish
+pkg_cuttlefish_name = cuttlefish
+pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me?
+pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish
+pkg_cuttlefish_fetch = git
+pkg_cuttlefish_repo = https://github.com/basho/cuttlefish
+pkg_cuttlefish_commit = master
+
+PACKAGES += damocles
+pkg_damocles_name = damocles
+pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box.
+pkg_damocles_homepage = https://github.com/lostcolony/damocles
+pkg_damocles_fetch = git
+pkg_damocles_repo = https://github.com/lostcolony/damocles
+pkg_damocles_commit = master
+
+PACKAGES += debbie
+pkg_debbie_name = debbie
+pkg_debbie_description = .DEB Built In Erlang
+pkg_debbie_homepage = https://github.com/crownedgrouse/debbie
+pkg_debbie_fetch = git
+pkg_debbie_repo = https://github.com/crownedgrouse/debbie
+pkg_debbie_commit = master
+
+PACKAGES += decimal
+pkg_decimal_name = decimal
+pkg_decimal_description = An Erlang decimal arithmetic library
+pkg_decimal_homepage = https://github.com/tim/erlang-decimal
+pkg_decimal_fetch = git
+pkg_decimal_repo = https://github.com/tim/erlang-decimal
+pkg_decimal_commit = master
+
+PACKAGES += detergent
+pkg_detergent_name = detergent
+pkg_detergent_description = An emulsifying Erlang SOAP library
+pkg_detergent_homepage = https://github.com/devinus/detergent
+pkg_detergent_fetch = git
+pkg_detergent_repo = https://github.com/devinus/detergent
+pkg_detergent_commit = master
+
+PACKAGES += detest
+pkg_detest_name = detest
+pkg_detest_description = Tool for running tests on a cluster of erlang nodes
+pkg_detest_homepage = https://github.com/biokoda/detest
+pkg_detest_fetch = git
+pkg_detest_repo = https://github.com/biokoda/detest
+pkg_detest_commit = master
+
+PACKAGES += dh_date
+pkg_dh_date_name = dh_date
+pkg_dh_date_description = Date formatting / parsing library for erlang
+pkg_dh_date_homepage = https://github.com/daleharvey/dh_date
+pkg_dh_date_fetch = git
+pkg_dh_date_repo = https://github.com/daleharvey/dh_date
+pkg_dh_date_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dynamic_compile
+pkg_dynamic_compile_name = dynamic_compile
+pkg_dynamic_compile_description = compile and load erlang modules from string input
+pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_fetch = git
+pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile
+pkg_dynamic_compile_commit = master
+
+PACKAGES += e2
+pkg_e2_name = e2
+pkg_e2_description = Library to simplify writing correct OTP applications.
+pkg_e2_homepage = http://e2project.org
+pkg_e2_fetch = git
+pkg_e2_repo = https://github.com/gar1t/e2
+pkg_e2_commit = master
+
+PACKAGES += eamf
+pkg_eamf_name = eamf
+pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang
+pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_fetch = git
+pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf
+pkg_eamf_commit = master
+
+PACKAGES += eavro
+pkg_eavro_name = eavro
+pkg_eavro_description = Apache Avro encoder/decoder
+pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_fetch = git
+pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro
+pkg_eavro_commit = master
+
+PACKAGES += ecapnp
+pkg_ecapnp_name = ecapnp
+pkg_ecapnp_description = Cap'n Proto library for Erlang
+pkg_ecapnp_homepage = https://github.com/kaos/ecapnp
+pkg_ecapnp_fetch = git
+pkg_ecapnp_repo = https://github.com/kaos/ecapnp
+pkg_ecapnp_commit = master
+
+PACKAGES += econfig
+pkg_econfig_name = econfig
+pkg_econfig_description = simple Erlang config handler using INI files
+pkg_econfig_homepage = https://github.com/benoitc/econfig
+pkg_econfig_fetch = git
+pkg_econfig_repo = https://github.com/benoitc/econfig
+pkg_econfig_commit = master
+
+PACKAGES += edate
+pkg_edate_name = edate
+pkg_edate_description = date manipulation library for erlang
+pkg_edate_homepage = https://github.com/dweldon/edate
+pkg_edate_fetch = git
+pkg_edate_repo = https://github.com/dweldon/edate
+pkg_edate_commit = master
+
+PACKAGES += edgar
+pkg_edgar_name = edgar
+pkg_edgar_description = Erlang Does GNU AR
+pkg_edgar_homepage = https://github.com/crownedgrouse/edgar
+pkg_edgar_fetch = git
+pkg_edgar_repo = https://github.com/crownedgrouse/edgar
+pkg_edgar_commit = master
+
+PACKAGES += edis
+pkg_edis_name = edis
+pkg_edis_description = An Erlang implementation of Redis KV Store
+pkg_edis_homepage = http://inaka.github.com/edis/
+pkg_edis_fetch = git
+pkg_edis_repo = https://github.com/inaka/edis
+pkg_edis_commit = master
+
+PACKAGES += edns
+pkg_edns_name = edns
+pkg_edns_description = Erlang/OTP DNS server
+pkg_edns_homepage = https://github.com/hcvst/erlang-dns
+pkg_edns_fetch = git
+pkg_edns_repo = https://github.com/hcvst/erlang-dns
+pkg_edns_commit = master
+
+PACKAGES += edown
+pkg_edown_name = edown
+pkg_edown_description = EDoc extension for generating Github-flavored Markdown
+pkg_edown_homepage = https://github.com/uwiger/edown
+pkg_edown_fetch = git
+pkg_edown_repo = https://github.com/uwiger/edown
+pkg_edown_commit = master
+
+PACKAGES += eep
+pkg_eep_name = eep
+pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy
+pkg_eep_homepage = https://github.com/virtan/eep
+pkg_eep_fetch = git
+pkg_eep_repo = https://github.com/virtan/eep
+pkg_eep_commit = master
+
+PACKAGES += eep_app
+pkg_eep_app_name = eep_app
+pkg_eep_app_description = Embedded Event Processing
+pkg_eep_app_homepage = https://github.com/darach/eep-erl
+pkg_eep_app_fetch = git
+pkg_eep_app_repo = https://github.com/darach/eep-erl
+pkg_eep_app_commit = master
+
+PACKAGES += efene
+pkg_efene_name = efene
+pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX
+pkg_efene_homepage = https://github.com/efene/efene
+pkg_efene_fetch = git
+pkg_efene_repo = https://github.com/efene/efene
+pkg_efene_commit = master
+
+PACKAGES += egeoip
+pkg_egeoip_name = egeoip
+pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database.
+pkg_egeoip_homepage = https://github.com/mochi/egeoip
+pkg_egeoip_fetch = git
+pkg_egeoip_repo = https://github.com/mochi/egeoip
+pkg_egeoip_commit = master
+
+PACKAGES += ehsa
+pkg_ehsa_name = ehsa
+pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules
+pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_fetch = hg
+pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa
+pkg_ehsa_commit = default
+
+PACKAGES += ej
+pkg_ej_name = ej
+pkg_ej_description = Helper module for working with Erlang terms representing JSON
+pkg_ej_homepage = https://github.com/seth/ej
+pkg_ej_fetch = git
+pkg_ej_repo = https://github.com/seth/ej
+pkg_ej_commit = master
+
+PACKAGES += ejabberd
+pkg_ejabberd_name = ejabberd
+pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform
+pkg_ejabberd_homepage = https://github.com/processone/ejabberd
+pkg_ejabberd_fetch = git
+pkg_ejabberd_repo = https://github.com/processone/ejabberd
+pkg_ejabberd_commit = master
+
+PACKAGES += ejwt
+pkg_ejwt_name = ejwt
+pkg_ejwt_description = erlang library for JSON Web Token
+pkg_ejwt_homepage = https://github.com/artefactop/ejwt
+pkg_ejwt_fetch = git
+pkg_ejwt_repo = https://github.com/artefactop/ejwt
+pkg_ejwt_commit = master
+
+PACKAGES += ekaf
+pkg_ekaf_name = ekaf
+pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang.
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf
+pkg_ekaf_fetch = git
+pkg_ekaf_repo = https://github.com/helpshift/ekaf
+pkg_ekaf_commit = master
+
+PACKAGES += elarm
+pkg_elarm_name = elarm
+pkg_elarm_description = Alarm Manager for Erlang.
+pkg_elarm_homepage = https://github.com/esl/elarm
+pkg_elarm_fetch = git
+pkg_elarm_repo = https://github.com/esl/elarm
+pkg_elarm_commit = master
+
+PACKAGES += eleveldb
+pkg_eleveldb_name = eleveldb
+pkg_eleveldb_description = Erlang LevelDB API
+pkg_eleveldb_homepage = https://github.com/basho/eleveldb
+pkg_eleveldb_fetch = git
+pkg_eleveldb_repo = https://github.com/basho/eleveldb
+pkg_eleveldb_commit = master
+
+PACKAGES += elixir
+pkg_elixir_name = elixir
+pkg_elixir_description = Elixir is a dynamic, functional language designed for building scalable and maintainable applications
+pkg_elixir_homepage = https://elixir-lang.org/
+pkg_elixir_fetch = git
+pkg_elixir_repo = https://github.com/elixir-lang/elixir
+pkg_elixir_commit = master
+
+PACKAGES += elli
+pkg_elli_name = elli
+pkg_elli_description = Simple, robust and performant Erlang web server
+pkg_elli_homepage = https://github.com/elli-lib/elli
+pkg_elli_fetch = git
+pkg_elli_repo = https://github.com/elli-lib/elli
+pkg_elli_commit = master
+
+PACKAGES += elvis
+pkg_elvis_name = elvis
+pkg_elvis_description = Erlang Style Reviewer
+pkg_elvis_homepage = https://github.com/inaka/elvis
+pkg_elvis_fetch = git
+pkg_elvis_repo = https://github.com/inaka/elvis
+pkg_elvis_commit = master
+
+PACKAGES += emagick
+pkg_emagick_name = emagick
+pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool.
+pkg_emagick_homepage = https://github.com/kivra/emagick
+pkg_emagick_fetch = git
+pkg_emagick_repo = https://github.com/kivra/emagick
+pkg_emagick_commit = master
+
+PACKAGES += emysql
+pkg_emysql_name = emysql
+pkg_emysql_description = Stable, pure Erlang MySQL driver.
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql
+pkg_emysql_fetch = git
+pkg_emysql_repo = https://github.com/Eonblast/Emysql
+pkg_emysql_commit = master
+
+PACKAGES += enm
+pkg_enm_name = enm
+pkg_enm_description = Erlang driver for nanomsg
+pkg_enm_homepage = https://github.com/basho/enm
+pkg_enm_fetch = git
+pkg_enm_repo = https://github.com/basho/enm
+pkg_enm_commit = master
+
+PACKAGES += entop
+pkg_entop_name = entop
+pkg_entop_description = A top-like tool for monitoring an Erlang node
+pkg_entop_homepage = https://github.com/mazenharake/entop
+pkg_entop_fetch = git
+pkg_entop_repo = https://github.com/mazenharake/entop
+pkg_entop_commit = master
+
+PACKAGES += epcap
+pkg_epcap_name = epcap
+pkg_epcap_description = Erlang packet capture interface using pcap
+pkg_epcap_homepage = https://github.com/msantos/epcap
+pkg_epcap_fetch = git
+pkg_epcap_repo = https://github.com/msantos/epcap
+pkg_epcap_commit = master
+
+PACKAGES += eper
+pkg_eper_name = eper
+pkg_eper_description = Erlang performance and debugging tools.
+pkg_eper_homepage = https://github.com/massemanet/eper
+pkg_eper_fetch = git
+pkg_eper_repo = https://github.com/massemanet/eper
+pkg_eper_commit = master
+
+PACKAGES += epgsql
+pkg_epgsql_name = epgsql
+pkg_epgsql_description = Erlang PostgreSQL client library.
+pkg_epgsql_homepage = https://github.com/epgsql/epgsql
+pkg_epgsql_fetch = git
+pkg_epgsql_repo = https://github.com/epgsql/epgsql
+pkg_epgsql_commit = master
+
+PACKAGES += episcina
+pkg_episcina_name = episcina
+pkg_episcina_description = A simple, non-intrusive resource pool for connections
+pkg_episcina_homepage = https://github.com/erlware/episcina
+pkg_episcina_fetch = git
+pkg_episcina_repo = https://github.com/erlware/episcina
+pkg_episcina_commit = master
+
+PACKAGES += eplot
+pkg_eplot_name = eplot
+pkg_eplot_description = A plot engine written in erlang.
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot
+pkg_eplot_fetch = git
+pkg_eplot_repo = https://github.com/psyeugenic/eplot
+pkg_eplot_commit = master
+
+PACKAGES += epocxy
+pkg_epocxy_name = epocxy
+pkg_epocxy_description = Erlang Patterns of Concurrency
+pkg_epocxy_homepage = https://github.com/duomark/epocxy
+pkg_epocxy_fetch = git
+pkg_epocxy_repo = https://github.com/duomark/epocxy
+pkg_epocxy_commit = master
+
+PACKAGES += epubnub
+pkg_epubnub_name = epubnub
+pkg_epubnub_description = Erlang PubNub API
+pkg_epubnub_homepage = https://github.com/tsloughter/epubnub
+pkg_epubnub_fetch = git
+pkg_epubnub_repo = https://github.com/tsloughter/epubnub
+pkg_epubnub_commit = master
+
+PACKAGES += eqm
+pkg_eqm_name = eqm
+pkg_eqm_description = Erlang pub sub with supply-demand channels
+pkg_eqm_homepage = https://github.com/loucash/eqm
+pkg_eqm_fetch = git
+pkg_eqm_repo = https://github.com/loucash/eqm
+pkg_eqm_commit = master
+
+PACKAGES += eredis
+pkg_eredis_name = eredis
+pkg_eredis_description = Erlang Redis client
+pkg_eredis_homepage = https://github.com/wooga/eredis
+pkg_eredis_fetch = git
+pkg_eredis_repo = https://github.com/wooga/eredis
+pkg_eredis_commit = master
+
+PACKAGES += eredis_pool
+pkg_eredis_pool_name = eredis_pool
+pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy.
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_fetch = git
+pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool
+pkg_eredis_pool_commit = master
+
+PACKAGES += erl_streams
+pkg_erl_streams_name = erl_streams
+pkg_erl_streams_description = Streams in Erlang
+pkg_erl_streams_homepage = https://github.com/epappas/erl_streams
+pkg_erl_streams_fetch = git
+pkg_erl_streams_repo = https://github.com/epappas/erl_streams
+pkg_erl_streams_commit = master
+
+PACKAGES += erlang_cep
+pkg_erlang_cep_name = erlang_cep
+pkg_erlang_cep_description = A basic CEP package written in erlang
+pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_fetch = git
+pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep
+pkg_erlang_cep_commit = master
+
+PACKAGES += erlang_js
+pkg_erlang_js_name = erlang_js
+pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime.
+pkg_erlang_js_homepage = https://github.com/basho/erlang_js
+pkg_erlang_js_fetch = git
+pkg_erlang_js_repo = https://github.com/basho/erlang_js
+pkg_erlang_js_commit = master
+
+PACKAGES += erlang_localtime
+pkg_erlang_localtime_name = erlang_localtime
+pkg_erlang_localtime_description = Erlang library for conversion from one local time to another
+pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_fetch = git
+pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime
+pkg_erlang_localtime_commit = master
+
+PACKAGES += erlang_smtp
+pkg_erlang_smtp_name = erlang_smtp
+pkg_erlang_smtp_description = Erlang SMTP and POP3 server code.
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_fetch = git
+pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp
+pkg_erlang_smtp_commit = master
+
+PACKAGES += erlang_term
+pkg_erlang_term_name = erlang_term
+pkg_erlang_term_description = Erlang Term Info
+pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term
+pkg_erlang_term_fetch = git
+pkg_erlang_term_repo = https://github.com/okeuday/erlang_term
+pkg_erlang_term_commit = master
+
+PACKAGES += erlastic_search
+pkg_erlastic_search_name = erlastic_search
+pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface.
+pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_fetch = git
+pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search
+pkg_erlastic_search_commit = master
+
+PACKAGES += erlasticsearch
+pkg_erlasticsearch_name = erlasticsearch
+pkg_erlasticsearch_description = Erlang thrift interface to elastic_search
+pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_fetch = git
+pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch
+pkg_erlasticsearch_commit = master
+
+PACKAGES += erlbrake
+pkg_erlbrake_name = erlbrake
+pkg_erlbrake_description = Erlang Airbrake notification client
+pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_fetch = git
+pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake
+pkg_erlbrake_commit = master
+
+PACKAGES += erlcloud
+pkg_erlcloud_name = erlcloud
+pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB)
+pkg_erlcloud_homepage = https://github.com/gleber/erlcloud
+pkg_erlcloud_fetch = git
+pkg_erlcloud_repo = https://github.com/gleber/erlcloud
+pkg_erlcloud_commit = master
+
+PACKAGES += erlcron
+pkg_erlcron_name = erlcron
+pkg_erlcron_description = Erlang cronish system
+pkg_erlcron_homepage = https://github.com/erlware/erlcron
+pkg_erlcron_fetch = git
+pkg_erlcron_repo = https://github.com/erlware/erlcron
+pkg_erlcron_commit = master
+
+PACKAGES += erldb
+pkg_erldb_name = erldb
+pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang
+pkg_erldb_homepage = http://erldb.org
+pkg_erldb_fetch = git
+pkg_erldb_repo = https://github.com/erldb/erldb
+pkg_erldb_commit = master
+
+PACKAGES += erldis
+pkg_erldis_name = erldis
+pkg_erldis_description = redis erlang client library
+pkg_erldis_homepage = https://github.com/cstar/erldis
+pkg_erldis_fetch = git
+pkg_erldis_repo = https://github.com/cstar/erldis
+pkg_erldis_commit = master
+
+PACKAGES += erldns
+pkg_erldns_name = erldns
+pkg_erldns_description = DNS server, in erlang.
+pkg_erldns_homepage = https://github.com/aetrion/erl-dns
+pkg_erldns_fetch = git
+pkg_erldns_repo = https://github.com/aetrion/erl-dns
+pkg_erldns_commit = master
+
+PACKAGES += erldocker
+pkg_erldocker_name = erldocker
+pkg_erldocker_description = Docker Remote API client for Erlang
+pkg_erldocker_homepage = https://github.com/proger/erldocker
+pkg_erldocker_fetch = git
+pkg_erldocker_repo = https://github.com/proger/erldocker
+pkg_erldocker_commit = master
+
+PACKAGES += erlfsmon
+pkg_erlfsmon_name = erlfsmon
+pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX
+pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon
+pkg_erlfsmon_fetch = git
+pkg_erlfsmon_repo = https://github.com/proger/erlfsmon
+pkg_erlfsmon_commit = master
+
+PACKAGES += erlgit
+pkg_erlgit_name = erlgit
+pkg_erlgit_description = Erlang convenience wrapper around git executable
+pkg_erlgit_homepage = https://github.com/gleber/erlgit
+pkg_erlgit_fetch = git
+pkg_erlgit_repo = https://github.com/gleber/erlgit
+pkg_erlgit_commit = master
+
+PACKAGES += erlguten
+pkg_erlguten_name = erlguten
+pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written purely in Erlang.
+pkg_erlguten_homepage = https://github.com/richcarl/erlguten
+pkg_erlguten_fetch = git
+pkg_erlguten_repo = https://github.com/richcarl/erlguten
+pkg_erlguten_commit = master
+
+PACKAGES += erlmc
+pkg_erlmc_name = erlmc
+pkg_erlmc_description = Erlang memcached binary protocol client
+pkg_erlmc_homepage = https://github.com/jkvor/erlmc
+pkg_erlmc_fetch = git
+pkg_erlmc_repo = https://github.com/jkvor/erlmc
+pkg_erlmc_commit = master
+
+PACKAGES += erlmongo
+pkg_erlmongo_name = erlmongo
+pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support
+pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_fetch = git
+pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo
+pkg_erlmongo_commit = master
+
+PACKAGES += erlog
+pkg_erlog_name = erlog
+pkg_erlog_description = Prolog interpreter in and for Erlang
+pkg_erlog_homepage = https://github.com/rvirding/erlog
+pkg_erlog_fetch = git
+pkg_erlog_repo = https://github.com/rvirding/erlog
+pkg_erlog_commit = master
+
+PACKAGES += erlpass
+pkg_erlpass_name = erlpass
+pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever.
+pkg_erlpass_homepage = https://github.com/ferd/erlpass
+pkg_erlpass_fetch = git
+pkg_erlpass_repo = https://github.com/ferd/erlpass
+pkg_erlpass_commit = master
+
+PACKAGES += erlport
+pkg_erlport_name = erlport
+pkg_erlport_description = ErlPort - connect Erlang to other languages
+pkg_erlport_homepage = https://github.com/hdima/erlport
+pkg_erlport_fetch = git
+pkg_erlport_repo = https://github.com/hdima/erlport
+pkg_erlport_commit = master
+
+PACKAGES += erlsh
+pkg_erlsh_name = erlsh
+pkg_erlsh_description = Erlang shell tools
+pkg_erlsh_homepage = https://github.com/proger/erlsh
+pkg_erlsh_fetch = git
+pkg_erlsh_repo = https://github.com/proger/erlsh
+pkg_erlsh_commit = master
+
+PACKAGES += erlsha2
+pkg_erlsha2_name = erlsha2
+pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs.
+pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2
+pkg_erlsha2_fetch = git
+pkg_erlsha2_repo = https://github.com/vinoski/erlsha2
+pkg_erlsha2_commit = master
+
+PACKAGES += erlsom
+pkg_erlsom_name = erlsom
+pkg_erlsom_description = XML parser for Erlang
+pkg_erlsom_homepage = https://github.com/willemdj/erlsom
+pkg_erlsom_fetch = git
+pkg_erlsom_repo = https://github.com/willemdj/erlsom
+pkg_erlsom_commit = master
+
+PACKAGES += erlubi
+pkg_erlubi_name = erlubi
+pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer)
+pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi
+pkg_erlubi_fetch = git
+pkg_erlubi_repo = https://github.com/krestenkrab/erlubi
+pkg_erlubi_commit = master
+
+PACKAGES += erlvolt
+pkg_erlvolt_name = erlvolt
+pkg_erlvolt_description = VoltDB Erlang Client Driver
+pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_fetch = git
+pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang
+pkg_erlvolt_commit = master
+
+PACKAGES += erlware_commons
+pkg_erlware_commons_name = erlware_commons
+pkg_erlware_commons_description = Erlware Commons is an Erlware project focused on all aspects of reusable Erlang components.
+pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_fetch = git
+pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons
+pkg_erlware_commons_commit = master
+
+PACKAGES += erlydtl
+pkg_erlydtl_name = erlydtl
+pkg_erlydtl_description = Django Template Language for Erlang.
+pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_fetch = git
+pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl
+pkg_erlydtl_commit = master
+
+PACKAGES += errd
+pkg_errd_name = errd
+pkg_errd_description = Erlang RRDTool library
+pkg_errd_homepage = https://github.com/archaelus/errd
+pkg_errd_fetch = git
+pkg_errd_repo = https://github.com/archaelus/errd
+pkg_errd_commit = master
+
+PACKAGES += erserve
+pkg_erserve_name = erserve
+pkg_erserve_description = Erlang/Rserve communication interface
+pkg_erserve_homepage = https://github.com/del/erserve
+pkg_erserve_fetch = git
+pkg_erserve_repo = https://github.com/del/erserve
+pkg_erserve_commit = master
+
+PACKAGES += erwa
+pkg_erwa_name = erwa
+pkg_erwa_description = A WAMP router and client written in Erlang.
+pkg_erwa_homepage = https://github.com/bwegh/erwa
+pkg_erwa_fetch = git
+pkg_erwa_repo = https://github.com/bwegh/erwa
+pkg_erwa_commit = master
+
+PACKAGES += escalus
+pkg_escalus_name = escalus
+pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers
+pkg_escalus_homepage = https://github.com/esl/escalus
+pkg_escalus_fetch = git
+pkg_escalus_repo = https://github.com/esl/escalus
+pkg_escalus_commit = master
+
+PACKAGES += esh_mk
+pkg_esh_mk_name = esh_mk
+pkg_esh_mk_description = esh template engine plugin for erlang.mk
+pkg_esh_mk_homepage = https://github.com/crownedgrouse/esh.mk
+pkg_esh_mk_fetch = git
+pkg_esh_mk_repo = https://github.com/crownedgrouse/esh.mk.git
+pkg_esh_mk_commit = master
+
+PACKAGES += espec
+pkg_espec_name = espec
+pkg_espec_description = ESpec: Behaviour driven development framework for Erlang
+pkg_espec_homepage = https://github.com/lucaspiller/espec
+pkg_espec_fetch = git
+pkg_espec_repo = https://github.com/lucaspiller/espec
+pkg_espec_commit = master
+
+PACKAGES += estatsd
+pkg_estatsd_name = estatsd
+pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite
+pkg_estatsd_homepage = https://github.com/RJ/estatsd
+pkg_estatsd_fetch = git
+pkg_estatsd_repo = https://github.com/RJ/estatsd
+pkg_estatsd_commit = master
+
+PACKAGES += etap
+pkg_etap_name = etap
+pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output.
+pkg_etap_homepage = https://github.com/ngerakines/etap
+pkg_etap_fetch = git
+pkg_etap_repo = https://github.com/ngerakines/etap
+pkg_etap_commit = master
+
+PACKAGES += etest
+pkg_etest_name = etest
+pkg_etest_description = A lightweight, convention over configuration test framework for Erlang
+pkg_etest_homepage = https://github.com/wooga/etest
+pkg_etest_fetch = git
+pkg_etest_repo = https://github.com/wooga/etest
+pkg_etest_commit = master
+
+PACKAGES += etest_http
+pkg_etest_http_name = etest_http
+pkg_etest_http_description = etest Assertions around HTTP (client-side)
+pkg_etest_http_homepage = https://github.com/wooga/etest_http
+pkg_etest_http_fetch = git
+pkg_etest_http_repo = https://github.com/wooga/etest_http
+pkg_etest_http_commit = master
+
+PACKAGES += etoml
+pkg_etoml_name = etoml
+pkg_etoml_description = TOML language erlang parser
+pkg_etoml_homepage = https://github.com/kalta/etoml
+pkg_etoml_fetch = git
+pkg_etoml_repo = https://github.com/kalta/etoml
+pkg_etoml_commit = master
+
+PACKAGES += eunit
+pkg_eunit_name = eunit
+pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository.
+pkg_eunit_homepage = https://github.com/richcarl/eunit
+pkg_eunit_fetch = git
+pkg_eunit_repo = https://github.com/richcarl/eunit
+pkg_eunit_commit = master
+
+PACKAGES += eunit_formatters
+pkg_eunit_formatters_name = eunit_formatters
+pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better.
+pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_fetch = git
+pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters
+pkg_eunit_formatters_commit = master
+
+PACKAGES += euthanasia
+pkg_euthanasia_name = euthanasia
+pkg_euthanasia_description = Merciful killer for your Erlang processes
+pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_fetch = git
+pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia
+pkg_euthanasia_commit = master
+
+PACKAGES += evum
+pkg_evum_name = evum
+pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM
+pkg_evum_homepage = https://github.com/msantos/evum
+pkg_evum_fetch = git
+pkg_evum_repo = https://github.com/msantos/evum
+pkg_evum_commit = master
+
+PACKAGES += exec
+pkg_exec_name = erlexec
+pkg_exec_description = Execute and control OS processes from Erlang/OTP.
+pkg_exec_homepage = http://saleyn.github.com/erlexec
+pkg_exec_fetch = git
+pkg_exec_repo = https://github.com/saleyn/erlexec
+pkg_exec_commit = master
+
+PACKAGES += exml
+pkg_exml_name = exml
+pkg_exml_description = XML parsing library in Erlang
+pkg_exml_homepage = https://github.com/paulgray/exml
+pkg_exml_fetch = git
+pkg_exml_repo = https://github.com/paulgray/exml
+pkg_exml_commit = master
+
+PACKAGES += exometer
+pkg_exometer_name = exometer
+pkg_exometer_description = Basic measurement objects and probe behavior
+pkg_exometer_homepage = https://github.com/Feuerlabs/exometer
+pkg_exometer_fetch = git
+pkg_exometer_repo = https://github.com/Feuerlabs/exometer
+pkg_exometer_commit = master
+
+PACKAGES += exs1024
+pkg_exs1024_name = exs1024
+pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang.
+pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024
+pkg_exs1024_fetch = git
+pkg_exs1024_repo = https://github.com/jj1bdx/exs1024
+pkg_exs1024_commit = master
+
+PACKAGES += exs64
+pkg_exs64_name = exs64
+pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang.
+pkg_exs64_homepage = https://github.com/jj1bdx/exs64
+pkg_exs64_fetch = git
+pkg_exs64_repo = https://github.com/jj1bdx/exs64
+pkg_exs64_commit = master
+
+PACKAGES += exsplus116
+pkg_exsplus116_name = exsplus116
+pkg_exsplus116_description = Xorshift116plus for Erlang
+pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_fetch = git
+pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116
+pkg_exsplus116_commit = master
+
+PACKAGES += exsplus128
+pkg_exsplus128_name = exsplus128
+pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang.
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_fetch = git
+pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128
+pkg_exsplus128_commit = master
+
+PACKAGES += ezmq
+pkg_ezmq_name = ezmq
+pkg_ezmq_description = zMQ implemented in Erlang
+pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_fetch = git
+pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq
+pkg_ezmq_commit = master
+
+PACKAGES += ezmtp
+pkg_ezmtp_name = ezmtp
+pkg_ezmtp_description = ZMTP protocol in pure Erlang.
+pkg_ezmtp_homepage = https://github.com/a13x/ezmtp
+pkg_ezmtp_fetch = git
+pkg_ezmtp_repo = https://github.com/a13x/ezmtp
+pkg_ezmtp_commit = master
+
+PACKAGES += fast_disk_log
+pkg_fast_disk_log_name = fast_disk_log
+pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger
+pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_fetch = git
+pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log
+pkg_fast_disk_log_commit = master
+
+PACKAGES += feeder
+pkg_feeder_name = feeder
+pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds.
+pkg_feeder_homepage = https://github.com/michaelnisi/feeder
+pkg_feeder_fetch = git
+pkg_feeder_repo = https://github.com/michaelnisi/feeder
+pkg_feeder_commit = master
+
+PACKAGES += find_crate
+pkg_find_crate_name = find_crate
+pkg_find_crate_description = Find Rust libs and exes in Erlang application priv directory
+pkg_find_crate_homepage = https://github.com/goertzenator/find_crate
+pkg_find_crate_fetch = git
+pkg_find_crate_repo = https://github.com/goertzenator/find_crate
+pkg_find_crate_commit = master
+
+PACKAGES += fix
+pkg_fix_name = fix
+pkg_fix_description = http://fixprotocol.org/ implementation.
+pkg_fix_homepage = https://github.com/maxlapshin/fix
+pkg_fix_fetch = git
+pkg_fix_repo = https://github.com/maxlapshin/fix
+pkg_fix_commit = master
+
+PACKAGES += flower
+pkg_flower_name = flower
+pkg_flower_description = FlowER - an Erlang OpenFlow development platform
+pkg_flower_homepage = https://github.com/travelping/flower
+pkg_flower_fetch = git
+pkg_flower_repo = https://github.com/travelping/flower
+pkg_flower_commit = master
+
+PACKAGES += fn
+pkg_fn_name = fn
+pkg_fn_description = Function utilities for Erlang
+pkg_fn_homepage = https://github.com/reiddraper/fn
+pkg_fn_fetch = git
+pkg_fn_repo = https://github.com/reiddraper/fn
+pkg_fn_commit = master
+
+PACKAGES += folsom
+pkg_folsom_name = folsom
+pkg_folsom_description = Expose Erlang Events and Metrics
+pkg_folsom_homepage = https://github.com/boundary/folsom
+pkg_folsom_fetch = git
+pkg_folsom_repo = https://github.com/boundary/folsom
+pkg_folsom_commit = master
+
+PACKAGES += folsom_cowboy
+pkg_folsom_cowboy_name = folsom_cowboy
+pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper.
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_fetch = git
+pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy
+pkg_folsom_cowboy_commit = master
+
+PACKAGES += folsomite
+pkg_folsomite_name = folsomite
+pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics
+pkg_folsomite_homepage = https://github.com/campanja/folsomite
+pkg_folsomite_fetch = git
+pkg_folsomite_repo = https://github.com/campanja/folsomite
+pkg_folsomite_commit = master
+
+PACKAGES += fs
+pkg_fs_name = fs
+pkg_fs_description = Erlang FileSystem Listener
+pkg_fs_homepage = https://github.com/synrc/fs
+pkg_fs_fetch = git
+pkg_fs_repo = https://github.com/synrc/fs
+pkg_fs_commit = master
+
+PACKAGES += fuse
+pkg_fuse_name = fuse
+pkg_fuse_description = A Circuit Breaker for Erlang
+pkg_fuse_homepage = https://github.com/jlouis/fuse
+pkg_fuse_fetch = git
+pkg_fuse_repo = https://github.com/jlouis/fuse
+pkg_fuse_commit = master
+
+PACKAGES += gcm
+pkg_gcm_name = gcm
+pkg_gcm_description = An Erlang application for Google Cloud Messaging
+pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang
+pkg_gcm_fetch = git
+pkg_gcm_repo = https://github.com/pdincau/gcm-erlang
+pkg_gcm_commit = master
+
+PACKAGES += gcprof
+pkg_gcprof_name = gcprof
+pkg_gcprof_description = Garbage Collection profiler for Erlang
+pkg_gcprof_homepage = https://github.com/knutin/gcprof
+pkg_gcprof_fetch = git
+pkg_gcprof_repo = https://github.com/knutin/gcprof
+pkg_gcprof_commit = master
+
+PACKAGES += geas
+pkg_geas_name = geas
+pkg_geas_description = Guess Erlang Application Scattering
+pkg_geas_homepage = https://github.com/crownedgrouse/geas
+pkg_geas_fetch = git
+pkg_geas_repo = https://github.com/crownedgrouse/geas
+pkg_geas_commit = master
+
+PACKAGES += geef
+pkg_geef_name = geef
+pkg_geef_description = Git NEEEEF (Erlang NIF)
+pkg_geef_homepage = https://github.com/carlosmn/geef
+pkg_geef_fetch = git
+pkg_geef_repo = https://github.com/carlosmn/geef
+pkg_geef_commit = master
+
+PACKAGES += gen_coap
+pkg_gen_coap_name = gen_coap
+pkg_gen_coap_description = Generic Erlang CoAP Client/Server
+pkg_gen_coap_homepage = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_fetch = git
+pkg_gen_coap_repo = https://github.com/gotthardp/gen_coap
+pkg_gen_coap_commit = master
+
+PACKAGES += gen_cycle
+pkg_gen_cycle_name = gen_cycle
+pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks
+pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_fetch = git
+pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle
+pkg_gen_cycle_commit = develop
+
+PACKAGES += gen_icmp
+pkg_gen_icmp_name = gen_icmp
+pkg_gen_icmp_description = Erlang interface to ICMP sockets
+pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_fetch = git
+pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp
+pkg_gen_icmp_commit = master
+
+PACKAGES += gen_leader
+pkg_gen_leader_name = gen_leader
+pkg_gen_leader_description = leader election behavior
+pkg_gen_leader_homepage = https://github.com/garret-smith/gen_leader_revival
+pkg_gen_leader_fetch = git
+pkg_gen_leader_repo = https://github.com/garret-smith/gen_leader_revival
+pkg_gen_leader_commit = master
+
+PACKAGES += gen_nb_server
+pkg_gen_nb_server_name = gen_nb_server
+pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers
+pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_fetch = git
+pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server
+pkg_gen_nb_server_commit = master
+
+PACKAGES += gen_paxos
+pkg_gen_paxos_name = gen_paxos
+pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol
+pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_fetch = git
+pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos
+pkg_gen_paxos_commit = master
+
+PACKAGES += gen_rpc
+pkg_gen_rpc_name = gen_rpc
+pkg_gen_rpc_description = A scalable RPC library for Erlang-VM based languages
+pkg_gen_rpc_homepage = https://github.com/priestjim/gen_rpc.git
+pkg_gen_rpc_fetch = git
+pkg_gen_rpc_repo = https://github.com/priestjim/gen_rpc.git
+pkg_gen_rpc_commit = master
+
+PACKAGES += gen_smtp
+pkg_gen_smtp_name = gen_smtp
+pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules
+pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_fetch = git
+pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp
+pkg_gen_smtp_commit = master
+
+PACKAGES += gen_tracker
+pkg_gen_tracker_name = gen_tracker
+pkg_gen_tracker_description = supervisor with ets handling of children and their metadata
+pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_fetch = git
+pkg_gen_tracker_repo = https://github.com/erlyvideo/gen_tracker
+pkg_gen_tracker_commit = master
+
+PACKAGES += gen_unix
+pkg_gen_unix_name = gen_unix
+pkg_gen_unix_description = Erlang Unix socket interface
+pkg_gen_unix_homepage = https://github.com/msantos/gen_unix
+pkg_gen_unix_fetch = git
+pkg_gen_unix_repo = https://github.com/msantos/gen_unix
+pkg_gen_unix_commit = master
+
+PACKAGES += geode
+pkg_geode_name = geode
+pkg_geode_description = geohash/proximity lookup in pure, uncut erlang.
+pkg_geode_homepage = https://github.com/bradfordw/geode
+pkg_geode_fetch = git
+pkg_geode_repo = https://github.com/bradfordw/geode
+pkg_geode_commit = master
+
+PACKAGES += getopt
+pkg_getopt_name = getopt
+pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax
+pkg_getopt_homepage = https://github.com/jcomellas/getopt
+pkg_getopt_fetch = git
+pkg_getopt_repo = https://github.com/jcomellas/getopt
+pkg_getopt_commit = master
+
+PACKAGES += gettext
+pkg_gettext_name = gettext
+pkg_gettext_description = Erlang internationalization library.
+pkg_gettext_homepage = https://github.com/etnt/gettext
+pkg_gettext_fetch = git
+pkg_gettext_repo = https://github.com/etnt/gettext
+pkg_gettext_commit = master
+
+PACKAGES += giallo
+pkg_giallo_name = giallo
+pkg_giallo_description = Small and flexible web framework on top of Cowboy
+pkg_giallo_homepage = https://github.com/kivra/giallo
+pkg_giallo_fetch = git
+pkg_giallo_repo = https://github.com/kivra/giallo
+pkg_giallo_commit = master
+
+PACKAGES += gin
+pkg_gin_name = gin
+pkg_gin_description = The guards and for Erlang parse_transform
+pkg_gin_homepage = https://github.com/mad-cocktail/gin
+pkg_gin_fetch = git
+pkg_gin_repo = https://github.com/mad-cocktail/gin
+pkg_gin_commit = master
+
+PACKAGES += gitty
+pkg_gitty_name = gitty
+pkg_gitty_description = Git access in erlang
+pkg_gitty_homepage = https://github.com/maxlapshin/gitty
+pkg_gitty_fetch = git
+pkg_gitty_repo = https://github.com/maxlapshin/gitty
+pkg_gitty_commit = master
+
+PACKAGES += gold_fever
+pkg_gold_fever_name = gold_fever
+pkg_gold_fever_description = A Treasure Hunt for Erlangers
+pkg_gold_fever_homepage = https://github.com/inaka/gold_fever
+pkg_gold_fever_fetch = git
+pkg_gold_fever_repo = https://github.com/inaka/gold_fever
+pkg_gold_fever_commit = master
+
+PACKAGES += gpb
+pkg_gpb_name = gpb
+pkg_gpb_description = A Google Protobuf implementation for Erlang
+pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_fetch = git
+pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb
+pkg_gpb_commit = master
+
+PACKAGES += gproc
+pkg_gproc_name = gproc
+pkg_gproc_description = Extended process registry for Erlang
+pkg_gproc_homepage = https://github.com/uwiger/gproc
+pkg_gproc_fetch = git
+pkg_gproc_repo = https://github.com/uwiger/gproc
+pkg_gproc_commit = master
+
+PACKAGES += grapherl
+pkg_grapherl_name = grapherl
+pkg_grapherl_description = Create graphs of Erlang systems and programs
+pkg_grapherl_homepage = https://github.com/eproxus/grapherl
+pkg_grapherl_fetch = git
+pkg_grapherl_repo = https://github.com/eproxus/grapherl
+pkg_grapherl_commit = master
+
+PACKAGES += grpc
+pkg_grpc_name = grpc
+pkg_grpc_description = gRPC server in Erlang
+pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc
+pkg_grpc_fetch = git
+pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc
+pkg_grpc_commit = master
+
+PACKAGES += grpc_client
+pkg_grpc_client_name = grpc_client
+pkg_grpc_client_description = gRPC client in Erlang
+pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client
+pkg_grpc_client_fetch = git
+pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client
+pkg_grpc_client_commit = master
+
+PACKAGES += gun
+pkg_gun_name = gun
+pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang.
+pkg_gun_homepage = http://ninenines.eu
+pkg_gun_fetch = git
+pkg_gun_repo = https://github.com/ninenines/gun
+pkg_gun_commit = master
+
+PACKAGES += gut
+pkg_gut_name = gut
+pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. Like rails generate or yeoman
+pkg_gut_homepage = https://github.com/unbalancedparentheses/gut
+pkg_gut_fetch = git
+pkg_gut_repo = https://github.com/unbalancedparentheses/gut
+pkg_gut_commit = master
+
+PACKAGES += hackney
+pkg_hackney_name = hackney
+pkg_hackney_description = simple HTTP client in Erlang
+pkg_hackney_homepage = https://github.com/benoitc/hackney
+pkg_hackney_fetch = git
+pkg_hackney_repo = https://github.com/benoitc/hackney
+pkg_hackney_commit = master
+
+PACKAGES += hamcrest
+pkg_hamcrest_name = hamcrest
+pkg_hamcrest_description = Erlang port of Hamcrest
+pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_fetch = git
+pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang
+pkg_hamcrest_commit = master
+
+PACKAGES += hanoidb
+pkg_hanoidb_name = hanoidb
+pkg_hanoidb_description = Erlang LSM BTree Storage
+pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_fetch = git
+pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb
+pkg_hanoidb_commit = master
+
+PACKAGES += hottub
+pkg_hottub_name = hottub
+pkg_hottub_description = Permanent Erlang Worker Pool
+pkg_hottub_homepage = https://github.com/bfrog/hottub
+pkg_hottub_fetch = git
+pkg_hottub_repo = https://github.com/bfrog/hottub
+pkg_hottub_commit = master
+
+PACKAGES += hpack
+pkg_hpack_name = hpack
+pkg_hpack_description = HPACK Implementation for Erlang
+pkg_hpack_homepage = https://github.com/joedevivo/hpack
+pkg_hpack_fetch = git
+pkg_hpack_repo = https://github.com/joedevivo/hpack
+pkg_hpack_commit = master
+
+PACKAGES += hyper
+pkg_hyper_name = hyper
+pkg_hyper_description = Erlang implementation of HyperLogLog
+pkg_hyper_homepage = https://github.com/GameAnalytics/hyper
+pkg_hyper_fetch = git
+pkg_hyper_repo = https://github.com/GameAnalytics/hyper
+pkg_hyper_commit = master
+
+PACKAGES += i18n
+pkg_i18n_name = i18n
+pkg_i18n_description = International components for unicode from Erlang (unicode, date, string, number, format, locale, localization, transliteration, icu4e)
+pkg_i18n_homepage = https://github.com/erlang-unicode/i18n
+pkg_i18n_fetch = git
+pkg_i18n_repo = https://github.com/erlang-unicode/i18n
+pkg_i18n_commit = master
+
+PACKAGES += ibrowse
+pkg_ibrowse_name = ibrowse
+pkg_ibrowse_description = Erlang HTTP client
+pkg_ibrowse_homepage = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_fetch = git
+pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse
+pkg_ibrowse_commit = master
+
+PACKAGES += idna
+pkg_idna_name = idna
+pkg_idna_description = Erlang IDNA lib
+pkg_idna_homepage = https://github.com/benoitc/erlang-idna
+pkg_idna_fetch = git
+pkg_idna_repo = https://github.com/benoitc/erlang-idna
+pkg_idna_commit = master
+
+PACKAGES += ierlang
+pkg_ierlang_name = ierlang
+pkg_ierlang_description = An Erlang language kernel for IPython.
+pkg_ierlang_homepage = https://github.com/robbielynch/ierlang
+pkg_ierlang_fetch = git
+pkg_ierlang_repo = https://github.com/robbielynch/ierlang
+pkg_ierlang_commit = master
+
+PACKAGES += iota
+pkg_iota_name = iota
+pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code
+pkg_iota_homepage = https://github.com/jpgneves/iota
+pkg_iota_fetch = git
+pkg_iota_repo = https://github.com/jpgneves/iota
+pkg_iota_commit = master
+
+PACKAGES += irc_lib
+pkg_irc_lib_name = irc_lib
+pkg_irc_lib_description = Erlang irc client library
+pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_fetch = git
+pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib
+pkg_irc_lib_commit = master
+
+PACKAGES += ircd
+pkg_ircd_name = ircd
+pkg_ircd_description = A pluggable IRC daemon application/library for Erlang.
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd
+pkg_ircd_fetch = git
+pkg_ircd_repo = https://github.com/tonyg/erlang-ircd
+pkg_ircd_commit = master
+
+PACKAGES += iris
+pkg_iris_name = iris
+pkg_iris_description = Iris Erlang binding
+pkg_iris_homepage = https://github.com/project-iris/iris-erl
+pkg_iris_fetch = git
+pkg_iris_repo = https://github.com/project-iris/iris-erl
+pkg_iris_commit = master
+
+PACKAGES += iso8601
+pkg_iso8601_name = iso8601
+pkg_iso8601_description = Erlang ISO 8601 date formatter/parser
+pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_fetch = git
+pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601
+pkg_iso8601_commit = master
+
+PACKAGES += jamdb_sybase
+pkg_jamdb_sybase_name = jamdb_sybase
+pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE
+pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_fetch = git
+pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase
+pkg_jamdb_sybase_commit = master
+
+PACKAGES += jerg
+pkg_jerg_name = jerg
+pkg_jerg_description = JSON Schema to Erlang Records Generator
+pkg_jerg_homepage = https://github.com/ddossot/jerg
+pkg_jerg_fetch = git
+pkg_jerg_repo = https://github.com/ddossot/jerg
+pkg_jerg_commit = master
+
+PACKAGES += jesse
+pkg_jesse_name = jesse
+pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang.
+pkg_jesse_homepage = https://github.com/for-GET/jesse
+pkg_jesse_fetch = git
+pkg_jesse_repo = https://github.com/for-GET/jesse
+pkg_jesse_commit = master
+
+PACKAGES += jiffy
+pkg_jiffy_name = jiffy
+pkg_jiffy_description = JSON NIFs for Erlang.
+pkg_jiffy_homepage = https://github.com/davisp/jiffy
+pkg_jiffy_fetch = git
+pkg_jiffy_repo = https://github.com/davisp/jiffy
+pkg_jiffy_commit = master
+
+PACKAGES += jiffy_v
+pkg_jiffy_v_name = jiffy_v
+pkg_jiffy_v_description = JSON validation utility
+pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_fetch = git
+pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v
+pkg_jiffy_v_commit = master
+
+PACKAGES += jobs
+pkg_jobs_name = jobs
+pkg_jobs_description = a Job scheduler for load regulation
+pkg_jobs_homepage = https://github.com/esl/jobs
+pkg_jobs_fetch = git
+pkg_jobs_repo = https://github.com/esl/jobs
+pkg_jobs_commit = master
+
+PACKAGES += joxa
+pkg_joxa_name = joxa
+pkg_joxa_description = A Modern Lisp for the Erlang VM
+pkg_joxa_homepage = https://github.com/joxa/joxa
+pkg_joxa_fetch = git
+pkg_joxa_repo = https://github.com/joxa/joxa
+pkg_joxa_commit = master
+
+PACKAGES += json
+pkg_json_name = json
+pkg_json_description = a high level json library for erlang (17.0+)
+pkg_json_homepage = https://github.com/talentdeficit/json
+pkg_json_fetch = git
+pkg_json_repo = https://github.com/talentdeficit/json
+pkg_json_commit = master
+
+PACKAGES += json_rec
+pkg_json_rec_name = json_rec
+pkg_json_rec_description = JSON to erlang record
+pkg_json_rec_homepage = https://github.com/justinkirby/json_rec
+pkg_json_rec_fetch = git
+pkg_json_rec_repo = https://github.com/justinkirby/json_rec
+pkg_json_rec_commit = master
+
+PACKAGES += jsone
+pkg_jsone_name = jsone
+pkg_jsone_description = An Erlang library for encoding and decoding JSON data.
+pkg_jsone_homepage = https://github.com/sile/jsone.git
+pkg_jsone_fetch = git
+pkg_jsone_repo = https://github.com/sile/jsone.git
+pkg_jsone_commit = master
+
+PACKAGES += jsonerl
+pkg_jsonerl_name = jsonerl
+pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder
+pkg_jsonerl_homepage = https://github.com/lambder/jsonerl
+pkg_jsonerl_fetch = git
+pkg_jsonerl_repo = https://github.com/lambder/jsonerl
+pkg_jsonerl_commit = master
+
+PACKAGES += jsonpath
+pkg_jsonpath_name = jsonpath
+pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation
+pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_fetch = git
+pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath
+pkg_jsonpath_commit = master
+
+PACKAGES += jsonx
+pkg_jsonx_name = jsonx
+pkg_jsonx_description = JSONX is an Erlang library, written in C, for efficiently decoding and encoding JSON.
+pkg_jsonx_homepage = https://github.com/iskra/jsonx
+pkg_jsonx_fetch = git
+pkg_jsonx_repo = https://github.com/iskra/jsonx
+pkg_jsonx_commit = master
+
+PACKAGES += jsx
+pkg_jsx_name = jsx
+pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON.
+pkg_jsx_homepage = https://github.com/talentdeficit/jsx
+pkg_jsx_fetch = git
+pkg_jsx_repo = https://github.com/talentdeficit/jsx
+pkg_jsx_commit = master
+
+PACKAGES += kafka
+pkg_kafka_name = kafka
+pkg_kafka_description = Kafka consumer and producer in Erlang
+pkg_kafka_homepage = https://github.com/wooga/kafka-erlang
+pkg_kafka_fetch = git
+pkg_kafka_repo = https://github.com/wooga/kafka-erlang
+pkg_kafka_commit = master
+
+PACKAGES += kafka_protocol
+pkg_kafka_protocol_name = kafka_protocol
+pkg_kafka_protocol_description = Kafka protocol Erlang library
+pkg_kafka_protocol_homepage = https://github.com/klarna/kafka_protocol
+pkg_kafka_protocol_fetch = git
+pkg_kafka_protocol_repo = https://github.com/klarna/kafka_protocol.git
+pkg_kafka_protocol_commit = master
+
+PACKAGES += kai
+pkg_kai_name = kai
+pkg_kai_description = DHT storage by Takeshi Inoue
+pkg_kai_homepage = https://github.com/synrc/kai
+pkg_kai_fetch = git
+pkg_kai_repo = https://github.com/synrc/kai
+pkg_kai_commit = master
+
+PACKAGES += katja
+pkg_katja_name = katja
+pkg_katja_description = A simple Riemann client written in Erlang.
+pkg_katja_homepage = https://github.com/nifoc/katja
+pkg_katja_fetch = git
+pkg_katja_repo = https://github.com/nifoc/katja
+pkg_katja_commit = master
+
+PACKAGES += kdht
+pkg_kdht_name = kdht
+pkg_kdht_description = kdht is an erlang DHT implementation
+pkg_kdht_homepage = https://github.com/kevinlynx/kdht
+pkg_kdht_fetch = git
+pkg_kdht_repo = https://github.com/kevinlynx/kdht
+pkg_kdht_commit = master
+
+PACKAGES += key2value
+pkg_key2value_name = key2value
+pkg_key2value_description = Erlang 2-way map
+pkg_key2value_homepage = https://github.com/okeuday/key2value
+pkg_key2value_fetch = git
+pkg_key2value_repo = https://github.com/okeuday/key2value
+pkg_key2value_commit = master
+
+PACKAGES += keys1value
+pkg_keys1value_name = keys1value
+pkg_keys1value_description = Erlang set associative map for key lists
+pkg_keys1value_homepage = https://github.com/okeuday/keys1value
+pkg_keys1value_fetch = git
+pkg_keys1value_repo = https://github.com/okeuday/keys1value
+pkg_keys1value_commit = master
+
+PACKAGES += kinetic
+pkg_kinetic_name = kinetic
+pkg_kinetic_description = Erlang Kinesis Client
+pkg_kinetic_homepage = https://github.com/AdRoll/kinetic
+pkg_kinetic_fetch = git
+pkg_kinetic_repo = https://github.com/AdRoll/kinetic
+pkg_kinetic_commit = master
+
+PACKAGES += kjell
+pkg_kjell_name = kjell
+pkg_kjell_description = Erlang Shell
+pkg_kjell_homepage = https://github.com/karlll/kjell
+pkg_kjell_fetch = git
+pkg_kjell_repo = https://github.com/karlll/kjell
+pkg_kjell_commit = master
+
+PACKAGES += kraken
+pkg_kraken_name = kraken
+pkg_kraken_description = Distributed Pubsub Server for Realtime Apps
+pkg_kraken_homepage = https://github.com/Asana/kraken
+pkg_kraken_fetch = git
+pkg_kraken_repo = https://github.com/Asana/kraken
+pkg_kraken_commit = master
+
+PACKAGES += kucumberl
+pkg_kucumberl_name = kucumberl
+pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber
+pkg_kucumberl_homepage = https://github.com/openshine/kucumberl
+pkg_kucumberl_fetch = git
+pkg_kucumberl_repo = https://github.com/openshine/kucumberl
+pkg_kucumberl_commit = master
+
+PACKAGES += kvc
+pkg_kvc_name = kvc
+pkg_kvc_description = KVC - Key Value Coding for Erlang data structures
+pkg_kvc_homepage = https://github.com/etrepum/kvc
+pkg_kvc_fetch = git
+pkg_kvc_repo = https://github.com/etrepum/kvc
+pkg_kvc_commit = master
+
+PACKAGES += kvlists
+pkg_kvlists_name = kvlists
+pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang
+pkg_kvlists_homepage = https://github.com/jcomellas/kvlists
+pkg_kvlists_fetch = git
+pkg_kvlists_repo = https://github.com/jcomellas/kvlists
+pkg_kvlists_commit = master
+
+PACKAGES += kvs
+pkg_kvs_name = kvs
+pkg_kvs_description = Container and Iterator
+pkg_kvs_homepage = https://github.com/synrc/kvs
+pkg_kvs_fetch = git
+pkg_kvs_repo = https://github.com/synrc/kvs
+pkg_kvs_commit = master
+
+PACKAGES += lager
+pkg_lager_name = lager
+pkg_lager_description = A logging framework for Erlang/OTP.
+pkg_lager_homepage = https://github.com/erlang-lager/lager
+pkg_lager_fetch = git
+pkg_lager_repo = https://github.com/erlang-lager/lager
+pkg_lager_commit = master
+
+PACKAGES += lager_amqp_backend
+pkg_lager_amqp_backend_name = lager_amqp_backend
+pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend
+pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_fetch = git
+pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend
+pkg_lager_amqp_backend_commit = master
+
+PACKAGES += lager_syslog
+pkg_lager_syslog_name = lager_syslog
+pkg_lager_syslog_description = Syslog backend for lager
+pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog
+pkg_lager_syslog_fetch = git
+pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog
+pkg_lager_syslog_commit = master
+
+PACKAGES += lambdapad
+pkg_lambdapad_name = lambdapad
+pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang.
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad
+pkg_lambdapad_fetch = git
+pkg_lambdapad_repo = https://github.com/gar1t/lambdapad
+pkg_lambdapad_commit = master
+
+PACKAGES += lasp
+pkg_lasp_name = lasp
+pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations
+pkg_lasp_homepage = http://lasp-lang.org/
+pkg_lasp_fetch = git
+pkg_lasp_repo = https://github.com/lasp-lang/lasp
+pkg_lasp_commit = master
+
+PACKAGES += lasse
+pkg_lasse_name = lasse
+pkg_lasse_description = SSE handler for Cowboy
+pkg_lasse_homepage = https://github.com/inaka/lasse
+pkg_lasse_fetch = git
+pkg_lasse_repo = https://github.com/inaka/lasse
+pkg_lasse_commit = master
+
+PACKAGES += ldap
+pkg_ldap_name = ldap
+pkg_ldap_description = LDAP server written in Erlang
+pkg_ldap_homepage = https://github.com/spawnproc/ldap
+pkg_ldap_fetch = git
+pkg_ldap_repo = https://github.com/spawnproc/ldap
+pkg_ldap_commit = master
+
+PACKAGES += lethink
+pkg_lethink_name = lethink
+pkg_lethink_description = erlang driver for rethinkdb
+pkg_lethink_homepage = https://github.com/taybin/lethink
+pkg_lethink_fetch = git
+pkg_lethink_repo = https://github.com/taybin/lethink
+pkg_lethink_commit = master
+
+PACKAGES += lfe
+pkg_lfe_name = lfe
+pkg_lfe_description = Lisp Flavoured Erlang (LFE)
+pkg_lfe_homepage = https://github.com/rvirding/lfe
+pkg_lfe_fetch = git
+pkg_lfe_repo = https://github.com/rvirding/lfe
+pkg_lfe_commit = master
+
+PACKAGES += ling
+pkg_ling_name = ling
+pkg_ling_description = Erlang on Xen
+pkg_ling_homepage = https://github.com/cloudozer/ling
+pkg_ling_fetch = git
+pkg_ling_repo = https://github.com/cloudozer/ling
+pkg_ling_commit = master
+
+PACKAGES += live
+pkg_live_name = live
+pkg_live_description = Automated module and configuration reloader.
+pkg_live_homepage = http://ninenines.eu
+pkg_live_fetch = git
+pkg_live_repo = https://github.com/ninenines/live
+pkg_live_commit = master
+
+PACKAGES += lmq
+pkg_lmq_name = lmq
+pkg_lmq_description = Lightweight Message Queue
+pkg_lmq_homepage = https://github.com/iij/lmq
+pkg_lmq_fetch = git
+pkg_lmq_repo = https://github.com/iij/lmq
+pkg_lmq_commit = master
+
+PACKAGES += locker
+pkg_locker_name = locker
+pkg_locker_description = Atomic distributed 'check and set' for short-lived keys
+pkg_locker_homepage = https://github.com/wooga/locker
+pkg_locker_fetch = git
+pkg_locker_repo = https://github.com/wooga/locker
+pkg_locker_commit = master
+
+PACKAGES += locks
+pkg_locks_name = locks
+pkg_locks_description = A scalable, deadlock-resolving resource locker
+pkg_locks_homepage = https://github.com/uwiger/locks
+pkg_locks_fetch = git
+pkg_locks_repo = https://github.com/uwiger/locks
+pkg_locks_commit = master
+
+PACKAGES += log4erl
+pkg_log4erl_name = log4erl
+pkg_log4erl_description = A logger for erlang in the spirit of Log4J.
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl
+pkg_log4erl_fetch = git
+pkg_log4erl_repo = https://github.com/ahmednawras/log4erl
+pkg_log4erl_commit = master
+
+PACKAGES += lol
+pkg_lol_name = lol
+pkg_lol_description = Lisp on erLang, and programming is fun again
+pkg_lol_homepage = https://github.com/b0oh/lol
+pkg_lol_fetch = git
+pkg_lol_repo = https://github.com/b0oh/lol
+pkg_lol_commit = master
+
+PACKAGES += lucid
+pkg_lucid_name = lucid
+pkg_lucid_description = HTTP/2 server written in Erlang
+pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_fetch = git
+pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid
+pkg_lucid_commit = master
+
+PACKAGES += luerl
+pkg_luerl_name = luerl
+pkg_luerl_description = Lua in Erlang
+pkg_luerl_homepage = https://github.com/rvirding/luerl
+pkg_luerl_fetch = git
+pkg_luerl_repo = https://github.com/rvirding/luerl
+pkg_luerl_commit = develop
+
+PACKAGES += luwak
+pkg_luwak_name = luwak
+pkg_luwak_description = Large-object storage interface for Riak
+pkg_luwak_homepage = https://github.com/basho/luwak
+pkg_luwak_fetch = git
+pkg_luwak_repo = https://github.com/basho/luwak
+pkg_luwak_commit = master
+
+PACKAGES += lux
+pkg_lux_name = lux
+pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands
+pkg_lux_homepage = https://github.com/hawk/lux
+pkg_lux_fetch = git
+pkg_lux_repo = https://github.com/hawk/lux
+pkg_lux_commit = master
+
+PACKAGES += machi
+pkg_machi_name = machi
+pkg_machi_description = Machi file store
+pkg_machi_homepage = https://github.com/basho/machi
+pkg_machi_fetch = git
+pkg_machi_repo = https://github.com/basho/machi
+pkg_machi_commit = master
+
+PACKAGES += mad
+pkg_mad_name = mad
+pkg_mad_description = Small and Fast Rebar Replacement
+pkg_mad_homepage = https://github.com/synrc/mad
+pkg_mad_fetch = git
+pkg_mad_repo = https://github.com/synrc/mad
+pkg_mad_commit = master
+
+PACKAGES += marina
+pkg_marina_name = marina
+pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client
+pkg_marina_homepage = https://github.com/lpgauth/marina
+pkg_marina_fetch = git
+pkg_marina_repo = https://github.com/lpgauth/marina
+pkg_marina_commit = master
+
+PACKAGES += mavg
+pkg_mavg_name = mavg
+pkg_mavg_description = Erlang :: Exponential moving average library
+pkg_mavg_homepage = https://github.com/EchoTeam/mavg
+pkg_mavg_fetch = git
+pkg_mavg_repo = https://github.com/EchoTeam/mavg
+pkg_mavg_commit = master
+
+PACKAGES += mc_erl
+pkg_mc_erl_name = mc_erl
+pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang.
+pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl
+pkg_mc_erl_fetch = git
+pkg_mc_erl_repo = https://github.com/clonejo/mc-erl
+pkg_mc_erl_commit = master
+
+PACKAGES += mcd
+pkg_mcd_name = mcd
+pkg_mcd_description = Fast memcached protocol client in pure Erlang
+pkg_mcd_homepage = https://github.com/EchoTeam/mcd
+pkg_mcd_fetch = git
+pkg_mcd_repo = https://github.com/EchoTeam/mcd
+pkg_mcd_commit = master
+
+PACKAGES += mcerlang
+pkg_mcerlang_name = mcerlang
+pkg_mcerlang_description = The McErlang model checker for Erlang
+pkg_mcerlang_homepage = https://github.com/fredlund/McErlang
+pkg_mcerlang_fetch = git
+pkg_mcerlang_repo = https://github.com/fredlund/McErlang
+pkg_mcerlang_commit = master
+
+PACKAGES += meck
+pkg_meck_name = meck
+pkg_meck_description = A mocking library for Erlang
+pkg_meck_homepage = https://github.com/eproxus/meck
+pkg_meck_fetch = git
+pkg_meck_repo = https://github.com/eproxus/meck
+pkg_meck_commit = master
+
+PACKAGES += mekao
+pkg_mekao_name = mekao
+pkg_mekao_description = SQL constructor
+pkg_mekao_homepage = https://github.com/ddosia/mekao
+pkg_mekao_fetch = git
+pkg_mekao_repo = https://github.com/ddosia/mekao
+pkg_mekao_commit = master
+
+PACKAGES += memo
+pkg_memo_name = memo
+pkg_memo_description = Erlang memoization server
+pkg_memo_homepage = https://github.com/tuncer/memo
+pkg_memo_fetch = git
+pkg_memo_repo = https://github.com/tuncer/memo
+pkg_memo_commit = master
+
+PACKAGES += merge_index
+pkg_merge_index_name = merge_index
+pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop).
+pkg_merge_index_homepage = https://github.com/basho/merge_index
+pkg_merge_index_fetch = git
+pkg_merge_index_repo = https://github.com/basho/merge_index
+pkg_merge_index_commit = master
+
+PACKAGES += merl
+pkg_merl_name = merl
+pkg_merl_description = Metaprogramming in Erlang
+pkg_merl_homepage = https://github.com/richcarl/merl
+pkg_merl_fetch = git
+pkg_merl_repo = https://github.com/richcarl/merl
+pkg_merl_commit = master
+
+PACKAGES += mimerl
+pkg_mimerl_name = mimerl
+pkg_mimerl_description = library to handle mimetypes
+pkg_mimerl_homepage = https://github.com/benoitc/mimerl
+pkg_mimerl_fetch = git
+pkg_mimerl_repo = https://github.com/benoitc/mimerl
+pkg_mimerl_commit = master
+
+PACKAGES += mimetypes
+pkg_mimetypes_name = mimetypes
+pkg_mimetypes_description = Erlang MIME types library
+pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_fetch = git
+pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes
+pkg_mimetypes_commit = master
+
+PACKAGES += mixer
+pkg_mixer_name = mixer
+pkg_mixer_description = Mix in functions from other modules
+pkg_mixer_homepage = https://github.com/chef/mixer
+pkg_mixer_fetch = git
+pkg_mixer_repo = https://github.com/chef/mixer
+pkg_mixer_commit = master
+
+PACKAGES += mochiweb
+pkg_mochiweb_name = mochiweb
+pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers.
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb
+pkg_mochiweb_fetch = git
+pkg_mochiweb_repo = https://github.com/mochi/mochiweb
+pkg_mochiweb_commit = master
+
+PACKAGES += mochiweb_xpath
+pkg_mochiweb_xpath_name = mochiweb_xpath
+pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser
+pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_fetch = git
+pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath
+pkg_mochiweb_xpath_commit = master
+
+PACKAGES += mockgyver
+pkg_mockgyver_name = mockgyver
+pkg_mockgyver_description = A mocking library for Erlang
+pkg_mockgyver_homepage = https://github.com/klajo/mockgyver
+pkg_mockgyver_fetch = git
+pkg_mockgyver_repo = https://github.com/klajo/mockgyver
+pkg_mockgyver_commit = master
+
+PACKAGES += modlib
+pkg_modlib_name = modlib
+pkg_modlib_description = Web framework based on Erlang's inets httpd
+pkg_modlib_homepage = https://github.com/gar1t/modlib
+pkg_modlib_fetch = git
+pkg_modlib_repo = https://github.com/gar1t/modlib
+pkg_modlib_commit = master
+
+PACKAGES += mongodb
+pkg_mongodb_name = mongodb
+pkg_mongodb_description = MongoDB driver for Erlang
+pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_fetch = git
+pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang
+pkg_mongodb_commit = master
+
+PACKAGES += mongooseim
+pkg_mongooseim_name = mongooseim
+pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions
+pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform
+pkg_mongooseim_fetch = git
+pkg_mongooseim_repo = https://github.com/esl/MongooseIM
+pkg_mongooseim_commit = master
+
+PACKAGES += moyo
+pkg_moyo_name = moyo
+pkg_moyo_description = Erlang utility functions library
+pkg_moyo_homepage = https://github.com/dwango/moyo
+pkg_moyo_fetch = git
+pkg_moyo_repo = https://github.com/dwango/moyo
+pkg_moyo_commit = master
+
+PACKAGES += msgpack
+pkg_msgpack_name = msgpack
+pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang
+pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_fetch = git
+pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang
+pkg_msgpack_commit = master
+
+PACKAGES += mu2
+pkg_mu2_name = mu2
+pkg_mu2_description = Erlang mutation testing tool
+pkg_mu2_homepage = https://github.com/ramsay-t/mu2
+pkg_mu2_fetch = git
+pkg_mu2_repo = https://github.com/ramsay-t/mu2
+pkg_mu2_commit = master
+
+PACKAGES += mustache
+pkg_mustache_name = mustache
+pkg_mustache_description = Mustache template engine for Erlang.
+pkg_mustache_homepage = https://github.com/mojombo/mustache.erl
+pkg_mustache_fetch = git
+pkg_mustache_repo = https://github.com/mojombo/mustache.erl
+pkg_mustache_commit = master
+
+PACKAGES += myproto
+pkg_myproto_name = myproto
+pkg_myproto_description = MySQL Server Protocol in Erlang
+pkg_myproto_homepage = https://github.com/altenwald/myproto
+pkg_myproto_fetch = git
+pkg_myproto_repo = https://github.com/altenwald/myproto
+pkg_myproto_commit = master
+
+PACKAGES += mysql
+pkg_mysql_name = mysql
+pkg_mysql_description = MySQL client library for Erlang/OTP
+pkg_mysql_homepage = https://github.com/mysql-otp/mysql-otp
+pkg_mysql_fetch = git
+pkg_mysql_repo = https://github.com/mysql-otp/mysql-otp
+pkg_mysql_commit = 1.5.1
+
+PACKAGES += n2o
+pkg_n2o_name = n2o
+pkg_n2o_description = WebSocket Application Server
+pkg_n2o_homepage = https://github.com/5HT/n2o
+pkg_n2o_fetch = git
+pkg_n2o_repo = https://github.com/5HT/n2o
+pkg_n2o_commit = master
+
+PACKAGES += nat_upnp
+pkg_nat_upnp_name = nat_upnp
+pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD
+pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_fetch = git
+pkg_nat_upnp_repo = https://github.com/benoitc/nat_upnp
+pkg_nat_upnp_commit = master
+
+PACKAGES += neo4j
+pkg_neo4j_name = neo4j
+pkg_neo4j_description = Erlang client library for Neo4J.
+pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_fetch = git
+pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang
+pkg_neo4j_commit = master
+
+PACKAGES += neotoma
+pkg_neotoma_name = neotoma
+pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars.
+pkg_neotoma_homepage = https://github.com/seancribbs/neotoma
+pkg_neotoma_fetch = git
+pkg_neotoma_repo = https://github.com/seancribbs/neotoma
+pkg_neotoma_commit = master
+
+PACKAGES += newrelic
+pkg_newrelic_name = newrelic
+pkg_newrelic_description = Erlang library for sending metrics to New Relic
+pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_fetch = git
+pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang
+pkg_newrelic_commit = master
+
+PACKAGES += nifty
+pkg_nifty_name = nifty
+pkg_nifty_description = Erlang NIF wrapper generator
+pkg_nifty_homepage = https://github.com/parapluu/nifty
+pkg_nifty_fetch = git
+pkg_nifty_repo = https://github.com/parapluu/nifty
+pkg_nifty_commit = master
+
+PACKAGES += nitrogen_core
+pkg_nitrogen_core_name = nitrogen_core
+pkg_nitrogen_core_description = The core Nitrogen library.
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/
+pkg_nitrogen_core_fetch = git
+pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core
+pkg_nitrogen_core_commit = master
+
+PACKAGES += nkbase
+pkg_nkbase_name = nkbase
+pkg_nkbase_description = NkBASE distributed database
+pkg_nkbase_homepage = https://github.com/Nekso/nkbase
+pkg_nkbase_fetch = git
+pkg_nkbase_repo = https://github.com/Nekso/nkbase
+pkg_nkbase_commit = develop
+
+PACKAGES += nkdocker
+pkg_nkdocker_name = nkdocker
+pkg_nkdocker_description = Erlang Docker client
+pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker
+pkg_nkdocker_fetch = git
+pkg_nkdocker_repo = https://github.com/Nekso/nkdocker
+pkg_nkdocker_commit = master
+
+PACKAGES += nkpacket
+pkg_nkpacket_name = nkpacket
+pkg_nkpacket_description = Generic Erlang transport layer
+pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket
+pkg_nkpacket_fetch = git
+pkg_nkpacket_repo = https://github.com/Nekso/nkpacket
+pkg_nkpacket_commit = master
+
+PACKAGES += nksip
+pkg_nksip_name = nksip
+pkg_nksip_description = Erlang SIP application server
+pkg_nksip_homepage = https://github.com/kalta/nksip
+pkg_nksip_fetch = git
+pkg_nksip_repo = https://github.com/kalta/nksip
+pkg_nksip_commit = master
+
+PACKAGES += nodefinder
+pkg_nodefinder_name = nodefinder
+pkg_nodefinder_description = automatic node discovery via UDP multicast
+pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder
+pkg_nodefinder_fetch = git
+pkg_nodefinder_repo = https://github.com/okeuday/nodefinder
+pkg_nodefinder_commit = master
+
+PACKAGES += nprocreg
+pkg_nprocreg_name = nprocreg
+pkg_nprocreg_description = Minimal Distributed Erlang Process Registry
+pkg_nprocreg_homepage = http://nitrogenproject.com/
+pkg_nprocreg_fetch = git
+pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg
+pkg_nprocreg_commit = master
+
+PACKAGES += oauth
+pkg_oauth_name = oauth
+pkg_oauth_description = An Erlang OAuth 1.0 implementation
+pkg_oauth_homepage = https://github.com/tim/erlang-oauth
+pkg_oauth_fetch = git
+pkg_oauth_repo = https://github.com/tim/erlang-oauth
+pkg_oauth_commit = master
+
+PACKAGES += oauth2
+pkg_oauth2_name = oauth2
+pkg_oauth2_description = Erlang Oauth2 implementation
+pkg_oauth2_homepage = https://github.com/kivra/oauth2
+pkg_oauth2_fetch = git
+pkg_oauth2_repo = https://github.com/kivra/oauth2
+pkg_oauth2_commit = master
+
+PACKAGES += observer_cli
+pkg_observer_cli_name = observer_cli
+pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line
+pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli
+pkg_observer_cli_fetch = git
+pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli
+pkg_observer_cli_commit = master
+
+PACKAGES += octopus
+pkg_octopus_name = octopus
+pkg_octopus_description = Small and flexible pool manager written in Erlang
+pkg_octopus_homepage = https://github.com/erlangbureau/octopus
+pkg_octopus_fetch = git
+pkg_octopus_repo = https://github.com/erlangbureau/octopus
+pkg_octopus_commit = master
+
+PACKAGES += of_protocol
+pkg_of_protocol_name = of_protocol
+pkg_of_protocol_description = OpenFlow Protocol Library for Erlang
+pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_fetch = git
+pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol
+pkg_of_protocol_commit = master
+
+PACKAGES += opencouch
+pkg_opencouch_name = couch
+pkg_opencouch_description = An embeddable document-oriented database compatible with Apache CouchDB
+pkg_opencouch_homepage = https://github.com/benoitc/opencouch
+pkg_opencouch_fetch = git
+pkg_opencouch_repo = https://github.com/benoitc/opencouch
+pkg_opencouch_commit = master
+
+PACKAGES += openflow
+pkg_openflow_name = openflow
+pkg_openflow_description = An OpenFlow controller written in pure erlang
+pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_fetch = git
+pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow
+pkg_openflow_commit = master
+
+PACKAGES += openid
+pkg_openid_name = openid
+pkg_openid_description = Erlang OpenID
+pkg_openid_homepage = https://github.com/brendonh/erl_openid
+pkg_openid_fetch = git
+pkg_openid_repo = https://github.com/brendonh/erl_openid
+pkg_openid_commit = master
+
+PACKAGES += openpoker
+pkg_openpoker_name = openpoker
+pkg_openpoker_description = Genesis Texas hold'em Game Server
+pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker
+pkg_openpoker_fetch = git
+pkg_openpoker_repo = https://github.com/hpyhacking/openpoker
+pkg_openpoker_commit = master
+
+PACKAGES += otpbp
+pkg_otpbp_name = otpbp
+pkg_otpbp_description = Parse transformer for using new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19)
+pkg_otpbp_homepage = https://github.com/Ledest/otpbp
+pkg_otpbp_fetch = git
+pkg_otpbp_repo = https://github.com/Ledest/otpbp
+pkg_otpbp_commit = master
+
+PACKAGES += pal
+pkg_pal_name = pal
+pkg_pal_description = Pragmatic Authentication Library
+pkg_pal_homepage = https://github.com/manifest/pal
+pkg_pal_fetch = git
+pkg_pal_repo = https://github.com/manifest/pal
+pkg_pal_commit = master
+
+PACKAGES += parse_trans
+pkg_parse_trans_name = parse_trans
+pkg_parse_trans_description = Parse transform utilities for Erlang
+pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans
+pkg_parse_trans_fetch = git
+pkg_parse_trans_repo = https://github.com/uwiger/parse_trans
+pkg_parse_trans_commit = master
+
+PACKAGES += parsexml
+pkg_parsexml_name = parsexml
+pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API
+pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml
+pkg_parsexml_fetch = git
+pkg_parsexml_repo = https://github.com/maxlapshin/parsexml
+pkg_parsexml_commit = master
+
+PACKAGES += partisan
+pkg_partisan_name = partisan
+pkg_partisan_description = High-performance, high-scalability distributed computing with Erlang and Elixir.
+pkg_partisan_homepage = http://partisan.cloud
+pkg_partisan_fetch = git
+pkg_partisan_repo = https://github.com/lasp-lang/partisan
+pkg_partisan_commit = master
+
+PACKAGES += pegjs
+pkg_pegjs_name = pegjs
+pkg_pegjs_description = An implementation of PEG.js grammar for Erlang.
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs
+pkg_pegjs_fetch = git
+pkg_pegjs_repo = https://github.com/dmitriid/pegjs
+pkg_pegjs_commit = master
+
+PACKAGES += percept2
+pkg_percept2_name = percept2
+pkg_percept2_description = Concurrent profiling tool for Erlang
+pkg_percept2_homepage = https://github.com/huiqing/percept2
+pkg_percept2_fetch = git
+pkg_percept2_repo = https://github.com/huiqing/percept2
+pkg_percept2_commit = master
+
+PACKAGES += pgo
+pkg_pgo_name = pgo
+pkg_pgo_description = Erlang Postgres client and connection pool
+pkg_pgo_homepage = https://github.com/erleans/pgo.git
+pkg_pgo_fetch = git
+pkg_pgo_repo = https://github.com/erleans/pgo.git
+pkg_pgo_commit = master
+
+PACKAGES += pgsql
+pkg_pgsql_name = pgsql
+pkg_pgsql_description = Erlang PostgreSQL driver
+pkg_pgsql_homepage = https://github.com/semiocast/pgsql
+pkg_pgsql_fetch = git
+pkg_pgsql_repo = https://github.com/semiocast/pgsql
+pkg_pgsql_commit = master
+
+PACKAGES += pkgx
+pkg_pkgx_name = pkgx
+pkg_pkgx_description = Build .deb packages from Erlang releases
+pkg_pkgx_homepage = https://github.com/arjan/pkgx
+pkg_pkgx_fetch = git
+pkg_pkgx_repo = https://github.com/arjan/pkgx
+pkg_pkgx_commit = master
+
+PACKAGES += pkt
+pkg_pkt_name = pkt
+pkg_pkt_description = Erlang network protocol library
+pkg_pkt_homepage = https://github.com/msantos/pkt
+pkg_pkt_fetch = git
+pkg_pkt_repo = https://github.com/msantos/pkt
+pkg_pkt_commit = master
+
+PACKAGES += plain_fsm
+pkg_plain_fsm_name = plain_fsm
+pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs.
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_fetch = git
+pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm
+pkg_plain_fsm_commit = master
+
+PACKAGES += plumtree
+pkg_plumtree_name = plumtree
+pkg_plumtree_description = Epidemic Broadcast Trees
+pkg_plumtree_homepage = https://github.com/helium/plumtree
+pkg_plumtree_fetch = git
+pkg_plumtree_repo = https://github.com/helium/plumtree
+pkg_plumtree_commit = master
+
+PACKAGES += pmod_transform
+pkg_pmod_transform_name = pmod_transform
+pkg_pmod_transform_description = Parse transform for parameterized modules
+pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_fetch = git
+pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform
+pkg_pmod_transform_commit = master
+
+PACKAGES += pobox
+pkg_pobox_name = pobox
+pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang
+pkg_pobox_homepage = https://github.com/ferd/pobox
+pkg_pobox_fetch = git
+pkg_pobox_repo = https://github.com/ferd/pobox
+pkg_pobox_commit = master
+
+PACKAGES += ponos
+pkg_ponos_name = ponos
+pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang
+pkg_ponos_homepage = https://github.com/klarna/ponos
+pkg_ponos_fetch = git
+pkg_ponos_repo = https://github.com/klarna/ponos
+pkg_ponos_commit = master
+
+PACKAGES += poolboy
+pkg_poolboy_name = poolboy
+pkg_poolboy_description = A hunky Erlang worker pool factory
+pkg_poolboy_homepage = https://github.com/devinus/poolboy
+pkg_poolboy_fetch = git
+pkg_poolboy_repo = https://github.com/devinus/poolboy
+pkg_poolboy_commit = master
+
+PACKAGES += pooler
+pkg_pooler_name = pooler
+pkg_pooler_description = An OTP Process Pool Application
+pkg_pooler_homepage = https://github.com/seth/pooler
+pkg_pooler_fetch = git
+pkg_pooler_repo = https://github.com/seth/pooler
+pkg_pooler_commit = master
+
+PACKAGES += pqueue
+pkg_pqueue_name = pqueue
+pkg_pqueue_description = Erlang Priority Queues
+pkg_pqueue_homepage = https://github.com/okeuday/pqueue
+pkg_pqueue_fetch = git
+pkg_pqueue_repo = https://github.com/okeuday/pqueue
+pkg_pqueue_commit = master
+
+PACKAGES += procket
+pkg_procket_name = procket
+pkg_procket_description = Erlang interface to low level socket operations
+pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket
+pkg_procket_fetch = git
+pkg_procket_repo = https://github.com/msantos/procket
+pkg_procket_commit = master
+
+PACKAGES += prometheus
+pkg_prometheus_name = prometheus
+pkg_prometheus_description = Prometheus.io client in Erlang
+pkg_prometheus_homepage = https://github.com/deadtrickster/prometheus.erl
+pkg_prometheus_fetch = git
+pkg_prometheus_repo = https://github.com/deadtrickster/prometheus.erl
+pkg_prometheus_commit = master
+
+PACKAGES += prop
+pkg_prop_name = prop
+pkg_prop_description = An Erlang code scaffolding and generator system.
+pkg_prop_homepage = https://github.com/nuex/prop
+pkg_prop_fetch = git
+pkg_prop_repo = https://github.com/nuex/prop
+pkg_prop_commit = master
+
+PACKAGES += proper
+pkg_proper_name = proper
+pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang.
+pkg_proper_homepage = http://proper.softlab.ntua.gr
+pkg_proper_fetch = git
+pkg_proper_repo = https://github.com/manopapad/proper
+pkg_proper_commit = master
+
+PACKAGES += props
+pkg_props_name = props
+pkg_props_description = Property structure library
+pkg_props_homepage = https://github.com/greyarea/props
+pkg_props_fetch = git
+pkg_props_repo = https://github.com/greyarea/props
+pkg_props_commit = master
+
+PACKAGES += protobuffs
+pkg_protobuffs_name = protobuffs
+pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs.
+pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_fetch = git
+pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs
+pkg_protobuffs_commit = master
+
+PACKAGES += psycho
+pkg_psycho_name = psycho
+pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware.
+pkg_psycho_homepage = https://github.com/gar1t/psycho
+pkg_psycho_fetch = git
+pkg_psycho_repo = https://github.com/gar1t/psycho
+pkg_psycho_commit = master
+
+PACKAGES += purity
+pkg_purity_name = purity
+pkg_purity_description = A side-effect analyzer for Erlang
+pkg_purity_homepage = https://github.com/mpitid/purity
+pkg_purity_fetch = git
+pkg_purity_repo = https://github.com/mpitid/purity
+pkg_purity_commit = master
+
+PACKAGES += push_service
+pkg_push_service_name = push_service
+pkg_push_service_description = Push service
+pkg_push_service_homepage = https://github.com/hairyhum/push_service
+pkg_push_service_fetch = git
+pkg_push_service_repo = https://github.com/hairyhum/push_service
+pkg_push_service_commit = master
+
+PACKAGES += qdate
+pkg_qdate_name = qdate
+pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang.
+pkg_qdate_homepage = https://github.com/choptastic/qdate
+pkg_qdate_fetch = git
+pkg_qdate_repo = https://github.com/choptastic/qdate
+pkg_qdate_commit = master
+
+PACKAGES += qrcode
+pkg_qrcode_name = qrcode
+pkg_qrcode_description = QR Code encoder in Erlang
+pkg_qrcode_homepage = https://github.com/komone/qrcode
+pkg_qrcode_fetch = git
+pkg_qrcode_repo = https://github.com/komone/qrcode
+pkg_qrcode_commit = master
+
+PACKAGES += quest
+pkg_quest_name = quest
+pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang.
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest
+pkg_quest_fetch = git
+pkg_quest_repo = https://github.com/eriksoe/ErlangQuest
+pkg_quest_commit = master
+
+PACKAGES += quickrand
+pkg_quickrand_name = quickrand
+pkg_quickrand_description = Quick Erlang Random Number Generation
+pkg_quickrand_homepage = https://github.com/okeuday/quickrand
+pkg_quickrand_fetch = git
+pkg_quickrand_repo = https://github.com/okeuday/quickrand
+pkg_quickrand_commit = master
+
+PACKAGES += rabbit
+pkg_rabbit_name = rabbit
+pkg_rabbit_description = RabbitMQ Server
+pkg_rabbit_homepage = https://www.rabbitmq.com/
+pkg_rabbit_fetch = git
+pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git
+pkg_rabbit_commit = master
+
+PACKAGES += rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak
+pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak
+pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_fetch = git
+pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange
+pkg_rabbit_exchange_type_riak_commit = master
+
+PACKAGES += rack
+pkg_rack_name = rack
+pkg_rack_description = Rack handler for erlang
+pkg_rack_homepage = https://github.com/erlyvideo/rack
+pkg_rack_fetch = git
+pkg_rack_repo = https://github.com/erlyvideo/rack
+pkg_rack_commit = master
+
+PACKAGES += radierl
+pkg_radierl_name = radierl
+pkg_radierl_description = RADIUS protocol stack implemented in Erlang.
+pkg_radierl_homepage = https://github.com/vances/radierl
+pkg_radierl_fetch = git
+pkg_radierl_repo = https://github.com/vances/radierl
+pkg_radierl_commit = master
+
+PACKAGES += rafter
+pkg_rafter_name = rafter
+pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol
+pkg_rafter_homepage = https://github.com/andrewjstone/rafter
+pkg_rafter_fetch = git
+pkg_rafter_repo = https://github.com/andrewjstone/rafter
+pkg_rafter_commit = master
+
+PACKAGES += ranch
+pkg_ranch_name = ranch
+pkg_ranch_description = Socket acceptor pool for TCP protocols.
+pkg_ranch_homepage = http://ninenines.eu
+pkg_ranch_fetch = git
+pkg_ranch_repo = https://github.com/ninenines/ranch
+pkg_ranch_commit = 1.2.1
+
+PACKAGES += rbeacon
+pkg_rbeacon_name = rbeacon
+pkg_rbeacon_description = LAN discovery and presence in Erlang.
+pkg_rbeacon_homepage = https://github.com/refuge/rbeacon
+pkg_rbeacon_fetch = git
+pkg_rbeacon_repo = https://github.com/refuge/rbeacon
+pkg_rbeacon_commit = master
+
+PACKAGES += rebar
+pkg_rebar_name = rebar
+pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases.
+pkg_rebar_homepage = http://www.rebar3.org
+pkg_rebar_fetch = git
+pkg_rebar_repo = https://github.com/rebar/rebar3
+pkg_rebar_commit = master
+
+PACKAGES += rebus
+pkg_rebus_name = rebus
+pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in and for Erlang.
+pkg_rebus_homepage = https://github.com/olle/rebus
+pkg_rebus_fetch = git
+pkg_rebus_repo = https://github.com/olle/rebus
+pkg_rebus_commit = master
+
+PACKAGES += rec2json
+pkg_rec2json_name = rec2json
+pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily.
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json
+pkg_rec2json_fetch = git
+pkg_rec2json_repo = https://github.com/lordnull/rec2json
+pkg_rec2json_commit = master
+
+PACKAGES += recon
+pkg_recon_name = recon
+pkg_recon_description = Collection of functions and scripts to debug Erlang in production.
+pkg_recon_homepage = https://github.com/ferd/recon
+pkg_recon_fetch = git
+pkg_recon_repo = https://github.com/ferd/recon
+pkg_recon_commit = master
+
+PACKAGES += record_info
+pkg_record_info_name = record_info
+pkg_record_info_description = Convert between record and proplist
+pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_fetch = git
+pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info
+pkg_record_info_commit = master
+
+PACKAGES += redgrid
+pkg_redgrid_name = redgrid
+pkg_redgrid_description = automatic Erlang node discovery via redis
+pkg_redgrid_homepage = https://github.com/jkvor/redgrid
+pkg_redgrid_fetch = git
+pkg_redgrid_repo = https://github.com/jkvor/redgrid
+pkg_redgrid_commit = master
+
+PACKAGES += redo
+pkg_redo_name = redo
+pkg_redo_description = pipelined erlang redis client
+pkg_redo_homepage = https://github.com/jkvor/redo
+pkg_redo_fetch = git
+pkg_redo_repo = https://github.com/jkvor/redo
+pkg_redo_commit = master
+
+PACKAGES += reload_mk
+pkg_reload_mk_name = reload_mk
+pkg_reload_mk_description = Live reload plugin for erlang.mk.
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk
+pkg_reload_mk_fetch = git
+pkg_reload_mk_repo = https://github.com/bullno1/reload.mk
+pkg_reload_mk_commit = master
+
+PACKAGES += reltool_util
+pkg_reltool_util_name = reltool_util
+pkg_reltool_util_description = Erlang reltool utility functionality application
+pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util
+pkg_reltool_util_fetch = git
+pkg_reltool_util_repo = https://github.com/okeuday/reltool_util
+pkg_reltool_util_commit = master
+
+PACKAGES += relx
+pkg_relx_name = relx
+pkg_relx_description = Sane, simple release creation for Erlang
+pkg_relx_homepage = https://github.com/erlware/relx
+pkg_relx_fetch = git
+pkg_relx_repo = https://github.com/erlware/relx
+pkg_relx_commit = master
+
+PACKAGES += resource_discovery
+pkg_resource_discovery_name = resource_discovery
+pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster.
+pkg_resource_discovery_homepage = http://erlware.org/
+pkg_resource_discovery_fetch = git
+pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery
+pkg_resource_discovery_commit = master
+
+PACKAGES += restc
+pkg_restc_name = restc
+pkg_restc_description = Erlang Rest Client
+pkg_restc_homepage = https://github.com/kivra/restclient
+pkg_restc_fetch = git
+pkg_restc_repo = https://github.com/kivra/restclient
+pkg_restc_commit = master
+
+PACKAGES += rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc
+pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation.
+pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_fetch = git
+pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627
+pkg_rfc4627_jsonrpc_commit = master
+
+PACKAGES += riak_control
+pkg_riak_control_name = riak_control
+pkg_riak_control_description = Webmachine-based administration interface for Riak.
+pkg_riak_control_homepage = https://github.com/basho/riak_control
+pkg_riak_control_fetch = git
+pkg_riak_control_repo = https://github.com/basho/riak_control
+pkg_riak_control_commit = master
+
+PACKAGES += riak_core
+pkg_riak_core_name = riak_core
+pkg_riak_core_description = Distributed systems infrastructure used by Riak.
+pkg_riak_core_homepage = https://github.com/basho/riak_core
+pkg_riak_core_fetch = git
+pkg_riak_core_repo = https://github.com/basho/riak_core
+pkg_riak_core_commit = master
+
+PACKAGES += riak_dt
+pkg_riak_dt_name = riak_dt
+pkg_riak_dt_description = Convergent replicated datatypes in Erlang
+pkg_riak_dt_homepage = https://github.com/basho/riak_dt
+pkg_riak_dt_fetch = git
+pkg_riak_dt_repo = https://github.com/basho/riak_dt
+pkg_riak_dt_commit = master
+
+PACKAGES += riak_ensemble
+pkg_riak_ensemble_name = riak_ensemble
+pkg_riak_ensemble_description = Multi-Paxos framework in Erlang
+pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_fetch = git
+pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble
+pkg_riak_ensemble_commit = master
+
+PACKAGES += riak_kv
+pkg_riak_kv_name = riak_kv
+pkg_riak_kv_description = Riak Key/Value Store
+pkg_riak_kv_homepage = https://github.com/basho/riak_kv
+pkg_riak_kv_fetch = git
+pkg_riak_kv_repo = https://github.com/basho/riak_kv
+pkg_riak_kv_commit = master
+
+PACKAGES += riak_pg
+pkg_riak_pg_name = riak_pg
+pkg_riak_pg_description = Distributed process groups with riak_core.
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_fetch = git
+pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg
+pkg_riak_pg_commit = master
+
+PACKAGES += riak_pipe
+pkg_riak_pipe_name = riak_pipe
+pkg_riak_pipe_description = Riak Pipelines
+pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe
+pkg_riak_pipe_fetch = git
+pkg_riak_pipe_repo = https://github.com/basho/riak_pipe
+pkg_riak_pipe_commit = master
+
+PACKAGES += riak_sysmon
+pkg_riak_sysmon_name = riak_sysmon
+pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages
+pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_fetch = git
+pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon
+pkg_riak_sysmon_commit = master
+
+PACKAGES += riak_test
+pkg_riak_test_name = riak_test
+pkg_riak_test_description = I'm in your cluster, testing your riaks
+pkg_riak_test_homepage = https://github.com/basho/riak_test
+pkg_riak_test_fetch = git
+pkg_riak_test_repo = https://github.com/basho/riak_test
+pkg_riak_test_commit = master
+
+PACKAGES += riakc
+pkg_riakc_name = riakc
+pkg_riakc_description = Erlang clients for Riak.
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client
+pkg_riakc_fetch = git
+pkg_riakc_repo = https://github.com/basho/riak-erlang-client
+pkg_riakc_commit = master
+
+PACKAGES += riakhttpc
+pkg_riakhttpc_name = riakhttpc
+pkg_riakhttpc_description = Riak Erlang client using the HTTP interface
+pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_fetch = git
+pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client
+pkg_riakhttpc_commit = master
+
+PACKAGES += riaknostic
+pkg_riaknostic_name = riaknostic
+pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap
+pkg_riaknostic_homepage = https://github.com/basho/riaknostic
+pkg_riaknostic_fetch = git
+pkg_riaknostic_repo = https://github.com/basho/riaknostic
+pkg_riaknostic_commit = master
+
+PACKAGES += riakpool
+pkg_riakpool_name = riakpool
+pkg_riakpool_description = erlang riak client pool
+pkg_riakpool_homepage = https://github.com/dweldon/riakpool
+pkg_riakpool_fetch = git
+pkg_riakpool_repo = https://github.com/dweldon/riakpool
+pkg_riakpool_commit = master
+
+PACKAGES += rivus_cep
+pkg_rivus_cep_name = rivus_cep
+pkg_rivus_cep_description = Complex event processing in Erlang
+pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_fetch = git
+pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep
+pkg_rivus_cep_commit = master
+
+PACKAGES += rlimit
+pkg_rlimit_name = rlimit
+pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent
+pkg_rlimit_homepage = https://github.com/jlouis/rlimit
+pkg_rlimit_fetch = git
+pkg_rlimit_repo = https://github.com/jlouis/rlimit
+pkg_rlimit_commit = master
+
+PACKAGES += rust_mk
+pkg_rust_mk_name = rust_mk
+pkg_rust_mk_description = Build Rust crates in an Erlang application
+pkg_rust_mk_homepage = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_fetch = git
+pkg_rust_mk_repo = https://github.com/goertzenator/rust.mk
+pkg_rust_mk_commit = master
+
+PACKAGES += safetyvalve
+pkg_safetyvalve_name = safetyvalve
+pkg_safetyvalve_description = A safety valve for your erlang node
+pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_fetch = git
+pkg_safetyvalve_repo = https://github.com/jlouis/safetyvalve
+pkg_safetyvalve_commit = master
+
+PACKAGES += seestar
+pkg_seestar_name = seestar
+pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol
+pkg_seestar_homepage = https://github.com/iamaleksey/seestar
+pkg_seestar_fetch = git
+pkg_seestar_repo = https://github.com/iamaleksey/seestar
+pkg_seestar_commit = master
+
+PACKAGES += service
+pkg_service_name = service
+pkg_service_description = A minimal Erlang behavior for creating CloudI internal services
+pkg_service_homepage = http://cloudi.org/
+pkg_service_fetch = git
+pkg_service_repo = https://github.com/CloudI/service
+pkg_service_commit = master
+
+PACKAGES += setup
+pkg_setup_name = setup
+pkg_setup_description = Generic setup utility for Erlang-based systems
+pkg_setup_homepage = https://github.com/uwiger/setup
+pkg_setup_fetch = git
+pkg_setup_repo = https://github.com/uwiger/setup
+pkg_setup_commit = master
+
+PACKAGES += sext
+pkg_sext_name = sext
+pkg_sext_description = Sortable Erlang Term Serialization
+pkg_sext_homepage = https://github.com/uwiger/sext
+pkg_sext_fetch = git
+pkg_sext_repo = https://github.com/uwiger/sext
+pkg_sext_commit = master
+
+PACKAGES += sfmt
+pkg_sfmt_name = sfmt
+pkg_sfmt_description = SFMT pseudo random number generator for Erlang.
+pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_fetch = git
+pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang
+pkg_sfmt_commit = master
+
+PACKAGES += sgte
+pkg_sgte_name = sgte
+pkg_sgte_description = A simple Erlang Template Engine
+pkg_sgte_homepage = https://github.com/filippo/sgte
+pkg_sgte_fetch = git
+pkg_sgte_repo = https://github.com/filippo/sgte
+pkg_sgte_commit = master
+
+PACKAGES += sheriff
+pkg_sheriff_name = sheriff
+pkg_sheriff_description = Parse transform for type based validation.
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = master
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_fetch = git
+pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge
+pkg_simple_bridge_commit = master
+
+PACKAGES += simple_oauth2
+pkg_simple_oauth2_name = simple_oauth2
+pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured)
+pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_fetch = git
+pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2
+pkg_simple_oauth2_commit = master
+
+PACKAGES += skel
+pkg_skel_name = skel
+pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang
+pkg_skel_homepage = https://github.com/ParaPhrase/skel
+pkg_skel_fetch = git
+pkg_skel_repo = https://github.com/ParaPhrase/skel
+pkg_skel_commit = master
+
+PACKAGES += slack
+pkg_slack_name = slack
+pkg_slack_description = Minimal slack notification OTP library.
+pkg_slack_homepage = https://github.com/DonBranson/slack
+pkg_slack_fetch = git
+pkg_slack_repo = https://github.com/DonBranson/slack.git
+pkg_slack_commit = master
+
+PACKAGES += smother
+pkg_smother_name = smother
+pkg_smother_description = Extended code coverage metrics for Erlang.
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/
+pkg_smother_fetch = git
+pkg_smother_repo = https://github.com/ramsay-t/Smother
+pkg_smother_commit = master
+
+PACKAGES += snappyer
+pkg_snappyer_name = snappyer
+pkg_snappyer_description = Snappy as nif for Erlang
+pkg_snappyer_homepage = https://github.com/zmstone/snappyer
+pkg_snappyer_fetch = git
+pkg_snappyer_repo = https://github.com/zmstone/snappyer.git
+pkg_snappyer_commit = master
+
+PACKAGES += social
+pkg_social_name = social
+pkg_social_description = Cowboy handler for social login via OAuth2 providers
+pkg_social_homepage = https://github.com/dvv/social
+pkg_social_fetch = git
+pkg_social_repo = https://github.com/dvv/social
+pkg_social_commit = master
+
+PACKAGES += spapi_router
+pkg_spapi_router_name = spapi_router
+pkg_spapi_router_description = Partially-connected Erlang clustering
+pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router
+pkg_spapi_router_fetch = git
+pkg_spapi_router_repo = https://github.com/spilgames/spapi-router
+pkg_spapi_router_commit = master
+
+PACKAGES += sqerl
+pkg_sqerl_name = sqerl
+pkg_sqerl_description = An Erlang-flavoured SQL DSL
+pkg_sqerl_homepage = https://github.com/hairyhum/sqerl
+pkg_sqerl_fetch = git
+pkg_sqerl_repo = https://github.com/hairyhum/sqerl
+pkg_sqerl_commit = master
+
+PACKAGES += srly
+pkg_srly_name = srly
+pkg_srly_description = Native Erlang Unix serial interface
+pkg_srly_homepage = https://github.com/msantos/srly
+pkg_srly_fetch = git
+pkg_srly_repo = https://github.com/msantos/srly
+pkg_srly_commit = master
+
+PACKAGES += sshrpc
+pkg_sshrpc_name = sshrpc
+pkg_sshrpc_description = Erlang SSH RPC module (experimental)
+pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_fetch = git
+pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc
+pkg_sshrpc_commit = master
+
+PACKAGES += stable
+pkg_stable_name = stable
+pkg_stable_description = Library of assorted helpers for Cowboy web server.
+pkg_stable_homepage = https://github.com/dvv/stable
+pkg_stable_fetch = git
+pkg_stable_repo = https://github.com/dvv/stable
+pkg_stable_commit = master
+
+PACKAGES += statebox
+pkg_statebox_name = statebox
+pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. Useful for Riak.
+pkg_statebox_homepage = https://github.com/mochi/statebox
+pkg_statebox_fetch = git
+pkg_statebox_repo = https://github.com/mochi/statebox
+pkg_statebox_commit = master
+
+PACKAGES += statebox_riak
+pkg_statebox_riak_name = statebox_riak
+pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media.
+pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_fetch = git
+pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak
+pkg_statebox_riak_commit = master
+
+PACKAGES += statman
+pkg_statman_name = statman
+pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM
+pkg_statman_homepage = https://github.com/knutin/statman
+pkg_statman_fetch = git
+pkg_statman_repo = https://github.com/knutin/statman
+pkg_statman_commit = master
+
+PACKAGES += statsderl
+pkg_statsderl_name = statsderl
+pkg_statsderl_description = StatsD client (erlang)
+pkg_statsderl_homepage = https://github.com/lpgauth/statsderl
+pkg_statsderl_fetch = git
+pkg_statsderl_repo = https://github.com/lpgauth/statsderl
+pkg_statsderl_commit = master
+
+PACKAGES += stdinout_pool
+pkg_stdinout_pool_name = stdinout_pool
+pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication.
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_fetch = git
+pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool
+pkg_stdinout_pool_commit = master
+
+PACKAGES += stockdb
+pkg_stockdb_name = stockdb
+pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang
+pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb
+pkg_stockdb_fetch = git
+pkg_stockdb_repo = https://github.com/maxlapshin/stockdb
+pkg_stockdb_commit = master
+
+PACKAGES += stripe
+pkg_stripe_name = stripe
+pkg_stripe_description = Erlang interface to the stripe.com API
+pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang
+pkg_stripe_fetch = git
+pkg_stripe_repo = https://github.com/mattsta/stripe-erlang
+pkg_stripe_commit = v1
+
+PACKAGES += subproc
+pkg_subproc_name = subproc
+pkg_subproc_description = unix subprocess manager with {active,once|false} modes
+pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc
+pkg_subproc_fetch = git
+pkg_subproc_repo = https://github.com/dozzie/subproc
+pkg_subproc_commit = v0.1.0
+
+PACKAGES += supervisor3
+pkg_supervisor3_name = supervisor3
+pkg_supervisor3_description = OTP supervisor with additional strategies
+pkg_supervisor3_homepage = https://github.com/klarna/supervisor3
+pkg_supervisor3_fetch = git
+pkg_supervisor3_repo = https://github.com/klarna/supervisor3.git
+pkg_supervisor3_commit = master
+
+PACKAGES += surrogate
+pkg_surrogate_name = surrogate
+pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes.
+pkg_surrogate_homepage = https://github.com/skruger/Surrogate
+pkg_surrogate_fetch = git
+pkg_surrogate_repo = https://github.com/skruger/Surrogate
+pkg_surrogate_commit = master
+
+PACKAGES += swab
+pkg_swab_name = swab
+pkg_swab_description = General purpose buffer handling module
+pkg_swab_homepage = https://github.com/crownedgrouse/swab
+pkg_swab_fetch = git
+pkg_swab_repo = https://github.com/crownedgrouse/swab
+pkg_swab_commit = master
+
+PACKAGES += swarm
+pkg_swarm_name = swarm
+pkg_swarm_description = Fast and simple acceptor pool for Erlang
+pkg_swarm_homepage = https://github.com/jeremey/swarm
+pkg_swarm_fetch = git
+pkg_swarm_repo = https://github.com/jeremey/swarm
+pkg_swarm_commit = master
+
+PACKAGES += switchboard
+pkg_switchboard_name = switchboard
+pkg_switchboard_description = A framework for processing email using worker plugins.
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard
+pkg_switchboard_fetch = git
+pkg_switchboard_repo = https://github.com/thusfresh/switchboard
+pkg_switchboard_commit = master
+
+PACKAGES += syn
+pkg_syn_name = syn
+pkg_syn_description = A global Process Registry and Process Group manager for Erlang.
+pkg_syn_homepage = https://github.com/ostinelli/syn
+pkg_syn_fetch = git
+pkg_syn_repo = https://github.com/ostinelli/syn
+pkg_syn_commit = master
+
+PACKAGES += sync
+pkg_sync_name = sync
+pkg_sync_description = On-the-fly recompiling and reloading in Erlang.
+pkg_sync_homepage = https://github.com/rustyio/sync
+pkg_sync_fetch = git
+pkg_sync_repo = https://github.com/rustyio/sync
+pkg_sync_commit = master
+
+PACKAGES += syntaxerl
+pkg_syntaxerl_name = syntaxerl
+pkg_syntaxerl_description = Syntax checker for Erlang
+pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_fetch = git
+pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl
+pkg_syntaxerl_commit = master
+
+PACKAGES += syslog
+pkg_syslog_name = syslog
+pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3)
+pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_fetch = git
+pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog
+pkg_syslog_commit = master
+
+PACKAGES += taskforce
+pkg_taskforce_name = taskforce
+pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks.
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce
+pkg_taskforce_fetch = git
+pkg_taskforce_repo = https://github.com/g-andrade/taskforce
+pkg_taskforce_commit = master
+
+PACKAGES += tddreloader
+pkg_tddreloader_name = tddreloader
+pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes
+pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader
+pkg_tddreloader_fetch = git
+pkg_tddreloader_repo = https://github.com/version2beta/tddreloader
+pkg_tddreloader_commit = master
+
+PACKAGES += tempo
+pkg_tempo_name = tempo
+pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang.
+pkg_tempo_homepage = https://github.com/selectel/tempo
+pkg_tempo_fetch = git
+pkg_tempo_repo = https://github.com/selectel/tempo
+pkg_tempo_commit = master
+
+PACKAGES += ticktick
+pkg_ticktick_name = ticktick
+pkg_ticktick_description = Ticktick is an ID generator for message services.
+pkg_ticktick_homepage = https://github.com/ericliang/ticktick
+pkg_ticktick_fetch = git
+pkg_ticktick_repo = https://github.com/ericliang/ticktick
+pkg_ticktick_commit = master
+
+PACKAGES += tinymq
+pkg_tinymq_name = tinymq
+pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue
+pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_fetch = git
+pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq
+pkg_tinymq_commit = master
+
+PACKAGES += tinymt
+pkg_tinymt_name = tinymt
+pkg_tinymt_description = TinyMT pseudo random number generator for Erlang.
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_fetch = git
+pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang
+pkg_tinymt_commit = master
+
+PACKAGES += tirerl
+pkg_tirerl_name = tirerl
+pkg_tirerl_description = Erlang interface to Elastic Search
+pkg_tirerl_homepage = https://github.com/inaka/tirerl
+pkg_tirerl_fetch = git
+pkg_tirerl_repo = https://github.com/inaka/tirerl
+pkg_tirerl_commit = master
+
+PACKAGES += toml
+pkg_toml_name = toml
+pkg_toml_description = TOML (0.4.0) config parser
+pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML
+pkg_toml_fetch = git
+pkg_toml_repo = https://github.com/dozzie/toml
+pkg_toml_commit = v0.2.0
+
+PACKAGES += traffic_tools
+pkg_traffic_tools_name = traffic_tools
+pkg_traffic_tools_description = Simple traffic limiting library
+pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools
+pkg_traffic_tools_fetch = git
+pkg_traffic_tools_repo = https://github.com/systra/traffic_tools
+pkg_traffic_tools_commit = master
+
+PACKAGES += trails
+pkg_trails_name = trails
+pkg_trails_description = A couple of improvements over Cowboy Routes
+pkg_trails_homepage = http://inaka.github.io/cowboy-trails/
+pkg_trails_fetch = git
+pkg_trails_repo = https://github.com/inaka/cowboy-trails
+pkg_trails_commit = master
+
+PACKAGES += trane
+pkg_trane_name = trane
+pkg_trane_description = SAX style broken HTML parser in Erlang
+pkg_trane_homepage = https://github.com/massemanet/trane
+pkg_trane_fetch = git
+pkg_trane_repo = https://github.com/massemanet/trane
+pkg_trane_commit = master
+
+PACKAGES += transit
+pkg_transit_name = transit
+pkg_transit_description = transit format for erlang
+pkg_transit_homepage = https://github.com/isaiah/transit-erlang
+pkg_transit_fetch = git
+pkg_transit_repo = https://github.com/isaiah/transit-erlang
+pkg_transit_commit = master
+
+PACKAGES += trie
+pkg_trie_name = trie
+pkg_trie_description = Erlang Trie Implementation
+pkg_trie_homepage = https://github.com/okeuday/trie
+pkg_trie_fetch = git
+pkg_trie_repo = https://github.com/okeuday/trie
+pkg_trie_commit = master
+
+PACKAGES += triq
+pkg_triq_name = triq
+pkg_triq_description = Trifork QuickCheck
+pkg_triq_homepage = https://triq.gitlab.io
+pkg_triq_fetch = git
+pkg_triq_repo = https://gitlab.com/triq/triq.git
+pkg_triq_commit = master
+
+PACKAGES += tunctl
+pkg_tunctl_name = tunctl
+pkg_tunctl_description = Erlang TUN/TAP interface
+pkg_tunctl_homepage = https://github.com/msantos/tunctl
+pkg_tunctl_fetch = git
+pkg_tunctl_repo = https://github.com/msantos/tunctl
+pkg_tunctl_commit = master
+
+PACKAGES += twerl
+pkg_twerl_name = twerl
+pkg_twerl_description = Erlang client for the Twitter Streaming API
+pkg_twerl_homepage = https://github.com/lucaspiller/twerl
+pkg_twerl_fetch = git
+pkg_twerl_repo = https://github.com/lucaspiller/twerl
+pkg_twerl_commit = oauth
+
+PACKAGES += twitter_erlang
+pkg_twitter_erlang_name = twitter_erlang
+pkg_twitter_erlang_description = An Erlang twitter client
+pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_fetch = git
+pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter
+pkg_twitter_erlang_commit = master
+
+PACKAGES += ucol_nif
+pkg_ucol_nif_name = ucol_nif
+pkg_ucol_nif_description = ICU based collation Erlang module
+pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_fetch = git
+pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif
+pkg_ucol_nif_commit = master
+
+PACKAGES += unicorn
+pkg_unicorn_name = unicorn
+pkg_unicorn_description = Generic configuration server
+pkg_unicorn_homepage = https://github.com/shizzard/unicorn
+pkg_unicorn_fetch = git
+pkg_unicorn_repo = https://github.com/shizzard/unicorn
+pkg_unicorn_commit = master
+
+PACKAGES += unsplit
+pkg_unsplit_name = unsplit
+pkg_unsplit_description = Resolves conflicts in Mnesia after network splits
+pkg_unsplit_homepage = https://github.com/uwiger/unsplit
+pkg_unsplit_fetch = git
+pkg_unsplit_repo = https://github.com/uwiger/unsplit
+pkg_unsplit_commit = master
+
+PACKAGES += uuid
+pkg_uuid_name = uuid
+pkg_uuid_description = Erlang UUID Implementation
+pkg_uuid_homepage = https://github.com/okeuday/uuid
+pkg_uuid_fetch = git
+pkg_uuid_repo = https://github.com/okeuday/uuid
+pkg_uuid_commit = master
+
+PACKAGES += ux
+pkg_ux_name = ux
+pkg_ux_description = Unicode eXtension for Erlang (Strings, Collation)
+pkg_ux_homepage = https://github.com/erlang-unicode/ux
+pkg_ux_fetch = git
+pkg_ux_repo = https://github.com/erlang-unicode/ux
+pkg_ux_commit = master
+
+PACKAGES += vert
+pkg_vert_name = vert
+pkg_vert_description = Erlang binding to libvirt virtualization API
+pkg_vert_homepage = https://github.com/msantos/erlang-libvirt
+pkg_vert_fetch = git
+pkg_vert_repo = https://github.com/msantos/erlang-libvirt
+pkg_vert_commit = master
+
+PACKAGES += verx
+pkg_verx_name = verx
+pkg_verx_description = Erlang implementation of the libvirtd remote protocol
+pkg_verx_homepage = https://github.com/msantos/verx
+pkg_verx_fetch = git
+pkg_verx_repo = https://github.com/msantos/verx
+pkg_verx_commit = master
+
+PACKAGES += vmq_acl
+pkg_vmq_acl_name = vmq_acl
+pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_acl_homepage = https://verne.mq/
+pkg_vmq_acl_fetch = git
+pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl
+pkg_vmq_acl_commit = master
+
+PACKAGES += vmq_bridge
+pkg_vmq_bridge_name = vmq_bridge
+pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_bridge_homepage = https://verne.mq/
+pkg_vmq_bridge_fetch = git
+pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge
+pkg_vmq_bridge_commit = master
+
+PACKAGES += vmq_graphite
+pkg_vmq_graphite_name = vmq_graphite
+pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_graphite_homepage = https://verne.mq/
+pkg_vmq_graphite_fetch = git
+pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite
+pkg_vmq_graphite_commit = master
+
+PACKAGES += vmq_passwd
+pkg_vmq_passwd_name = vmq_passwd
+pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_passwd_homepage = https://verne.mq/
+pkg_vmq_passwd_fetch = git
+pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd
+pkg_vmq_passwd_commit = master
+
+PACKAGES += vmq_server
+pkg_vmq_server_name = vmq_server
+pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_server_homepage = https://verne.mq/
+pkg_vmq_server_fetch = git
+pkg_vmq_server_repo = https://github.com/erlio/vmq_server
+pkg_vmq_server_commit = master
+
+PACKAGES += vmq_snmp
+pkg_vmq_snmp_name = vmq_snmp
+pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_snmp_homepage = https://verne.mq/
+pkg_vmq_snmp_fetch = git
+pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp
+pkg_vmq_snmp_commit = master
+
+PACKAGES += vmq_systree
+pkg_vmq_systree_name = vmq_systree
+pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker
+pkg_vmq_systree_homepage = https://verne.mq/
+pkg_vmq_systree_fetch = git
+pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree
+pkg_vmq_systree_commit = master
+
+PACKAGES += vmstats
+pkg_vmstats_name = vmstats
+pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs.
+pkg_vmstats_homepage = https://github.com/ferd/vmstats
+pkg_vmstats_fetch = git
+pkg_vmstats_repo = https://github.com/ferd/vmstats
+pkg_vmstats_commit = master
+
+PACKAGES += walrus
+pkg_walrus_name = walrus
+pkg_walrus_description = Walrus - Mustache-like Templating
+pkg_walrus_homepage = https://github.com/devinus/walrus
+pkg_walrus_fetch = git
+pkg_walrus_repo = https://github.com/devinus/walrus
+pkg_walrus_commit = master
+
+PACKAGES += webmachine
+pkg_webmachine_name = webmachine
+pkg_webmachine_description = A REST-based system for building web applications.
+pkg_webmachine_homepage = https://github.com/basho/webmachine
+pkg_webmachine_fetch = git
+pkg_webmachine_repo = https://github.com/basho/webmachine
+pkg_webmachine_commit = master
+
+PACKAGES += websocket_client
+pkg_websocket_client_name = websocket_client
+pkg_websocket_client_description = Erlang websocket client (ws and wss supported)
+pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_fetch = git
+pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client
+pkg_websocket_client_commit = master
+
+PACKAGES += worker_pool
+pkg_worker_pool_name = worker_pool
+pkg_worker_pool_description = a simple erlang worker pool
+pkg_worker_pool_homepage = https://github.com/inaka/worker_pool
+pkg_worker_pool_fetch = git
+pkg_worker_pool_repo = https://github.com/inaka/worker_pool
+pkg_worker_pool_commit = master
+
+PACKAGES += wrangler
+pkg_wrangler_name = wrangler
+pkg_wrangler_description = Import of the Wrangler svn repository.
+pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html
+pkg_wrangler_fetch = git
+pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler
+pkg_wrangler_commit = master
+
+PACKAGES += wsock
+pkg_wsock_name = wsock
+pkg_wsock_description = Erlang library to build WebSocket clients and servers
+pkg_wsock_homepage = https://github.com/madtrick/wsock
+pkg_wsock_fetch = git
+pkg_wsock_repo = https://github.com/madtrick/wsock
+pkg_wsock_commit = master
+
+PACKAGES += xhttpc
+pkg_xhttpc_name = xhttpc
+pkg_xhttpc_description = Extensible HTTP Client for Erlang
+pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc
+pkg_xhttpc_fetch = git
+pkg_xhttpc_repo = https://github.com/seriyps/xhttpc
+pkg_xhttpc_commit = master
+
+PACKAGES += xref_runner
+pkg_xref_runner_name = xref_runner
+pkg_xref_runner_description = Erlang Xref Runner (inspired by rebar xref)
+pkg_xref_runner_homepage = https://github.com/inaka/xref_runner
+pkg_xref_runner_fetch = git
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner
+pkg_xref_runner_commit = master
+
+PACKAGES += yamerl
+pkg_yamerl_name = yamerl
+pkg_yamerl_description = YAML 1.2 parser in pure Erlang
+pkg_yamerl_homepage = https://github.com/yakaz/yamerl
+pkg_yamerl_fetch = git
+pkg_yamerl_repo = https://github.com/yakaz/yamerl
+pkg_yamerl_commit = master
+
+PACKAGES += yamler
+pkg_yamler_name = yamler
+pkg_yamler_description = libyaml-based yaml loader for Erlang
+pkg_yamler_homepage = https://github.com/goertzenator/yamler
+pkg_yamler_fetch = git
+pkg_yamler_repo = https://github.com/goertzenator/yamler
+pkg_yamler_commit = master
+
+PACKAGES += yaws
+pkg_yaws_name = yaws
+pkg_yaws_description = Yaws webserver
+pkg_yaws_homepage = http://yaws.hyber.org
+pkg_yaws_fetch = git
+pkg_yaws_repo = https://github.com/klacke/yaws
+pkg_yaws_commit = master
+
+PACKAGES += zab_engine
+pkg_zab_engine_name = zab_engine
+pkg_zab_engine_description = ZAB protocol implemented in Erlang
+pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_fetch = git
+pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine
+pkg_zab_engine_commit = master
+
+PACKAGES += zabbix_sender
+pkg_zabbix_sender_name = zabbix_sender
+pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang
+pkg_zabbix_sender_homepage = https://github.com/stalkermn/zabbix_sender
+pkg_zabbix_sender_fetch = git
+pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git
+pkg_zabbix_sender_commit = master
+
+PACKAGES += zeta
+pkg_zeta_name = zeta
+pkg_zeta_description = HTTP access log parser in Erlang
+pkg_zeta_homepage = https://github.com/s1n4/zeta
+pkg_zeta_fetch = git
+pkg_zeta_repo = https://github.com/s1n4/zeta
+pkg_zeta_commit = master
+
+PACKAGES += zippers
+pkg_zippers_name = zippers
+pkg_zippers_description = A library for functional zipper data structures in Erlang.
+pkg_zippers_homepage = https://github.com/ferd/zippers
+pkg_zippers_fetch = git
+pkg_zippers_repo = https://github.com/ferd/zippers
+pkg_zippers_commit = master
+
+PACKAGES += zlists
+pkg_zlists_name = zlists
+pkg_zlists_description = Erlang lazy lists library.
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists
+pkg_zlists_fetch = git
+pkg_zlists_repo = https://github.com/vjache/erlang-zlists
+pkg_zlists_commit = master
+
+PACKAGES += zraft_lib
+pkg_zraft_lib_name = zraft_lib
+pkg_zraft_lib_description = Erlang raft consensus protocol implementation
+pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_fetch = git
+pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib
+pkg_zraft_lib_commit = master
+
+PACKAGES += zucchini
+pkg_zucchini_name = zucchini
+pkg_zucchini_description = An Erlang INI parser
+pkg_zucchini_homepage = https://github.com/devinus/zucchini
+pkg_zucchini_fetch = git
+pkg_zucchini_repo = https://github.com/devinus/zucchini
+pkg_zucchini_commit = master
+
+# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: search
+
+define pkg_print
+ $(verbose) printf "%s\n" \
+ $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \
+ "App name: $(pkg_$(1)_name)" \
+ "Description: $(pkg_$(1)_description)" \
+ "Home page: $(pkg_$(1)_homepage)" \
+ "Fetch with: $(pkg_$(1)_fetch)" \
+ "Repository: $(pkg_$(1)_repo)" \
+ "Commit: $(pkg_$(1)_commit)" \
+ ""
+
+endef
+
+search:
+ifdef q
+ $(foreach p,$(PACKAGES), \
+ $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \
+ $(call pkg_print,$(p))))
+else
+ $(foreach p,$(PACKAGES),$(call pkg_print,$(p)))
+endif
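+
+# Usage sketch (the query string below is just an example): `q` is matched
+# case-insensitively against each package name and description, so from the
+# command line one can run:
+#
+#     make search                # print every known package
+#     make search q=cowboy       # print only packages matching "cowboy"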
+
+# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-deps clean-tmp-deps.log
+
+# Configuration.
+
+ifdef OTP_DEPS
+$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.)
+endif
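+
+# For illustration: in a project Makefile, LOCAL_DEPS typically lists OTP or
+# in-repo applications, e.g. `LOCAL_DEPS = crypto ssl`. Apps found under
+# $(APPS_DIR) are collected by LOCAL_DEPS_DIRS below, and every entry also
+# ends up in the applications list of the generated .app file.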
+
+IGNORE_DEPS ?=
+export IGNORE_DEPS
+
+APPS_DIR ?= $(CURDIR)/apps
+export APPS_DIR
+
+DEPS_DIR ?= $(CURDIR)/deps
+export DEPS_DIR
+
+REBAR_DEPS_DIR = $(DEPS_DIR)
+export REBAR_DEPS_DIR
+
+REBAR_GIT ?= https://github.com/rebar/rebar
+REBAR_COMMIT ?= 576e12171ab8d69b048b827b92aa65d067deea01
+
+# External "early" plugins (see core/plugins.mk for regular plugins).
+# They both use the core_dep_plugin macro.
+
+define core_dep_plugin
+ifeq ($(2),$(PROJECT))
+-include $$(patsubst $(PROJECT)/%,%,$(1))
+else
+-include $(DEPS_DIR)/$(1)
+
+$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ;
+endif
+endef
+
+DEP_EARLY_PLUGINS ?=
+
+$(foreach p,$(DEP_EARLY_PLUGINS),\
+ $(eval $(if $(findstring /,$p),\
+ $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+ $(call core_dep_plugin,$p/early-plugins.mk,$p))))
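+
+# Illustrative values (hypothetical dependency name), following the
+# core_dep_plugin expansion above:
+#
+#     DEP_EARLY_PLUGINS = my_dep              # includes $(DEPS_DIR)/my_dep/early-plugins.mk
+#     DEP_EARLY_PLUGINS = my_dep/mk/early.mk  # includes $(DEPS_DIR)/my_dep/mk/early.mk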
+
+# Query functions.
+
+query_fetch_method = $(if $(dep_$(1)),$(call _qfm_dep,$(word 1,$(dep_$(1)))),$(call _qfm_pkg,$(1)))
+_qfm_dep = $(if $(dep_fetch_$(1)),$(1),$(if $(IS_DEP),legacy,fail))
+_qfm_pkg = $(if $(pkg_$(1)_fetch),$(pkg_$(1)_fetch),fail)
+
+query_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1)))
+
+query_repo = $(call _qr,$(1),$(call query_fetch_method,$(1)))
+_qr = $(if $(query_repo_$(2)),$(call query_repo_$(2),$(1)),$(call dep_repo,$(1)))
+
+query_repo_default = $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))
+query_repo_git = $(patsubst git://github.com/%,https://github.com/%,$(call query_repo_default,$(1)))
+query_repo_git-subfolder = $(call query_repo_git,$(1))
+query_repo_git-submodule = -
+query_repo_hg = $(call query_repo_default,$(1))
+query_repo_svn = $(call query_repo_default,$(1))
+query_repo_cp = $(call query_repo_default,$(1))
+query_repo_ln = $(call query_repo_default,$(1))
+query_repo_hex = https://hex.pm/packages/$(if $(word 3,$(dep_$(1))),$(word 3,$(dep_$(1))),$(1))
+query_repo_fail = -
+query_repo_legacy = -
+
+query_version = $(call _qv,$(1),$(call query_fetch_method,$(1)))
+_qv = $(if $(query_version_$(2)),$(call query_version_$(2),$(1)),$(call dep_commit,$(1)))
+
+query_version_default = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit)))
+query_version_git = $(call query_version_default,$(1))
+query_version_git-subfolder = $(call query_version_git,$(1))
+query_version_git-submodule = -
+query_version_hg = $(call query_version_default,$(1))
+query_version_svn = -
+query_version_cp = -
+query_version_ln = -
+query_version_hex = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_commit)))
+query_version_fail = -
+query_version_legacy = -
+
+query_extra = $(call _qe,$(1),$(call query_fetch_method,$(1)))
+_qe = $(if $(query_extra_$(2)),$(call query_extra_$(2),$(1)),-)
+
+query_extra_git = -
+query_extra_git-subfolder = $(if $(dep_$(1)),subfolder=$(word 4,$(dep_$(1))),-)
+query_extra_git-submodule = -
+query_extra_hg = -
+query_extra_svn = -
+query_extra_cp = -
+query_extra_ln = -
+query_extra_hex = $(if $(dep_$(1)),package-name=$(word 3,$(dep_$(1))),-)
+query_extra_fail = -
+query_extra_legacy = -
+
+query_absolute_path = $(addprefix $(DEPS_DIR)/,$(call query_name,$(1)))
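+
+# Worked example (hypothetical dep entry): given
+# `dep_cowboy = git https://github.com/ninenines/cowboy 2.9.0`, the functions
+# above resolve to query_fetch_method = git,
+# query_repo = https://github.com/ninenines/cowboy, query_version = 2.9.0 and
+# query_absolute_path = $(DEPS_DIR)/cowboy. Without a dep_* line, the
+# pkg_*_fetch/_repo/_commit values from the package index are used instead.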
+
+# Deprecated legacy query functions.
+dep_fetch = $(call query_fetch_method,$(1))
+dep_name = $(call query_name,$(1))
+dep_repo = $(call query_repo_git,$(1))
+dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(if $(filter hex,$(word 1,$(dep_$(1)))),$(word 2,$(dep_$(1))),$(word 3,$(dep_$(1)))),$(pkg_$(1)_commit)))
+
+LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a)))
+ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep))))
+
+# When we are calling an app directly we don't want to include it here,
+# otherwise it would be treated both as an app and as the top-level project.
+ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d)))
+ifdef ROOT_DIR
+ifndef IS_APP
+ALL_APPS_DIRS := $(filter-out $(APPS_DIR)/$(notdir $(CURDIR)),$(ALL_APPS_DIRS))
+endif
+endif
+
+ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),)
+ifeq ($(ERL_LIBS),)
+ ERL_LIBS = $(APPS_DIR):$(DEPS_DIR)
+else
+ ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR)
+endif
+endif
+export ERL_LIBS
+
+export NO_AUTOPATCH
+
+# Verbosity.
+
+dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))";
+dep_verbose_2 = set -x;
+dep_verbose = $(dep_verbose_$(V))
+
+# Optimization: don't recompile deps unless truly necessary.
+
+ifndef IS_DEP
+ifneq ($(MAKELEVEL),0)
+$(shell rm -f ebin/dep_built)
+endif
+endif
+
+# Core targets.
+
+ALL_APPS_DIRS_TO_BUILD = $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS))
+
+apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | $(ERLANG_MK_TMP)
+# Create ebin directory for all apps to make sure Erlang recognizes them
+# as proper OTP applications when using -include_lib. This is a temporary
+# fix; a proper fix would be to compile apps/* in the right order.
+ifndef IS_APP
+ifneq ($(ALL_APPS_DIRS),)
+ $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
+ mkdir -p $$dep/ebin; \
+ done
+endif
+endif
+# At the toplevel: if LOCAL_DEPS is defined with at least one local app, only
+# compile that list of apps. Otherwise, compile everything.
+# Within an app: compile all LOCAL_DEPS that are (uncompiled) local apps.
+ifneq ($(ALL_APPS_DIRS_TO_BUILD),)
+ $(verbose) set -e; for dep in $(ALL_APPS_DIRS_TO_BUILD); do \
+ if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \
+ :; \
+ else \
+ echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \
+ $(MAKE) -C $$dep $(if $(IS_TEST),test-build-app) IS_APP=1; \
+ fi \
+ done
+endif
+
+clean-tmp-deps.log:
+ifeq ($(IS_APP)$(IS_DEP),)
+ $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log $(ERLANG_MK_TMP)/deps.log
+endif
+
+# Erlang.mk does not rebuild dependencies once they have been compiled.
+# If a developer is working on the top-level project and some
+# dependencies at the same time, they may want to change this behavior.
+# There are two solutions:
+# 1. Set `FULL=1` so that all dependencies are visited and
+# recursively recompiled if necessary.
+# 2. Set `FORCE_REBUILD=` to the specific list of dependencies that
+# should be recompiled (instead of the whole set).
+
+FORCE_REBUILD ?=
+
+ifeq ($(origin FULL),undefined)
+ifneq ($(strip $(force_rebuild_dep)$(FORCE_REBUILD)),)
+define force_rebuild_dep
+echo "$(FORCE_REBUILD)" | grep -qw "$$(basename "$1")"
+endef
+endif
+endif
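+
+# Concrete invocations of the two options above (dependency names are examples):
+#
+#     make FULL=1                   # visit every dependency and rebuild as needed
+#     make FORCE_REBUILD="cowlib"   # force only the listed dependencies to rebuild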
+
+ifneq ($(SKIP_DEPS),)
+deps::
+else
+deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | $(ERLANG_MK_TMP)
+ifneq ($(ALL_DEPS_DIRS),)
+ $(verbose) set -e; for dep in $(ALL_DEPS_DIRS); do \
+ if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \
+ :; \
+ else \
+ echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \
+ if [ -z "$(strip $(FULL))" ] $(if $(force_rebuild_dep),&& ! ($(call force_rebuild_dep,$$dep)),) && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
+ :; \
+ elif [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \
+ $(MAKE) -C $$dep IS_DEP=1; \
+ if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
+ else \
+ echo "Error: No Makefile to build dependency $$dep." >&2; \
+ exit 2; \
+ fi \
+ fi \
+ done
+endif
+endif
+
+# Deps related targets.
+
+# @todo rename GNUmakefile and makefile into Makefile first, if they exist
+# While the Makefile could also be named GNUmakefile or makefile,
+# in practice only Makefile is needed so far.
+define dep_autopatch
+ if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \
+ rm -rf $(DEPS_DIR)/$1/ebin/; \
+ $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+ $(call dep_autopatch_erlang_mk,$(1)); \
+ elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+ if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \
+ $(call dep_autopatch2,$1); \
+ elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+ $(call dep_autopatch2,$(1)); \
+ elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \
+ $(call dep_autopatch2,$(1)); \
+ elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \
+ $(call dep_autopatch2,$(1)); \
+ fi \
+ else \
+ if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \
+ $(call dep_autopatch_noop,$(1)); \
+ else \
+ $(call dep_autopatch2,$(1)); \
+ fi \
+ fi
+endef
+
+define dep_autopatch2
+ ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \
+ mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \
+ rm -f $(DEPS_DIR)/$1/ebin/$1.app; \
+ if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \
+ $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \
+ fi; \
+ $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \
+ if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \
+ $(call dep_autopatch_fetch_rebar); \
+ $(call dep_autopatch_rebar,$(1)); \
+ else \
+ $(call dep_autopatch_gen,$(1)); \
+ fi
+endef
+
+define dep_autopatch_noop
+ printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# Replace "include erlang.mk" with a line that will load the parent Erlang.mk
+# if given. Do it for all 3 possible Makefile file names.
+ifeq ($(NO_AUTOPATCH_ERLANG_MK),)
+define dep_autopatch_erlang_mk
+ for f in Makefile makefile GNUmakefile; do \
+ if [ -f $(DEPS_DIR)/$1/$$f ]; then \
+ sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \
+ fi \
+ done
+endef
+else
+define dep_autopatch_erlang_mk
+ :
+endef
+endif
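+
+# For example, setting NO_AUTOPATCH_ERLANG_MK to any non-empty value in the
+# project Makefile selects the no-op branch above and leaves the dependencies'
+# own "include erlang.mk" lines untouched:
+#
+#     NO_AUTOPATCH_ERLANG_MK = 1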
+
+define dep_autopatch_gen
+ printf "%s\n" \
+ "ERLC_OPTS = +debug_info" \
+ "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile
+endef
+
+# We use flock/lockf when available to avoid concurrency issues.
+define dep_autopatch_fetch_rebar
+ if command -v flock >/dev/null; then \
+ flock $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
+ elif command -v lockf >/dev/null; then \
+ lockf $(ERLANG_MK_TMP)/rebar.lock sh -c "$(call dep_autopatch_fetch_rebar2)"; \
+ else \
+ $(call dep_autopatch_fetch_rebar2); \
+ fi
+endef
+
+define dep_autopatch_fetch_rebar2
+ if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \
+ git clone -q -n -- $(REBAR_GIT) $(ERLANG_MK_TMP)/rebar; \
+ cd $(ERLANG_MK_TMP)/rebar; \
+ git checkout -q $(REBAR_COMMIT); \
+ ./bootstrap; \
+ cd -; \
+ fi
+endef
+
+define dep_autopatch_rebar
+ if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \
+ mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \
+ fi; \
+ $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \
+ rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app
+endef
+
+define dep_autopatch_rebar.erl
+ application:load(rebar),
+ application:set_env(rebar, log_level, debug),
+ rmemo:start(),
+ Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of
+ {ok, Conf0} -> Conf0;
+ _ -> []
+ end,
+ {Conf, OsEnv} = fun() ->
+ case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of
+ false -> {Conf1, []};
+ true ->
+ Bindings0 = erl_eval:new_bindings(),
+ Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0),
+ Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1),
+ Before = os:getenv(),
+ {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings),
+ {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)}
+ end
+ end(),
+ Write = fun (Text) ->
+ file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append])
+ end,
+ Escape = fun (Text) ->
+ re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}])
+ end,
+ Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package "
+ "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"),
+ Write("C_SRC_DIR = /path/do/not/exist\n"),
+ Write("C_SRC_TYPE = rebar\n"),
+ Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"),
+ Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]),
+ ToList = fun
+ (V) when is_atom(V) -> atom_to_list(V);
+ (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'"
+ end,
+ fun() ->
+ Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"),
+ case lists:keyfind(erl_opts, 1, Conf) of
+ false -> ok;
+ {_, ErlOpts} ->
+ lists:foreach(fun
+ ({d, D}) ->
+ Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
+ ({d, DKey, DVal}) ->
+ Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n");
+ ({i, I}) ->
+ Write(["ERLC_OPTS += -I ", I, "\n"]);
+ ({platform_define, Regex, D}) ->
+ case rebar_utils:is_arch(Regex) of
+ true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n");
+ false -> ok
+ end;
+ ({parse_transform, PT}) ->
+ Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n");
+ (_) -> ok
+ end, ErlOpts)
+ end,
+ Write("\n")
+ end(),
+ GetHexVsn = fun(N, NP) ->
+ case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of
+ {ok, Lock} ->
+ io:format("~p~n", [Lock]),
+ case lists:keyfind("1.1.0", 1, Lock) of
+ {_, LockPkgs} ->
+ io:format("~p~n", [LockPkgs]),
+ case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of
+ {_, {pkg, _, Vsn}, _} ->
+ io:format("~p~n", [Vsn]),
+ {N, {hex, NP, binary_to_list(Vsn)}};
+ _ ->
+ false
+ end;
+ _ ->
+ false
+ end;
+ _ ->
+ false
+ end
+ end,
+ SemVsn = fun
+ ("~>" ++ S0) ->
+ S = case S0 of
+ " " ++ S1 -> S1;
+ _ -> S0
+ end,
+ case length([ok || $$. <- S]) of
+ 0 -> S ++ ".0.0";
+ 1 -> S ++ ".0";
+ _ -> S
+ end;
+ (S) -> S
+ end,
+ fun() ->
+ File = case lists:keyfind(deps, 1, Conf) of
+ false -> [];
+ {_, Deps} ->
+ [begin case case Dep of
+ N when is_atom(N) -> GetHexVsn(N, N);
+ {N, S} when is_atom(N), is_list(S) -> {N, {hex, N, SemVsn(S)}};
+ {N, {pkg, NP}} when is_atom(N) -> GetHexVsn(N, NP);
+ {N, S, {pkg, NP}} -> {N, {hex, NP, S}};
+ {N, S} when is_tuple(S) -> {N, S};
+ {N, _, S} -> {N, S};
+ {N, _, S, _} -> {N, S};
+ _ -> false
+ end of
+ false -> ok;
+ {Name, Source} ->
+ {Method, Repo, Commit} = case Source of
+ {hex, NPV, V} -> {hex, V, NPV};
+ {git, R} -> {git, R, master};
+ {M, R, {branch, C}} -> {M, R, C};
+ {M, R, {ref, C}} -> {M, R, C};
+ {M, R, {tag, C}} -> {M, R, C};
+ {M, R, C} -> {M, R, C}
+ end,
+ Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit]))
+ end end || Dep <- Deps]
+ end
+ end(),
+ fun() ->
+ case lists:keyfind(erl_first_files, 1, Conf) of
+ false -> ok;
+ {_, Files} ->
+ Names = [[" ", case lists:reverse(F) of
+ "lre." ++ Elif -> lists:reverse(Elif);
+ "lrx." ++ Elif -> lists:reverse(Elif);
+ "lry." ++ Elif -> lists:reverse(Elif);
+ Elif -> lists:reverse(Elif)
+ end] || "src/" ++ F <- Files],
+ Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names]))
+ end
+ end(),
+ Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"),
+ Write("\npreprocess::\n"),
+ Write("\npre-deps::\n"),
+ Write("\npre-app::\n"),
+ PatchHook = fun(Cmd) ->
+ Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]),
+ case Cmd2 of
+ "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+ "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1);
+ "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+ "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1);
+ _ -> Escape(Cmd)
+ end
+ end,
+ fun() ->
+ case lists:keyfind(pre_hooks, 1, Conf) of
+ false -> ok;
+ {_, Hooks} ->
+ [case H of
+ {'get-deps', Cmd} ->
+ Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n");
+ {compile, Cmd} ->
+ Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+ {Regex, compile, Cmd} ->
+ case rebar_utils:is_arch(Regex) of
+ true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n");
+ false -> ok
+ end;
+ _ -> ok
+ end || H <- Hooks]
+ end
+ end(),
+ ShellToMk = fun(V0) ->
+ V1 = re:replace(V0, "[$$][(]", "$$\(shell ", [global]),
+ V = re:replace(V1, "([$$])(?![(])(\\\\w*)", "\\\\1(\\\\2)", [global]),
+ re:replace(V, "-Werror\\\\b", "", [{return, list}, global])
+ end,
+ PortSpecs = fun() ->
+ case lists:keyfind(port_specs, 1, Conf) of
+ false ->
+ case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of
+ false -> [];
+ true ->
+ [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"),
+ proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}]
+ end;
+ {_, Specs} ->
+ lists:flatten([case S of
+ {Output, Input} -> {ShellToMk(Output), Input, []};
+ {Regex, Output, Input} ->
+ case rebar_utils:is_arch(Regex) of
+ true -> {ShellToMk(Output), Input, []};
+ false -> []
+ end;
+ {Regex, Output, Input, [{env, Env}]} ->
+ case rebar_utils:is_arch(Regex) of
+ true -> {ShellToMk(Output), Input, Env};
+ false -> []
+ end
+ end || S <- Specs])
+ end
+ end(),
+ PortSpecWrite = fun (Text) ->
+ file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append])
+ end,
+ case PortSpecs of
+ [] -> ok;
+ _ ->
+ Write("\npre-app::\n\t@$$\(MAKE) --no-print-directory -f c_src/Makefile.erlang.mk\n"),
+ PortSpecWrite(io_lib:format("ERL_CFLAGS ?= -finline-functions -Wall -fPIC -I \\"~s/erts-~s/include\\" -I \\"~s\\"\n",
+ [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])),
+ PortSpecWrite(io_lib:format("ERL_LDFLAGS ?= -L \\"~s\\" -lei\n",
+ [code:lib_dir(erl_interface, lib)])),
+ [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv],
+ FilterEnv = fun(Env) ->
+ lists:flatten([case E of
+ {_, _} -> E;
+ {Regex, K, V} ->
+ case rebar_utils:is_arch(Regex) of
+ true -> {K, V};
+ false -> []
+ end
+ end || E <- Env])
+ end,
+ MergeEnv = fun(Env) ->
+ lists:foldl(fun ({K, V}, Acc) ->
+ case lists:keyfind(K, 1, Acc) of
+ false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc];
+ {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc]
+ end
+ end, [], Env)
+ end,
+ PortEnv = case lists:keyfind(port_env, 1, Conf) of
+ false -> [];
+ {_, PortEnv0} -> FilterEnv(PortEnv0)
+ end,
+ PortSpec = fun ({Output, Input0, Env}) ->
+ filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output),
+ Input = [[" ", I] || I <- Input0],
+ PortSpecWrite([
+ [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))],
+ case $(PLATFORM) of
+ darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress";
+ _ -> ""
+ end,
+ "\n\nall:: ", Output, "\n\t@:\n\n",
+ "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+ "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+ "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+ "%.o: %.cpp\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n",
+ [[Output, ": ", K, " += ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))],
+ Output, ": $$\(foreach ext,.c .C .cc .cpp,",
+ "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n",
+ "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)",
+ case {filename:extension(Output), $(PLATFORM)} of
+ {[], _} -> "\n";
+ {_, darwin} -> "\n";
+ _ -> " -shared\n"
+ end])
+ end,
+ [PortSpec(S) || S <- PortSpecs]
+ end,
+ fun() ->
+ case lists:keyfind(plugins, 1, Conf) of
+ false -> ok;
+ {_, Plugins0} ->
+ Plugins = [P || P <- Plugins0, is_tuple(P)],
+ case lists:keyfind('lfe-compile', 1, Plugins) of
+ false -> ok;
+ _ -> Write("\nBUILD_DEPS = lfe lfe.mk\ndep_lfe.mk = git https://github.com/ninenines/lfe.mk master\nDEP_PLUGINS = lfe.mk\n")
+ end
+ end
+ end(),
+ Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"),
+ RunPlugin = fun(Plugin, Step) ->
+ case erlang:function_exported(Plugin, Step, 2) of
+ false -> ok;
+ true ->
+ c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"),
+ Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(),
+ dict:store(base_dir, "", dict:new())}, undefined),
+ io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret])
+ end
+ end,
+ fun() ->
+ case lists:keyfind(plugins, 1, Conf) of
+ false -> ok;
+ {_, Plugins0} ->
+ Plugins = [P || P <- Plugins0, is_atom(P)],
+ [begin
+ case lists:keyfind(deps, 1, Conf) of
+ false -> ok;
+ {_, Deps} ->
+ case lists:keyfind(P, 1, Deps) of
+ false -> ok;
+ _ ->
+ Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P),
+ io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]),
+ io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]),
+ code:add_patha(Path ++ "/ebin")
+ end
+ end
+ end || P <- Plugins],
+ [case code:load_file(P) of
+ {module, P} -> ok;
+ _ ->
+ case lists:keyfind(plugin_dir, 1, Conf) of
+ false -> ok;
+ {_, PluginsDir} ->
+ ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl",
+ {ok, P, Bin} = compile:file(ErlFile, [binary]),
+ {module, P} = code:load_binary(P, ErlFile, Bin)
+ end
+ end || P <- Plugins],
+ [RunPlugin(P, preprocess) || P <- Plugins],
+ [RunPlugin(P, pre_compile) || P <- Plugins],
+ [RunPlugin(P, compile) || P <- Plugins]
+ end
+ end(),
+ halt()
+endef
+
+define dep_autopatch_appsrc_script.erl
+ AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+ AppSrcScript = AppSrc ++ ".script",
+ {ok, Conf0} = file:consult(AppSrc),
+ Bindings0 = erl_eval:new_bindings(),
+ Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0),
+ Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1),
+ Conf = case file:script(AppSrcScript, Bindings) of
+ {ok, [C]} -> C;
+ {ok, C} -> C
+ end,
+ ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])),
+ halt()
+endef
+
+define dep_autopatch_appsrc.erl
+ AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)",
+ AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end,
+ case filelib:is_regular(AppSrcIn) of
+ false -> ok;
+ true ->
+ {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn),
+ L1 = lists:keystore(modules, 1, L0, {modules, []}),
+ L2 = case lists:keyfind(vsn, 1, L1) of
+ {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, lists:droplast(os:cmd("git -C $(DEPS_DIR)/$1 describe --dirty --tags --always"))});
+ {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"});
+ _ -> L1
+ end,
+ L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end,
+ ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])),
+ case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end
+ end,
+ halt()
+endef
+
+define dep_fetch_git
+ git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+ cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_git-subfolder
+ mkdir -p $(ERLANG_MK_TMP)/git-subfolder; \
+ git clone -q -n -- $(call dep_repo,$1) \
+ $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1); \
+ cd $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1) \
+ && git checkout -q $(call dep_commit,$1); \
+ ln -s $(ERLANG_MK_TMP)/git-subfolder/$(call dep_name,$1)/$(word 4,$(dep_$(1))) \
+ $(DEPS_DIR)/$(call dep_name,$1);
+endef
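+
+# Example spec (hypothetical repository): for the git-subfolder method the 4th
+# word of the dep_* line is the subfolder that gets symlinked into $(DEPS_DIR):
+#
+#     dep_my_app = git-subfolder https://github.com/example/monorepo main apps/my_app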
+
+define dep_fetch_git-submodule
+ git submodule update --init -- $(DEPS_DIR)/$1;
+endef
+
+define dep_fetch_hg
+ hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \
+ cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1));
+endef
+
+define dep_fetch_svn
+ svn checkout -q $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_cp
+ cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+define dep_fetch_ln
+ ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1));
+endef
+
+# Hex only has a package version. No need to look in the Erlang.mk packages.
+define dep_fetch_hex
+ mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \
+ $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\
+ https://repo.hex.pm/tarballs/$(if $(word 3,$(dep_$1)),$(word 3,$(dep_$1)),$1)-$(strip $(word 2,$(dep_$1))).tar); \
+ tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -;
+endef
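+
+# Example specs (hypothetical versions): the 2nd word is the Hex version; an
+# optional 3rd word gives the Hex package name when it differs from the
+# Erlang.mk dependency name:
+#
+#     dep_jsx = hex 3.1.0
+#     dep_local_alias = hex 1.0.0 actual_hex_package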
+
+define dep_fetch_fail
+ echo "Error: Unknown or invalid dependency: $(1)." >&2; \
+ exit 78;
+endef
+
+# Kept for compatibility purposes with older Erlang.mk configuration.
+define dep_fetch_legacy
+ $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) \
+ git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \
+ cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master);
+endef
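+
+# The deprecated format handled above is "<repo> [<commit>]" with no fetch
+# method, e.g. (hypothetical repository):
+#
+#     dep_my_dep = https://github.com/example/my_dep v1.0.0
+#
+# The equivalent modern spec names the method explicitly:
+#
+#     dep_my_dep = git https://github.com/example/my_dep v1.0.0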
+
+define dep_target
+$(DEPS_DIR)/$(call dep_name,$1): | $(ERLANG_MK_TMP)
+ $(eval DEP_NAME := $(call dep_name,$1))
+ $(eval DEP_STR := $(if $(filter $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))"))
+ $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \
+ echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \
+ exit 17; \
+ fi
+ $(verbose) mkdir -p $(DEPS_DIR)
+ $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$(1))),$(1))
+ $(verbose) if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ] \
+ && [ ! -f $(DEPS_DIR)/$(1)/configure ]; then \
+ echo " AUTO " $(DEP_STR); \
+ cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \
+ fi
+ - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \
+ echo " CONF " $(DEP_STR); \
+ cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \
+ fi
+ifeq ($(filter $(1),$(NO_AUTOPATCH)),)
+ $(verbose) $$(MAKE) --no-print-directory autopatch-$(DEP_NAME)
+endif
+
+.PHONY: autopatch-$(call dep_name,$1)
+
+autopatch-$(call dep_name,$1)::
+ $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \
+ if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+ echo " PATCH Downloading rabbitmq-codegen"; \
+ git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+ fi; \
+ if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \
+ echo " PATCH Downloading rabbitmq-server"; \
+ git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \
+ fi; \
+ ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \
+ elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \
+ if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \
+ echo " PATCH Downloading rabbitmq-codegen"; \
+ git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \
+ fi \
+ elif [ "$1" = "elixir" -a "$(ELIXIR_PATCH)" ]; then \
+ ln -s lib/elixir/ebin $(DEPS_DIR)/elixir/; \
+ else \
+ $$(call dep_autopatch,$(call dep_name,$1)) \
+ fi
+endef
+
+$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep))))
+
+ifndef IS_APP
+clean:: clean-apps
+
+clean-apps:
+ $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
+ $(MAKE) -C $$dep clean IS_APP=1; \
+ done
+
+distclean:: distclean-apps
+
+distclean-apps:
+ $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \
+ $(MAKE) -C $$dep distclean IS_APP=1; \
+ done
+endif
+
+ifndef SKIP_DEPS
+distclean:: distclean-deps
+
+distclean-deps:
+ $(gen_verbose) rm -rf $(DEPS_DIR)
+endif
+
+# Forward-declare variables used in core/deps-tools.mk. This is required
+# in case plugins use them.
+
+ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-deps-list.log
+ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-doc-deps-list.log
+ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-rel-deps-list.log
+ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log
+ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log
+
+ERLANG_MK_QUERY_DEPS_FILE = $(ERLANG_MK_TMP)/query-deps.log
+ERLANG_MK_QUERY_DOC_DEPS_FILE = $(ERLANG_MK_TMP)/query-doc-deps.log
+ERLANG_MK_QUERY_REL_DEPS_FILE = $(ERLANG_MK_TMP)/query-rel-deps.log
+ERLANG_MK_QUERY_TEST_DEPS_FILE = $(ERLANG_MK_TMP)/query-test-deps.log
+ERLANG_MK_QUERY_SHELL_DEPS_FILE = $(ERLANG_MK_TMP)/query-shell-deps.log
+
+# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-app
+
+# Configuration.
+
+ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \
+ +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec
+COMPILE_FIRST ?=
+COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST)))
+ERLC_EXCLUDE ?=
+ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE)))
+
+ERLC_ASN1_OPTS ?=
+
+ERLC_MIB_OPTS ?=
+COMPILE_MIB_FIRST ?=
+COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST)))
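+
+# Typical project-level overrides (illustrative values only):
+#
+#     ERLC_OPTS = +debug_info +warn_missing_spec   # replaces the default set above
+#     COMPILE_FIRST = my_behaviour                 # compile src/my_behaviour.erl first
+#     ERLC_EXCLUDE = my_generated_module           # leave src/my_generated_module.erl out of the main compilation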
+
+# Verbosity.
+
+app_verbose_0 = @echo " APP " $(PROJECT);
+app_verbose_2 = set -x;
+app_verbose = $(app_verbose_$(V))
+
+appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src;
+appsrc_verbose_2 = set -x;
+appsrc_verbose = $(appsrc_verbose_$(V))
+
+makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d;
+makedep_verbose_2 = set -x;
+makedep_verbose = $(makedep_verbose_$(V))
+
+erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+ $(filter %.erl %.core,$(?F)));
+erlc_verbose_2 = set -x;
+erlc_verbose = $(erlc_verbose_$(V))
+
+xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F));
+xyrl_verbose_2 = set -x;
+xyrl_verbose = $(xyrl_verbose_$(V))
+
+asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F));
+asn1_verbose_2 = set -x;
+asn1_verbose = $(asn1_verbose_$(V))
+
+mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F));
+mib_verbose_2 = set -x;
+mib_verbose = $(mib_verbose_$(V))
+
+ifneq ($(wildcard src/),)
+
+# Targets.
+
+app:: $(if $(wildcard ebin/test),clean) deps
+ $(verbose) $(MAKE) --no-print-directory $(PROJECT).d
+ $(verbose) $(MAKE) --no-print-directory app-build
+
+ifeq ($(wildcard src/$(PROJECT_MOD).erl),)
+define app_file
+{application, '$(PROJECT)', [
+ {description, "$(PROJECT_DESCRIPTION)"},
+ {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+ {id$(comma)$(space)"$(1)"}$(comma))
+ {modules, [$(call comma_list,$(2))]},
+ {registered, []},
+ {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
+ {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
+]}.
+endef
+else
+define app_file
+{application, '$(PROJECT)', [
+ {description, "$(PROJECT_DESCRIPTION)"},
+ {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP),
+ {id$(comma)$(space)"$(1)"}$(comma))
+ {modules, [$(call comma_list,$(2))]},
+ {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]},
+ {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]},
+ {mod, {$(PROJECT_MOD), []}},
+ {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),)
+]}.
+endef
+endif
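+
+# Sketch of the generated ebin/$(PROJECT).app for a hypothetical project
+# "my_app" with PROJECT_MOD = my_app_app, DEPS = cowboy and the default
+# (empty) PROJECT_ENV:
+#
+#     {application, 'my_app', [
+#         {description, "..."},
+#         {vsn, "0.1.0"},
+#         {modules, [...]},
+#         {registered, [my_app_sup]},
+#         {applications, [kernel,stdlib,cowboy]},
+#         {mod, {my_app_app, []}},
+#         {env, []}
+#     ]}.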
+
+app-build: ebin/$(PROJECT).app
+ $(verbose) :
+
+# Source files.
+
+ALL_SRC_FILES := $(sort $(call core_find,src/,*))
+
+ERL_FILES := $(filter %.erl,$(ALL_SRC_FILES))
+CORE_FILES := $(filter %.core,$(ALL_SRC_FILES))
+
+# ASN.1 files.
+
+ifneq ($(wildcard asn1/),)
+ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1))
+ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+define compile_asn1
+ $(verbose) mkdir -p include/
+ $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1)
+ $(verbose) mv asn1/*.erl src/
+ -$(verbose) mv asn1/*.hrl include/
+ $(verbose) mv asn1/*.asn1db include/
+endef
+
+$(PROJECT).d:: $(ASN1_FILES)
+ $(if $(strip $?),$(call compile_asn1,$?))
+endif
+
+# SNMP MIB files.
+
+ifneq ($(wildcard mibs/),)
+MIB_FILES = $(sort $(call core_find,mibs/,*.mib))
+
+$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES)
+ $(verbose) mkdir -p include/ priv/mibs/
+ $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $?
+ $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?)))
+endif
+
+# Leex and Yecc files.
+
+XRL_FILES := $(filter %.xrl,$(ALL_SRC_FILES))
+XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES))))
+ERL_FILES += $(XRL_ERL_FILES)
+
+YRL_FILES := $(filter %.yrl,$(ALL_SRC_FILES))
+YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES))))
+ERL_FILES += $(YRL_ERL_FILES)
+
+$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES)
+ $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $(YRL_ERLC_OPTS) $?)
+
+# Erlang and Core Erlang files.
+
+define makedep.erl
+ E = ets:new(makedep, [bag]),
+ G = digraph:new([acyclic]),
+ ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")),
+ DepsDir = "$(call core_native_path,$(DEPS_DIR))",
+ AppsDir = "$(call core_native_path,$(APPS_DIR))",
+ DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))",
+ DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))",
+ AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))",
+ AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))",
+ DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")),
+ AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")),
+ Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles],
+ Add = fun (Mod, Dep) ->
+ case lists:keyfind(Dep, 1, Modules) of
+ false -> ok;
+ {_, DepFile} ->
+ {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+ ets:insert(E, {ModFile, DepFile}),
+ digraph:add_vertex(G, Mod),
+ digraph:add_vertex(G, Dep),
+ digraph:add_edge(G, Mod, Dep)
+ end
+ end,
+ AddHd = fun (F, Mod, DepFile) ->
+ case file:open(DepFile, [read]) of
+ {error, enoent} ->
+ ok;
+ {ok, Fd} ->
+ {_, ModFile} = lists:keyfind(Mod, 1, Modules),
+ case ets:match(E, {ModFile, DepFile}) of
+ [] ->
+ ets:insert(E, {ModFile, DepFile}),
+ F(F, Fd, Mod,0);
+ _ -> ok
+ end
+ end
+ end,
+ SearchHrl = fun
+ F(_Hrl, []) -> {error,enoent};
+ F(Hrl, [Dir|Dirs]) ->
+ HrlF = filename:join([Dir,Hrl]),
+ case filelib:is_file(HrlF) of
+ true ->
+ {ok, HrlF};
+ false -> F(Hrl,Dirs)
+ end
+ end,
+ Attr = fun
+ (_F, Mod, behavior, Dep) ->
+ Add(Mod, Dep);
+ (_F, Mod, behaviour, Dep) ->
+ Add(Mod, Dep);
+ (_F, Mod, compile, {parse_transform, Dep}) ->
+ Add(Mod, Dep);
+ (_F, Mod, compile, Opts) when is_list(Opts) ->
+ case proplists:get_value(parse_transform, Opts) of
+ undefined -> ok;
+ Dep -> Add(Mod, Dep)
+ end;
+ (F, Mod, include, Hrl) ->
+ case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
+ {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
+ {error, _} -> false
+ end;
+ (F, Mod, include_lib, Hrl) ->
+ case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of
+ {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl);
+ {error, _} -> false
+ end;
+ (F, Mod, import, {Imp, _}) ->
+ IsFile =
+ case lists:keyfind(Imp, 1, Modules) of
+ false -> false;
+ {_, FilePath} -> filelib:is_file(FilePath)
+ end,
+ case IsFile of
+ false -> ok;
+ true -> Add(Mod, Imp)
+ end;
+ (_, _, _, _) -> ok
+ end,
+ MakeDepend = fun
+ (F, Fd, Mod, StartLocation) ->
+ {ok, Filename} = file:pid2name(Fd),
+ case io:parse_erl_form(Fd, undefined, StartLocation) of
+ {ok, AbsData, EndLocation} ->
+ case AbsData of
+ {attribute, _, Key, Value} ->
+ Attr(F, Mod, Key, Value),
+ F(F, Fd, Mod, EndLocation);
+ _ -> F(F, Fd, Mod, EndLocation)
+ end;
+ {eof, _ } -> file:close(Fd);
+ {error, ErrorDescription } ->
+ file:close(Fd);
+ {error, ErrorInfo, ErrorLocation} ->
+ F(F, Fd, Mod, ErrorLocation)
+ end,
+ ok
+ end,
+ [begin
+ Mod = list_to_atom(filename:basename(F, ".erl")),
+ case file:open(F, [read]) of
+ {ok, Fd} -> MakeDepend(MakeDepend, Fd, Mod,0);
+ {error, enoent} -> ok
+ end
+ end || F <- ErlFiles],
+ Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))),
+ CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)],
+ TargetPath = fun(Target) ->
+ case lists:keyfind(Target, 1, Modules) of
+ false -> "";
+ {_, DepFile} ->
+ DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")),
+ string:join(DirSubname ++ [atom_to_list(Target)], "/")
+ end
+ end,
+ Output0 = [
+ "# Generated by Erlang.mk. Edit at your own risk!\n\n",
+ [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend],
+ "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n"
+ ],
+ Output = case "é" of
+ [233] -> unicode:characters_to_binary(Output0);
+ _ -> Output0
+ end,
+ ok = file:write_file("$(1)", Output),
+ halt()
+endef
+
+ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),)
+$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST)
+ $(makedep_verbose) $(call erlang,$(call makedep.erl,$@))
+endif
+
+ifeq ($(IS_APP)$(IS_DEP),)
+ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0)
+# Rebuild everything when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
+ $(verbose) if test -f $@; then \
+ touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \
+ touch -c $(PROJECT).d; \
+ fi
+ $(verbose) touch $@
+
+$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change
+endif
+endif
+
+$(PROJECT).d::
+ $(verbose) :
+
+include $(wildcard $(PROJECT).d)
+
+ebin/$(PROJECT).app:: ebin/
+
+ebin/:
+ $(verbose) mkdir -p ebin/
+
+define compile_erl
+ $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \
+ -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1))
+endef
+
+define validate_app_file
+ case file:consult("ebin/$(PROJECT).app") of
+ {ok, _} -> halt();
+ _ -> halt(1)
+ end
+endef
+
+ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src)
+ $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?))
+ $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE)))
+# Older git versions do not have the --first-parent flag. Do without in that case.
+ $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null \
+ || git describe --dirty --abbrev=7 --tags --always 2>/dev/null || true))
+ $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+ $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES)))))))
+ifeq ($(wildcard src/$(PROJECT).app.src),)
+ $(app_verbose) printf '$(subst %,%%,$(subst $(newline),\n,$(subst ','\'',$(call app_file,$(GITDESCRIBE),$(MODULES)))))' \
+ > ebin/$(PROJECT).app
+ $(verbose) if ! $(call erlang,$(call validate_app_file)); then \
+ echo "The .app file produced is invalid. Please verify the value of PROJECT_ENV." >&2; \
+ exit 1; \
+ fi
+else
+ $(verbose) if [ -z "$$(grep -e '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \
+ echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk documentation for instructions." >&2; \
+ exit 1; \
+ fi
+ $(appsrc_verbose) cat src/$(PROJECT).app.src \
+ | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \
+ | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(subst /,\/,$(GITDESCRIBE))\"}/" \
+ > ebin/$(PROJECT).app
+endif
+ifneq ($(wildcard src/$(PROJECT).appup),)
+ $(verbose) cp src/$(PROJECT).appup ebin/
+endif
+
+clean:: clean-app
+
+clean-app:
+ $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \
+ $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \
+ $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \
+ $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \
+ $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES))))
+
+endif
+
+# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: docs-deps
+
+# Configuration.
+
+ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS))
+
+# Targets.
+
+$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+doc-deps:
+else
+doc-deps: $(ALL_DOC_DEPS_DIRS)
+ $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done
+endif
+
+# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rel-deps
+
+# Configuration.
+
+ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS))
+
+# Targets.
+
+$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+rel-deps:
+else
+rel-deps: $(ALL_REL_DEPS_DIRS)
+ $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done
+endif
+
+# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: test-deps test-dir test-build clean-test-dir
+
+# Configuration.
+
+TEST_DIR ?= $(CURDIR)/test
+
+ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS))
+
+TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard
+TEST_ERLC_OPTS += -DTEST=1
+
+# Targets.
+
+$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+test-deps:
+else
+test-deps: $(ALL_TEST_DEPS_DIRS)
+ $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do \
+ if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
+ :; \
+ else \
+ $(MAKE) -C $$dep IS_DEP=1; \
+ if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
+ fi \
+ done
+endif
+
+ifneq ($(wildcard $(TEST_DIR)),)
+test-dir: $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
+ @:
+
+test_erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\
+ $(filter %.erl %.core,$(notdir $(FILES_TO_COMPILE))));
+test_erlc_verbose_2 = set -x;
+test_erlc_verbose = $(test_erlc_verbose_$(V))
+
+define compile_test_erl
+ $(test_erlc_verbose) erlc -v $(TEST_ERLC_OPTS) -o $(TEST_DIR) \
+ -pa ebin/ -I include/ $(1)
+endef
+
+ERL_TEST_FILES = $(call core_find,$(TEST_DIR)/,*.erl)
+$(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build: $(ERL_TEST_FILES) $(MAKEFILE_LIST)
+ $(eval FILES_TO_COMPILE := $(if $(filter $(MAKEFILE_LIST),$?),$(filter $(ERL_TEST_FILES),$^),$?))
+ $(if $(strip $(FILES_TO_COMPILE)),$(call compile_test_erl,$(FILES_TO_COMPILE)) && touch $@)
+endif
+
+test-build:: IS_TEST=1
+test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build:: $(if $(wildcard src),$(if $(wildcard ebin/test),,clean)) $(if $(IS_APP),,deps test-deps)
+# We already compiled everything when IS_APP=1.
+ifndef IS_APP
+ifneq ($(wildcard src),)
+ $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
+ $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
+ $(gen_verbose) touch ebin/test
+endif
+ifneq ($(wildcard $(TEST_DIR)),)
+ $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
+endif
+endif
+
+# Roughly the same as test-build, but when IS_APP=1.
+# We only care about compiling the current application.
+ifdef IS_APP
+test-build-app:: ERLC_OPTS=$(TEST_ERLC_OPTS)
+test-build-app:: deps test-deps
+ifneq ($(wildcard src),)
+ $(verbose) $(MAKE) --no-print-directory $(PROJECT).d ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
+ $(verbose) $(MAKE) --no-print-directory app-build ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
+ $(gen_verbose) touch ebin/test
+endif
+ifneq ($(wildcard $(TEST_DIR)),)
+ $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))"
+endif
+endif
+
+clean:: clean-test-dir
+
+clean-test-dir:
+ifneq ($(wildcard $(TEST_DIR)/*.beam),)
+ $(gen_verbose) rm -f $(TEST_DIR)/*.beam $(ERLANG_MK_TMP)/$(PROJECT).last-testdir-build
+endif
+
+# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: rebar.config
+
+# We strip out -Werror because we don't want to fail due to
+# warnings when used as a dependency.
+
+compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/g')
+
+define compat_convert_erlc_opts
+$(if $(filter-out -Werror,$1),\
+ $(if $(findstring +,$1),\
+ $(shell echo $1 | cut -b 2-)))
+endef
+
+define compat_erlc_opts_to_list
+[$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$1),$(call compat_convert_erlc_opts,$o)))]
+endef
+
+define compat_rebar_config
+{deps, [
+$(call comma_list,$(foreach d,$(DEPS),\
+ $(if $(filter hex,$(call dep_fetch,$d)),\
+ {$(call dep_name,$d)$(comma)"$(call dep_repo,$d)"},\
+ {$(call dep_name,$d)$(comma)".*"$(comma){git,"$(call dep_repo,$d)"$(comma)"$(call dep_commit,$d)"}})))
+]}.
+{erl_opts, $(call compat_erlc_opts_to_list,$(ERLC_OPTS))}.
+endef
+
+rebar.config:
+ $(gen_verbose) $(call core_render,compat_rebar_config,rebar.config)
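+
+# Worked example (hypothetical options): with ERLC_OPTS = -Werror +debug_info
+# +warn_export_vars, compat_erlc_opts_to_list drops -Werror and strips the
+# leading '+' from each option, so the generated rebar.config would contain
+# roughly:
+#   {erl_opts, [debug_info,warn_export_vars]}.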
+
+# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter asciideck,$(DEPS) $(DOC_DEPS)),asciideck)
+
+.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc-guide distclean-asciidoc-manual
+
+# Core targets.
+
+docs:: asciidoc
+
+distclean:: distclean-asciidoc-guide distclean-asciidoc-manual
+
+# Plugin-specific targets.
+
+asciidoc: asciidoc-guide asciidoc-manual
+
+# User guide.
+
+ifeq ($(wildcard doc/src/guide/book.asciidoc),)
+asciidoc-guide:
+else
+asciidoc-guide: distclean-asciidoc-guide doc-deps
+ a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf
+ a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/
+
+distclean-asciidoc-guide:
+ $(gen_verbose) rm -rf doc/html/ doc/guide.pdf
+endif
+
+# Man pages.
+
+ASCIIDOC_MANUAL_FILES := $(wildcard doc/src/manual/*.asciidoc)
+
+ifeq ($(ASCIIDOC_MANUAL_FILES),)
+asciidoc-manual:
+else
+
+# Configuration.
+
+MAN_INSTALL_PATH ?= /usr/local/share/man
+MAN_SECTIONS ?= 3 7
+MAN_PROJECT ?= $(shell echo $(PROJECT) | sed 's/^./\U&\E/')
+MAN_VERSION ?= $(PROJECT_VERSION)
+
+# Plugin-specific targets.
+
+define asciidoc2man.erl
+try
+ [begin
+ io:format(" ADOC ~s~n", [F]),
+ ok = asciideck:to_manpage(asciideck:parse_file(F), #{
+ compress => gzip,
+ outdir => filename:dirname(F),
+ extra2 => "$(MAN_PROJECT) $(MAN_VERSION)",
+ extra3 => "$(MAN_PROJECT) Function Reference"
+ })
+ end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]],
+ halt(0)
+catch C:E ->
+ io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]),
+ halt(1)
+end.
+endef
+
+asciidoc-manual:: doc-deps
+
+asciidoc-manual:: $(ASCIIDOC_MANUAL_FILES)
+ $(gen_verbose) $(call erlang,$(call asciidoc2man.erl,$?))
+ $(verbose) $(foreach s,$(MAN_SECTIONS),mkdir -p doc/man$s/ && mv doc/src/manual/*.$s.gz doc/man$s/;)
+
+install-docs:: install-asciidoc
+
+install-asciidoc: asciidoc-manual
+ $(foreach s,$(MAN_SECTIONS),\
+ mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \
+ install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;)
+
+distclean-asciidoc-manual:
+ $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS))
+endif
+endif
+
+# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates
+
+# Core targets.
+
+help::
+ $(verbose) printf "%s\n" "" \
+ "Bootstrap targets:" \
+ " bootstrap Generate a skeleton of an OTP application" \
+ " bootstrap-lib Generate a skeleton of an OTP library" \
+ " bootstrap-rel Generate the files needed to build a release" \
+ " new-app in=NAME Create a new local OTP application NAME" \
+ " new-lib in=NAME Create a new local OTP library NAME" \
+ " new t=TPL n=NAME Generate a module NAME based on the template TPL" \
+ " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \
+ " list-templates List available templates"
+
+# Bootstrap templates.
+
+define bs_appsrc
+{application, $p, [
+ {description, ""},
+ {vsn, "0.1.0"},
+ {id, "git"},
+ {modules, []},
+ {registered, []},
+ {applications, [
+ kernel,
+ stdlib
+ ]},
+ {mod, {$p_app, []}},
+ {env, []}
+]}.
+endef
+
+define bs_appsrc_lib
+{application, $p, [
+ {description, ""},
+ {vsn, "0.1.0"},
+ {id, "git"},
+ {modules, []},
+ {registered, []},
+ {applications, [
+ kernel,
+ stdlib
+ ]}
+]}.
+endef
+
+# To prevent autocompletion issues with ZSH, we add "include erlang.mk"
+# separately during the actual bootstrap.
+define bs_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.1.0
+$(if $(SP),
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+)
+endef
+
+define bs_apps_Makefile
+PROJECT = $p
+PROJECT_DESCRIPTION = New project
+PROJECT_VERSION = 0.1.0
+$(if $(SP),
+# Whitespace to be used when creating files from templates.
+SP = $(SP)
+)
+# Make sure we know where the applications are located.
+ROOT_DIR ?= $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)
+APPS_DIR ?= ..
+DEPS_DIR ?= $(call core_relpath,$(DEPS_DIR),$(APPS_DIR)/app)
+
+include $$(ROOT_DIR)/erlang.mk
+endef
+
+define bs_app
+-module($p_app).
+-behaviour(application).
+
+-export([start/2]).
+-export([stop/1]).
+
+start(_Type, _Args) ->
+ $p_sup:start_link().
+
+stop(_State) ->
+ ok.
+endef
+
+define bs_relx_config
+{release, {$p_release, "1"}, [$p, sasl, runtime_tools]}.
+{extended_start_script, true}.
+{sys_config, "config/sys.config"}.
+{vm_args, "config/vm.args"}.
+endef
+
+define bs_sys_config
+[
+].
+endef
+
+define bs_vm_args
+-name $p@127.0.0.1
+-setcookie $p
+-heart
+endef
+
+# Normal templates.
+
+define tpl_supervisor
+-module($(n)).
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+
+start_link() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+ Procs = [],
+ {ok, {{one_for_one, 1, 5}, Procs}}.
+endef
+
+define tpl_gen_server
+-module($(n)).
+-behaviour(gen_server).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_server.
+-export([init/1]).
+-export([handle_call/3]).
+-export([handle_cast/2]).
+-export([handle_info/2]).
+-export([terminate/2]).
+-export([code_change/3]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+ gen_server:start_link(?MODULE, [], []).
+
+%% gen_server.
+
+init([]) ->
+ {ok, #state{}}.
+
+handle_call(_Request, _From, State) ->
+ {reply, ignored, State}.
+
+handle_cast(_Msg, State) ->
+ {noreply, State}.
+
+handle_info(_Info, State) ->
+ {noreply, State}.
+
+terminate(_Reason, _State) ->
+ ok.
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+endef
+
+define tpl_module
+-module($(n)).
+-export([]).
+endef
+
+define tpl_cowboy_http
+-module($(n)).
+-behaviour(cowboy_http_handler).
+
+-export([init/3]).
+-export([handle/2]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+ {ok, Req, #state{}}.
+
+handle(Req, State=#state{}) ->
+ {ok, Req2} = cowboy_req:reply(200, Req),
+ {ok, Req2, State}.
+
+terminate(_Reason, _Req, _State) ->
+ ok.
+endef
+
+define tpl_gen_fsm
+-module($(n)).
+-behaviour(gen_fsm).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_fsm.
+-export([init/1]).
+-export([state_name/2]).
+-export([handle_event/3]).
+-export([state_name/3]).
+-export([handle_sync_event/4]).
+-export([handle_info/3]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+ gen_fsm:start_link(?MODULE, [], []).
+
+%% gen_fsm.
+
+init([]) ->
+ {ok, state_name, #state{}}.
+
+state_name(_Event, StateData) ->
+ {next_state, state_name, StateData}.
+
+handle_event(_Event, StateName, StateData) ->
+ {next_state, StateName, StateData}.
+
+state_name(_Event, _From, StateData) ->
+ {reply, ignored, state_name, StateData}.
+
+handle_sync_event(_Event, _From, StateName, StateData) ->
+ {reply, ignored, StateName, StateData}.
+
+handle_info(_Info, StateName, StateData) ->
+ {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+ ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+ {ok, StateName, StateData}.
+endef
+
+define tpl_gen_statem
+-module($(n)).
+-behaviour(gen_statem).
+
+%% API.
+-export([start_link/0]).
+
+%% gen_statem.
+-export([callback_mode/0]).
+-export([init/1]).
+-export([state_name/3]).
+-export([handle_event/4]).
+-export([terminate/3]).
+-export([code_change/4]).
+
+-record(state, {
+}).
+
+%% API.
+
+-spec start_link() -> {ok, pid()}.
+start_link() ->
+ gen_statem:start_link(?MODULE, [], []).
+
+%% gen_statem.
+
+callback_mode() ->
+ state_functions.
+
+init([]) ->
+ {ok, state_name, #state{}}.
+
+state_name(_EventType, _EventData, StateData) ->
+ {next_state, state_name, StateData}.
+
+handle_event(_EventType, _EventData, StateName, StateData) ->
+ {next_state, StateName, StateData}.
+
+terminate(_Reason, _StateName, _StateData) ->
+ ok.
+
+code_change(_OldVsn, StateName, StateData, _Extra) ->
+ {ok, StateName, StateData}.
+endef
+
+define tpl_cowboy_loop
+-module($(n)).
+-behaviour(cowboy_loop_handler).
+
+-export([init/3]).
+-export([info/3]).
+-export([terminate/3]).
+
+-record(state, {
+}).
+
+init(_, Req, _Opts) ->
+ {loop, Req, #state{}, 5000, hibernate}.
+
+info(_Info, Req, State) ->
+ {loop, Req, State, hibernate}.
+
+terminate(_Reason, _Req, _State) ->
+ ok.
+endef
+
+define tpl_cowboy_rest
+-module($(n)).
+
+-export([init/3]).
+-export([content_types_provided/2]).
+-export([get_html/2]).
+
+init(_, _Req, _Opts) ->
+ {upgrade, protocol, cowboy_rest}.
+
+content_types_provided(Req, State) ->
+ {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}.
+
+get_html(Req, State) ->
+ {<<"<html><body>This is REST!</body></html>">>, Req, State}.
+endef
+
+define tpl_cowboy_ws
+-module($(n)).
+-behaviour(cowboy_websocket_handler).
+
+-export([init/3]).
+-export([websocket_init/3]).
+-export([websocket_handle/3]).
+-export([websocket_info/3]).
+-export([websocket_terminate/3]).
+
+-record(state, {
+}).
+
+init(_, _, _) ->
+ {upgrade, protocol, cowboy_websocket}.
+
+websocket_init(_, Req, _Opts) ->
+ Req2 = cowboy_req:compact(Req),
+ {ok, Req2, #state{}}.
+
+websocket_handle({text, Data}, Req, State) ->
+ {reply, {text, Data}, Req, State};
+websocket_handle({binary, Data}, Req, State) ->
+ {reply, {binary, Data}, Req, State};
+websocket_handle(_Frame, Req, State) ->
+ {ok, Req, State}.
+
+websocket_info(_Info, Req, State) ->
+ {ok, Req, State}.
+
+websocket_terminate(_Reason, _Req, _State) ->
+ ok.
+endef
+
+define tpl_ranch_protocol
+-module($(n)).
+-behaviour(ranch_protocol).
+
+-export([start_link/4]).
+-export([init/4]).
+
+-type opts() :: [].
+-export_type([opts/0]).
+
+-record(state, {
+ socket :: inet:socket(),
+ transport :: module()
+}).
+
+start_link(Ref, Socket, Transport, Opts) ->
+ Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]),
+ {ok, Pid}.
+
+-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok.
+init(Ref, Socket, Transport, _Opts) ->
+ ok = ranch:accept_ack(Ref),
+ loop(#state{socket=Socket, transport=Transport}).
+
+loop(State) ->
+ loop(State).
+endef
+
+# Plugin-specific targets.
+
+ifndef WS
+ifdef SP
+WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a))
+else
+WS = $(tab)
+endif
+endif
+
+bootstrap:
+ifneq ($(wildcard src/),)
+ $(error Error: src/ directory already exists)
+endif
+ $(eval p := $(PROJECT))
+ $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
+ $(error Error: Invalid characters in the application name))
+ $(eval n := $(PROJECT)_sup)
+ $(verbose) $(call core_render,bs_Makefile,Makefile)
+ $(verbose) echo "include erlang.mk" >> Makefile
+ $(verbose) mkdir src/
+ifdef LEGACY
+ $(verbose) $(call core_render,bs_appsrc,src/$(PROJECT).app.src)
+endif
+ $(verbose) $(call core_render,bs_app,src/$(PROJECT)_app.erl)
+ $(verbose) $(call core_render,tpl_supervisor,src/$(PROJECT)_sup.erl)
+
+bootstrap-lib:
+ifneq ($(wildcard src/),)
+ $(error Error: src/ directory already exists)
+endif
+ $(eval p := $(PROJECT))
+ $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
+ $(error Error: Invalid characters in the application name))
+ $(verbose) $(call core_render,bs_Makefile,Makefile)
+ $(verbose) echo "include erlang.mk" >> Makefile
+ $(verbose) mkdir src/
+ifdef LEGACY
+ $(verbose) $(call core_render,bs_appsrc_lib,src/$(PROJECT).app.src)
+endif
+
+bootstrap-rel:
+ifneq ($(wildcard relx.config),)
+ $(error Error: relx.config already exists)
+endif
+ifneq ($(wildcard config/),)
+ $(error Error: config/ directory already exists)
+endif
+ $(eval p := $(PROJECT))
+ $(verbose) $(call core_render,bs_relx_config,relx.config)
+ $(verbose) mkdir config/
+ $(verbose) $(call core_render,bs_sys_config,config/sys.config)
+ $(verbose) $(call core_render,bs_vm_args,config/vm.args)
+
+new-app:
+ifndef in
+ $(error Usage: $(MAKE) new-app in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+ $(error Error: Application $in already exists)
+endif
+ $(eval p := $(in))
+ $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
+ $(error Error: Invalid characters in the application name))
+ $(eval n := $(in)_sup)
+ $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+ $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+ $(verbose) $(call core_render,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+ $(verbose) $(call core_render,bs_app,$(APPS_DIR)/$p/src/$p_app.erl)
+ $(verbose) $(call core_render,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl)
+
+new-lib:
+ifndef in
+ $(error Usage: $(MAKE) new-lib in=APP)
+endif
+ifneq ($(wildcard $(APPS_DIR)/$in),)
+ $(error Error: Application $in already exists)
+endif
+ $(eval p := $(in))
+ $(if $(shell echo $p | LC_ALL=C grep -x "[a-z0-9_]*"),,\
+ $(error Error: Invalid characters in the application name))
+ $(verbose) mkdir -p $(APPS_DIR)/$p/src/
+ $(verbose) $(call core_render,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile)
+ifdef LEGACY
+ $(verbose) $(call core_render,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src)
+endif
+
+new:
+ifeq ($(wildcard src/)$(in),)
+ $(error Error: src/ directory does not exist)
+endif
+ifndef t
+ $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifndef n
+ $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP])
+endif
+ifdef in
+ $(verbose) $(call core_render,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl)
+else
+ $(verbose) $(call core_render,tpl_$(t),src/$(n).erl)
+endif
+
+list-templates:
+	$(verbose) echo Available templates:
+ $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES))))
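+
+# Illustration (hypothetical names): the bootstrap and template targets above
+# are typically invoked as, e.g.:
+#   make new t=gen_server n=my_worker       # render tpl_gen_server to src/my_worker.erl
+#   make new t=supervisor n=my_sup in=myapp # render into $(APPS_DIR)/myapp/src/my_sup.erl
+#   make list-templates                     # print the available template names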
+
+# Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: clean-c_src distclean-c_src-env
+
+# Configuration.
+
+C_SRC_DIR ?= $(CURDIR)/c_src
+C_SRC_ENV ?= $(C_SRC_DIR)/env.mk
+C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT)
+C_SRC_TYPE ?= shared
+
+# System type and C compiler/flags.
+
+ifeq ($(PLATFORM),msys2)
+ C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?= .exe
+ C_SRC_OUTPUT_SHARED_EXTENSION ?= .dll
+else
+ C_SRC_OUTPUT_EXECUTABLE_EXTENSION ?=
+ C_SRC_OUTPUT_SHARED_EXTENSION ?= .so
+endif
+
+ifeq ($(C_SRC_TYPE),shared)
+ C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION)
+else
+ C_SRC_OUTPUT_FILE = $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION)
+endif
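+
+# Illustration: C_SRC_TYPE selects what gets linked. The default "shared"
+# produces $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_SHARED_EXTENSION); any other value,
+# e.g.
+#   C_SRC_TYPE = executable
+# produces $(C_SRC_OUTPUT)$(C_SRC_OUTPUT_EXECUTABLE_EXTENSION) instead.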
+
+ifeq ($(PLATFORM),msys2)
+# We hardcode the compiler used on MSYS2. The default CC=cc does
+# not produce working code. The "gcc" MSYS2 package also doesn't.
+ CC = /mingw64/bin/gcc
+ export CC
+ CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+ CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),darwin)
+ CC ?= cc
+ CFLAGS ?= -O3 -std=c99 -arch x86_64 -Wall -Wmissing-prototypes
+ CXXFLAGS ?= -O3 -arch x86_64 -Wall
+ LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress
+else ifeq ($(PLATFORM),freebsd)
+ CC ?= cc
+ CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+ CXXFLAGS ?= -O3 -finline-functions -Wall
+else ifeq ($(PLATFORM),linux)
+ CC ?= gcc
+ CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes
+ CXXFLAGS ?= -O3 -finline-functions -Wall
+endif
+
+ifneq ($(PLATFORM),msys2)
+ CFLAGS += -fPIC
+ CXXFLAGS += -fPIC
+endif
+
+CFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+CXXFLAGS += -I"$(ERTS_INCLUDE_DIR)" -I"$(ERL_INTERFACE_INCLUDE_DIR)"
+
+LDLIBS += -L"$(ERL_INTERFACE_LIB_DIR)" -lei
+
+# Verbosity.
+
+c_verbose_0 = @echo " C " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
+c_verbose = $(c_verbose_$(V))
+
+cpp_verbose_0 = @echo " CPP " $(filter-out $(notdir $(MAKEFILE_LIST) $(C_SRC_ENV)),$(^F));
+cpp_verbose = $(cpp_verbose_$(V))
+
+link_verbose_0 = @echo " LD " $(@F);
+link_verbose = $(link_verbose_$(V))
+
+# Targets.
+
+ifeq ($(wildcard $(C_SRC_DIR)),)
+else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),)
+app:: app-c_src
+
+test-build:: app-c_src
+
+app-c_src:
+ $(MAKE) -C $(C_SRC_DIR)
+
+clean::
+ $(MAKE) -C $(C_SRC_DIR) clean
+
+else
+
+ifeq ($(SOURCES),)
+SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat))))
+endif
+OBJECTS = $(addsuffix .o, $(basename $(SOURCES)))
+
+COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c
+COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c
+
+app:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT_FILE)
+
+$(C_SRC_OUTPUT_FILE): $(OBJECTS)
+ $(verbose) mkdir -p $(dir $@)
+ $(link_verbose) $(CC) $(OBJECTS) \
+ $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \
+ -o $(C_SRC_OUTPUT_FILE)
+
+$(OBJECTS): $(MAKEFILE_LIST) $(C_SRC_ENV)
+
+%.o: %.c
+ $(COMPILE_C) $(OUTPUT_OPTION) $<
+
+%.o: %.cc
+ $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.C
+ $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+%.o: %.cpp
+ $(COMPILE_CPP) $(OUTPUT_OPTION) $<
+
+clean:: clean-c_src
+
+clean-c_src:
+ $(gen_verbose) rm -f $(C_SRC_OUTPUT_FILE) $(OBJECTS)
+
+endif
+
+ifneq ($(wildcard $(C_SRC_DIR)),)
+ERL_ERTS_DIR = $(shell $(ERL) -eval 'io:format("~s~n", [code:lib_dir(erts)]), halt().')
+
+$(C_SRC_ENV):
+ $(verbose) $(ERL) -eval "file:write_file(\"$(call core_native_path,$(C_SRC_ENV))\", \
+ io_lib:format( \
+ \"# Generated by Erlang.mk. Edit at your own risk!~n~n\" \
+ \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \
+ \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \
+ \"ERL_INTERFACE_LIB_DIR ?= ~s~n\" \
+ \"ERTS_DIR ?= $(ERL_ERTS_DIR)~n\", \
+ [code:root_dir(), erlang:system_info(version), \
+ code:lib_dir(erl_interface, include), \
+ code:lib_dir(erl_interface, lib)])), \
+ halt()."
+
+distclean:: distclean-c_src-env
+
+distclean-c_src-env:
+ $(gen_verbose) rm -f $(C_SRC_ENV)
+
+-include $(C_SRC_ENV)
+
+ifneq ($(ERL_ERTS_DIR),$(ERTS_DIR))
+$(shell rm -f $(C_SRC_ENV))
+endif
+endif
+
+# Templates.
+
+define bs_c_nif
+#include "erl_nif.h"
+
+static int loads = 0;
+
+static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
+{
+ /* Initialize private data. */
+ *priv_data = NULL;
+
+ loads++;
+
+ return 0;
+}
+
+static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
+{
+ /* Convert the private data to the new version. */
+ *priv_data = *old_priv_data;
+
+ loads++;
+
+ return 0;
+}
+
+static void unload(ErlNifEnv* env, void* priv_data)
+{
+ if (loads == 1) {
+ /* Destroy the private data. */
+ }
+
+ loads--;
+}
+
+static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{
+ if (enif_is_atom(env, argv[0])) {
+ return enif_make_tuple2(env,
+ enif_make_atom(env, "hello"),
+ argv[0]);
+ }
+
+ return enif_make_tuple2(env,
+ enif_make_atom(env, "error"),
+ enif_make_atom(env, "badarg"));
+}
+
+static ErlNifFunc nif_funcs[] = {
+ {"hello", 1, hello}
+};
+
+ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload)
+endef
+
+define bs_erl_nif
+-module($n).
+
+-export([hello/1]).
+
+-on_load(on_load/0).
+on_load() ->
+ PrivDir = case code:priv_dir(?MODULE) of
+ {error, _} ->
+ AppPath = filename:dirname(filename:dirname(code:which(?MODULE))),
+ filename:join(AppPath, "priv");
+ Path ->
+ Path
+ end,
+ erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0).
+
+hello(_) ->
+ erlang:nif_error({not_loaded, ?MODULE}).
+endef
+
+new-nif:
+ifneq ($(wildcard $(C_SRC_DIR)/$n.c),)
+ $(error Error: $(C_SRC_DIR)/$n.c already exists)
+endif
+ifneq ($(wildcard src/$n.erl),)
+ $(error Error: src/$n.erl already exists)
+endif
+ifndef n
+ $(error Usage: $(MAKE) new-nif n=NAME [in=APP])
+endif
+ifdef in
+ $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in=
+else
+ $(verbose) mkdir -p $(C_SRC_DIR) src/
+ $(verbose) $(call core_render,bs_c_nif,$(C_SRC_DIR)/$n.c)
+ $(verbose) $(call core_render,bs_erl_nif,src/$n.erl)
+endif
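+
+# Illustration (hypothetical NIF name):
+#   make new-nif n=my_nif           # creates $(C_SRC_DIR)/my_nif.c and src/my_nif.erl
+#   make new-nif n=my_nif in=myapp  # same, but inside $(APPS_DIR)/myapp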
+
+# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ci ci-prepare ci-setup
+
+CI_OTP ?=
+CI_HIPE ?=
+CI_ERLLVM ?=
+
+ifeq ($(CI_VM),native)
+ERLC_OPTS += +native
+TEST_ERLC_OPTS += +native
+else ifeq ($(CI_VM),erllvm)
+ERLC_OPTS += +native +'{hipe, [to_llvm]}'
+TEST_ERLC_OPTS += +native +'{hipe, [to_llvm]}'
+endif
+
+ifeq ($(strip $(CI_OTP) $(CI_HIPE) $(CI_ERLLVM)),)
+ci::
+else
+
+ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM)))
+
+ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)))
+
+ci-setup::
+ $(verbose) :
+
+ci-extra::
+ $(verbose) :
+
+ci_verbose_0 = @echo " CI " $(1);
+ci_verbose = $(ci_verbose_$(V))
+
+define ci_target
+ci-$1: $(KERL_INSTALL_DIR)/$2
+ $(verbose) $(MAKE) --no-print-directory clean
+ $(ci_verbose) \
+ PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \
+ CI_OTP_RELEASE="$1" \
+ CT_OPTS="-label $1" \
+ CI_VM="$3" \
+ $(MAKE) ci-setup tests
+ $(verbose) $(MAKE) --no-print-directory ci-extra
+endef
+
+$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp),$(otp),otp)))
+$(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native)))
+$(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm)))
+
+$(foreach otp,$(filter-out $(ERLANG_OTP),$(CI_OTP)),$(eval $(call kerl_otp_target,$(otp))))
+$(foreach otp,$(filter-out $(ERLANG_HIPE),$(sort $(CI_HIPE) $(CI_ERLLVM))),$(eval $(call kerl_hipe_target,$(otp))))
+
+help::
+ $(verbose) printf "%s\n" "" \
+ "Continuous Integration targets:" \
+ " ci Run '$(MAKE) tests' on all configured Erlang versions." \
+ "" \
+ "The CI_OTP variable must be defined with the Erlang versions" \
+ "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3"
+
+endif
+
+# Copyright (c) 2020, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifdef CONCUERROR_TESTS
+
+.PHONY: concuerror distclean-concuerror
+
+# Configuration
+
+CONCUERROR_LOGS_DIR ?= $(CURDIR)/logs
+CONCUERROR_OPTS ?=
+
+# Core targets.
+
+check:: concuerror
+
+ifndef KEEP_LOGS
+distclean:: distclean-concuerror
+endif
+
+# Plugin-specific targets.
+
+$(ERLANG_MK_TMP)/Concuerror/bin/concuerror: | $(ERLANG_MK_TMP)
+ $(verbose) git clone https://github.com/parapluu/Concuerror $(ERLANG_MK_TMP)/Concuerror
+ $(verbose) $(MAKE) -C $(ERLANG_MK_TMP)/Concuerror
+
+$(CONCUERROR_LOGS_DIR):
+ $(verbose) mkdir -p $(CONCUERROR_LOGS_DIR)
+
+define concuerror_html_report
+<!DOCTYPE html>
+<html lang="en">
+<head>
+<meta charset="utf-8">
+<title>Concuerror HTML report</title>
+</head>
+<body>
+<h1>Concuerror HTML report</h1>
+<p>Generated on $(concuerror_date)</p>
+<ul>
+$(foreach t,$(concuerror_targets),<li><a href="$(t).txt">$(t)</a></li>)
+</ul>
+</body>
+</html>
+endef
+
+concuerror: $(addprefix concuerror-,$(subst :,-,$(CONCUERROR_TESTS)))
+ $(eval concuerror_date := $(shell date))
+ $(eval concuerror_targets := $^)
+ $(verbose) $(call core_render,concuerror_html_report,$(CONCUERROR_LOGS_DIR)/concuerror.html)
+
+define concuerror_target
+.PHONY: concuerror-$1-$2
+
+concuerror-$1-$2: test-build | $(ERLANG_MK_TMP)/Concuerror/bin/concuerror $(CONCUERROR_LOGS_DIR)
+ $(ERLANG_MK_TMP)/Concuerror/bin/concuerror \
+ --pa $(CURDIR)/ebin --pa $(TEST_DIR) \
+ -o $(CONCUERROR_LOGS_DIR)/concuerror-$1-$2.txt \
+ $$(CONCUERROR_OPTS) -m $1 -t $2
+endef
+
+$(foreach test,$(CONCUERROR_TESTS),$(eval $(call concuerror_target,$(firstword $(subst :, ,$(test))),$(lastword $(subst :, ,$(test))))))
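+
+# Illustration (hypothetical module/test names): CONCUERROR_TESTS takes
+# module:test pairs, e.g.
+#   CONCUERROR_TESTS = my_mod:my_test other_mod:other_test
+# which defines the targets concuerror-my_mod-my_test and
+# concuerror-other_mod-other_test, each writing its log to CONCUERROR_LOGS_DIR.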
+
+distclean-concuerror:
+ $(gen_verbose) rm -rf $(CONCUERROR_LOGS_DIR)
+
+endif
+
+# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: ct apps-ct distclean-ct
+
+# Configuration.
+
+CT_OPTS ?=
+
+ifneq ($(wildcard $(TEST_DIR)),)
+ifndef CT_SUITES
+CT_SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl))))
+endif
+endif
+CT_SUITES ?=
+CT_LOGS_DIR ?= $(CURDIR)/logs
+
+# Core targets.
+
+tests:: ct
+
+ifndef KEEP_LOGS
+distclean:: distclean-ct
+endif
+
+help::
+ $(verbose) printf "%s\n" "" \
+ "Common_test targets:" \
+ " ct Run all the common_test suites for this project" \
+ "" \
+ "All your common_test suites have their associated targets." \
+	"A suite named http_SUITE can be run using the ct-http target."
+
+# Plugin-specific targets.
+
+CT_RUN = ct_run \
+ -no_auto_compile \
+ -noinput \
+ -pa $(CURDIR)/ebin $(TEST_DIR) \
+ -dir $(TEST_DIR) \
+ -logdir $(CT_LOGS_DIR)
+
+ifeq ($(CT_SUITES),)
+ct: $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
+else
+# We do not run tests if we are in an apps/* with no test directory.
+ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1)
+ct: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-ct)
+ $(verbose) mkdir -p $(CT_LOGS_DIR)
+ $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS)
+endif
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+define ct_app_target
+apps-ct-$1: test-build
+ $$(MAKE) -C $1 ct IS_APP=1
+endef
+
+$(foreach app,$(ALL_APPS_DIRS),$(eval $(call ct_app_target,$(app))))
+
+apps-ct: $(addprefix apps-ct-,$(ALL_APPS_DIRS))
+endif
+
+ifdef t
+ifeq (,$(findstring :,$t))
+CT_EXTRA = -group $t
+else
+t_words = $(subst :, ,$t)
+CT_EXTRA = -group $(firstword $(t_words)) -case $(lastword $(t_words))
+endif
+else
+ifdef c
+CT_EXTRA = -case $c
+else
+CT_EXTRA =
+endif
+endif
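+
+# Illustration (hypothetical group/case names): the per-suite targets defined
+# below accept t= and c= to narrow the run, e.g.:
+#   make ct-http t=my_group            # adds -group my_group
+#   make ct-http t=my_group:my_case    # adds -group my_group -case my_case
+#   make ct-http c=my_case             # adds -case my_case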
+
+define ct_suite_target
+ct-$(1): test-build
+ $(verbose) mkdir -p $(CT_LOGS_DIR)
+ $(gen_verbose_esc) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS)
+endef
+
+$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test))))
+
+distclean-ct:
+ $(gen_verbose) rm -rf $(CT_LOGS_DIR)
+
+# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: plt distclean-plt dialyze
+
+# Configuration.
+
+DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt
+export DIALYZER_PLT
+
+PLT_APPS ?=
+DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS)
+DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs
+DIALYZER_PLT_OPTS ?=
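+
+# Illustration: additional OTP applications can be added to the PLT through
+# PLT_APPS, e.g.:
+#   PLT_APPS = crypto ssl
+# and extra analysis flags through DIALYZER_OPTS.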
+
+# Core targets.
+
+check:: dialyze
+
+distclean:: distclean-plt
+
+help::
+ $(verbose) printf "%s\n" "" \
+ "Dialyzer targets:" \
+ " plt Build a PLT file for this project" \
+ " dialyze Analyze the project using Dialyzer"
+
+# Plugin-specific targets.
+
+define filter_opts.erl
+ Opts = init:get_plain_arguments(),
+ {Filtered, _} = lists:foldl(fun
+ (O, {Os, true}) -> {[O|Os], false};
+ (O = "-D", {Os, _}) -> {[O|Os], true};
+ (O = [\\$$-, \\$$D, _ | _], {Os, _}) -> {[O|Os], false};
+ (O = "-I", {Os, _}) -> {[O|Os], true};
+ (O = [\\$$-, \\$$I, _ | _], {Os, _}) -> {[O|Os], false};
+ (O = "-pa", {Os, _}) -> {[O|Os], true};
+ (_, Acc) -> Acc
+ end, {[], false}, Opts),
+ io:format("~s~n", [string:join(lists:reverse(Filtered), " ")]),
+ halt().
+endef
+
+# DIALYZER_PLT is a variable understood directly by Dialyzer.
+#
+# We append the path to erts at the end of the PLT. This works
+# because the PLT file is in the external term format and the
+# function binary_to_term/1 ignores any trailing data.
+$(DIALYZER_PLT): deps app
+ $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \
+ while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log))
+ $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \
+ erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) || test $$? -eq 2
+ $(verbose) $(ERL) -eval 'io:format("~n~s~n", [code:lib_dir(erts)]), halt().' >> $@
+
+plt: $(DIALYZER_PLT)
+
+distclean-plt:
+ $(gen_verbose) rm -f $(DIALYZER_PLT)
+
+ifneq ($(wildcard $(DIALYZER_PLT)),)
+dialyze: $(if $(filter --src,$(DIALYZER_DIRS)),,deps app)
+ $(verbose) if ! tail -n1 $(DIALYZER_PLT) | \
+ grep -q "^`$(ERL) -eval 'io:format("~s", [code:lib_dir(erts)]), halt().'`$$"; then \
+ rm $(DIALYZER_PLT); \
+ $(MAKE) plt; \
+ fi
+else
+dialyze: $(DIALYZER_PLT)
+endif
+ $(verbose) dialyzer --no_native `$(ERL) \
+ -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" \
+ -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) $(if $(wildcard ebin/),-pa ebin/)
+
+# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-edoc edoc
+
+# Configuration.
+
+EDOC_OPTS ?=
+EDOC_SRC_DIRS ?=
+EDOC_OUTPUT ?= doc
+
+define edoc.erl
+ SrcPaths = lists:foldl(fun(P, Acc) ->
+ filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc
+ end, [], [$(call comma_list,$(patsubst %,'%',$(call core_native_path,$(EDOC_SRC_DIRS))))]),
+ DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}],
+ edoc:application($(1), ".", [$(2)] ++ DefaultOpts),
+ halt(0).
+endef
+
+# Core targets.
+
+ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),)
+docs:: edoc
+endif
+
+distclean:: distclean-edoc
+
+# Plugin-specific targets.
+
+edoc: distclean-edoc doc-deps
+ $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS)))
+
+distclean-edoc:
+ $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info
+
+# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Configuration.
+
+DTL_FULL_PATH ?=
+DTL_PATH ?= templates/
+DTL_PREFIX ?=
+DTL_SUFFIX ?= _dtl
+DTL_OPTS ?=
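+
+# Illustration (hypothetical template): with the defaults above,
+# templates/my_page.dtl compiles to the module my_page_dtl and
+# ebin/my_page_dtl.beam is added to BEAM_FILES; with DTL_FULL_PATH set,
+# templates/admin/my_page.dtl becomes admin_my_page_dtl instead.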
+
+# Verbosity.
+
+dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F));
+dtl_verbose = $(dtl_verbose_$(V))
+
+# Core targets.
+
+DTL_PATH := $(abspath $(DTL_PATH))
+DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl))
+
+ifneq ($(DTL_FILES),)
+
+DTL_NAMES = $(addprefix $(DTL_PREFIX),$(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)))
+DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES)))
+BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES)))
+
+ifneq ($(words $(DTL_FILES)),0)
+# Rebuild templates when the Makefile changes.
+$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | $(ERLANG_MK_TMP)
+ $(verbose) if test -f $@; then \
+ touch $(DTL_FILES); \
+ fi
+ $(verbose) touch $@
+
+ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl
+endif
+
+define erlydtl_compile.erl
+ [begin
+ Module0 = case "$(strip $(DTL_FULL_PATH))" of
+ "" ->
+ filename:basename(F, ".dtl");
+ _ ->
+ "$(call core_native_path,$(DTL_PATH))/" ++ F2 = filename:rootname(F, ".dtl"),
+ re:replace(F2, "/", "_", [{return, list}, global])
+ end,
+ Module = list_to_atom("$(DTL_PREFIX)" ++ string:to_lower(Module0) ++ "$(DTL_SUFFIX)"),
+ case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of
+ ok -> ok;
+ {ok, _} -> ok
+ end
+ end || F <- string:tokens("$(1)", " ")],
+ halt().
+endef
+
+ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/
+ $(if $(strip $?),\
+ $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\
+ -pa ebin/))
+
+endif
+
+# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-escript escript escript-zip
+
+# Configuration.
+
+ESCRIPT_NAME ?= $(PROJECT)
+ESCRIPT_FILE ?= $(ESCRIPT_NAME)
+
+ESCRIPT_SHEBANG ?= /usr/bin/env escript
+ESCRIPT_COMMENT ?= This is an -*- erlang -*- file
+ESCRIPT_EMU_ARGS ?= -escript main $(ESCRIPT_NAME)
+
+ESCRIPT_ZIP ?= 7z a -tzip -mx=9 -mtc=off $(if $(filter-out 0,$(V)),,> /dev/null)
+ESCRIPT_ZIP_FILE ?= $(ERLANG_MK_TMP)/escript.zip
+
+# Core targets.
+
+distclean:: distclean-escript
+
+help::
+ $(verbose) printf "%s\n" "" \
+ "Escript targets:" \
+ " escript Build an executable escript archive" \
+
+# Plugin-specific targets.
+
+escript-zip:: FULL=1
+escript-zip:: deps app
+	$(verbose) mkdir -p $(dir $(ESCRIPT_ZIP_FILE))
+ $(verbose) rm -f $(ESCRIPT_ZIP_FILE)
+ $(gen_verbose) cd .. && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) $(PROJECT)/ebin/*
+ifneq ($(DEPS),)
+ $(verbose) cd $(DEPS_DIR) && $(ESCRIPT_ZIP) $(ESCRIPT_ZIP_FILE) \
+ $(subst $(DEPS_DIR)/,,$(addsuffix /*,$(wildcard \
+ $(addsuffix /ebin,$(shell cat $(ERLANG_MK_TMP)/deps.log)))))
+endif
+
+escript:: escript-zip
+ $(gen_verbose) printf "%s\n" \
+ "#!$(ESCRIPT_SHEBANG)" \
+ "%% $(ESCRIPT_COMMENT)" \
+ "%%! $(ESCRIPT_EMU_ARGS)" > $(ESCRIPT_FILE)
+ $(verbose) cat $(ESCRIPT_ZIP_FILE) >> $(ESCRIPT_FILE)
+ $(verbose) chmod +x $(ESCRIPT_FILE)
+
+distclean-escript:
+ $(gen_verbose) rm -f $(ESCRIPT_FILE)
+
+# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: eunit apps-eunit
+
+# Configuration
+
+EUNIT_OPTS ?=
+EUNIT_ERL_OPTS ?=
+
+# Core targets.
+
+tests:: eunit
+
+help::
+ $(verbose) printf "%s\n" "" \
+ "EUnit targets:" \
+ " eunit Run all the EUnit tests for this project"
+
+# Plugin-specific targets.
+
+define eunit.erl
+ $(call cover.erl)
+ CoverSetup(),
+ case eunit:test($1, [$(EUNIT_OPTS)]) of
+ ok -> ok;
+ error -> halt(2)
+ end,
+ CoverExport("$(call core_native_path,$(COVER_DATA_DIR))/eunit.coverdata"),
+ halt()
+endef
+
+EUNIT_ERL_OPTS += -pa $(TEST_DIR) $(CURDIR)/ebin
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+eunit: test-build cover-data-dir
+ $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS))
+else
+eunit: test-build cover-data-dir
+ $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS))
+endif
+else
+EUNIT_EBIN_MODS = $(notdir $(basename $(ERL_FILES) $(BEAM_FILES)))
+EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.erl)))
+
+EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \
+ $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)')
+
+eunit: test-build $(if $(IS_APP)$(ROOT_DIR),,apps-eunit) cover-data-dir
+ifneq ($(wildcard src/ $(TEST_DIR)),)
+ $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS))
+endif
+
+ifneq ($(ALL_APPS_DIRS),)
+apps-eunit: test-build
+ $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \
+ [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \
+ exit $$eunit_retcode
+endif
+endif
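+
+# Illustration (hypothetical names): t= selects what EUnit runs, e.g.:
+#   make eunit t=my_mod           # eunit:test(['my_mod'], ...)
+#   make eunit t=my_mod:my_test   # runs fun my_mod:my_test/0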
+
+# Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper)
+.PHONY: proper
+
+# Targets.
+
+tests:: proper
+
+define proper_check.erl
+ $(call cover.erl)
+ code:add_pathsa([
+ "$(call core_native_path,$(CURDIR)/ebin)",
+ "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
+ "$(call core_native_path,$(TEST_DIR))"]),
+ Module = fun(M) ->
+ [true] =:= lists:usort([
+ case atom_to_list(F) of
+ "prop_" ++ _ ->
+ io:format("Testing ~p:~p/0~n", [M, F]),
+ proper:quickcheck(M:F(), nocolors);
+ _ ->
+ true
+ end
+ || {F, 0} <- M:module_info(exports)])
+ end,
+ try begin
+ CoverSetup(),
+ Res = case $(1) of
+ all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]);
+ module -> Module($(2));
+ function -> proper:quickcheck($(2), nocolors)
+ end,
+ CoverExport("$(COVER_DATA_DIR)/proper.coverdata"),
+ Res
+ end of
+ true -> halt(0);
+ _ -> halt(1)
+ catch error:undef ->
+ io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
+ halt(0)
+ end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+proper: test-build cover-data-dir
+ $(verbose) $(call erlang,$(call proper_check.erl,module,$(t)))
+else
+proper: test-build cover-data-dir
+ $(verbose) echo Testing $(t)/0
+ $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)()))
+endif
+else
+proper: test-build cover-data-dir
+ $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+ $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
+ $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES)))
+endif
+endif
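+
+# Illustration (hypothetical property names): exported 0-arity functions whose
+# name starts with prop_ are picked up automatically, e.g.:
+#   make proper                            # check every prop_* in every module
+#   make proper t=my_props                 # check all prop_* in module my_props
+#   make proper t=my_props:prop_reverse    # check the single property prop_reverse/0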
+
+# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Verbosity.
+
+proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F));
+proto_verbose = $(proto_verbose_$(V))
+
+# Core targets.
+
+ifneq ($(wildcard src/),)
+ifneq ($(filter gpb protobuffs,$(BUILD_DEPS) $(DEPS)),)
+PROTO_FILES := $(filter %.proto,$(ALL_SRC_FILES))
+ERL_FILES += $(addprefix src/,$(patsubst %.proto,%_pb.erl,$(notdir $(PROTO_FILES))))
+
+ifeq ($(PROTO_FILES),)
+$(ERLANG_MK_TMP)/last-makefile-change-protobuffs:
+ $(verbose) :
+else
+# Rebuild proto files when the Makefile changes.
+# We exclude $(PROJECT).d to avoid a circular dependency.
+$(ERLANG_MK_TMP)/last-makefile-change-protobuffs: $(filter-out $(PROJECT).d,$(MAKEFILE_LIST)) | $(ERLANG_MK_TMP)
+ $(verbose) if test -f $@; then \
+ touch $(PROTO_FILES); \
+ fi
+ $(verbose) touch $@
+
+$(PROJECT).d:: $(ERLANG_MK_TMP)/last-makefile-change-protobuffs
+endif
+
+ifeq ($(filter gpb,$(BUILD_DEPS) $(DEPS)),)
+define compile_proto.erl
+ [begin
+ protobuffs_compile:generate_source(F, [
+ {output_include_dir, "./include"},
+ {output_src_dir, "./src"}])
+ end || F <- string:tokens("$1", " ")],
+ halt().
+endef
+else
+define compile_proto.erl
+ [begin
+ gpb_compile:file(F, [
+ {include_as_lib, true},
+ {module_name_suffix, "_pb"},
+ {o_hrl, "./include"},
+ {o_erl, "./src"}])
+ end || F <- string:tokens("$1", " ")],
+ halt().
+endef
+endif
+
+ifneq ($(PROTO_FILES),)
+$(PROJECT).d:: $(PROTO_FILES)
+ $(verbose) mkdir -p ebin/ include/
+ $(if $(strip $?),$(proto_verbose) $(call erlang,$(call compile_proto.erl,$?)))
+endif
+endif
+endif
+
+# Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: relx-rel relx-relup distclean-relx-rel run
+
+# Configuration.
+
+RELX ?= $(ERLANG_MK_TMP)/relx
+RELX_CONFIG ?= $(CURDIR)/relx.config
+
+RELX_URL ?= https://erlang.mk/res/relx-v3.27.0
+RELX_OPTS ?=
+RELX_OUTPUT_DIR ?= _rel
+RELX_REL_EXT ?=
+RELX_TAR ?= 1
+
+ifdef SFX
+ RELX_TAR = 1
+endif
+
+ifeq ($(firstword $(RELX_OPTS)),-o)
+ RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS))
+else
+ RELX_OPTS += -o $(RELX_OUTPUT_DIR)
+endif
+
+# Core targets.
+
+ifeq ($(IS_DEP),)
+ifneq ($(wildcard $(RELX_CONFIG)),)
+rel:: relx-rel
+
+relup:: relx-relup
+endif
+endif
+
+distclean:: distclean-relx-rel
+
+# Plugin-specific targets.
+
+$(RELX): | $(ERLANG_MK_TMP)
+ $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL))
+ $(verbose) chmod +x $(RELX)
+
+relx-rel: $(RELX) rel-deps app
+ $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
+ $(verbose) $(MAKE) relx-post-rel
+ifeq ($(RELX_TAR),1)
+ $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) tar
+endif
+
+relx-relup: $(RELX) rel-deps app
+ $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) release
+ $(MAKE) relx-post-rel
+ $(verbose) $(RELX) $(if $(filter 1,$V),-V 3) -c $(RELX_CONFIG) $(RELX_OPTS) relup $(if $(filter 1,$(RELX_TAR)),tar)
+
+distclean-relx-rel:
+ $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR)
+
+# Default hooks.
+relx-post-rel::
+ $(verbose) :
+
+# Run target.
+
+ifeq ($(wildcard $(RELX_CONFIG)),)
+run::
+else
+
+define get_relx_release.erl
+ {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"),
+ {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config),
+ Vsn = case Vsn0 of
+ {cmd, Cmd} -> os:cmd(Cmd);
+ semver -> "";
+ {semver, _} -> "";
+ VsnStr -> Vsn0
+ end,
+ Extended = case lists:keyfind(extended_start_script, 1, Config) of
+ {_, true} -> "1";
+ _ -> ""
+ end,
+ io:format("~s ~s ~s", [Name, Vsn, Extended]),
+ halt(0).
+endef
+
+RELX_REL := $(shell $(call erlang,$(get_relx_release.erl)))
+RELX_REL_NAME := $(word 1,$(RELX_REL))
+RELX_REL_VSN := $(word 2,$(RELX_REL))
+RELX_REL_CMD := $(if $(word 3,$(RELX_REL)),console)
+
+ifeq ($(PLATFORM),msys2)
+RELX_REL_EXT := .cmd
+endif
+
+run:: all
+ $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) $(RELX_REL_CMD)
+
+ifdef RELOAD
+rel::
+ $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) ping
+ $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) \
+ eval "io:format(\"~p~n\", [c:lm()])"
+endif
+
+help::
+ $(verbose) printf "%s\n" "" \
+ "Relx targets:" \
+ " run Compile the project, build the release and run it"
+
+endif
+
+# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2014, M Robert Martin <rob@version2beta.com>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: shell
+
+# Configuration.
+
+SHELL_ERL ?= erl
+SHELL_PATHS ?= $(CURDIR)/ebin $(TEST_DIR)
+SHELL_OPTS ?=
+
+ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS))
+
+# Core targets
+
+help::
+ $(verbose) printf "%s\n" "" \
+ "Shell targets:" \
+ " shell Run an erlang shell with SHELL_OPTS or reasonable default"
+
+# Plugin-specific targets.
+
+$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep))))
+
+ifneq ($(SKIP_DEPS),)
+build-shell-deps:
+else
+build-shell-deps: $(ALL_SHELL_DEPS_DIRS)
+ $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do \
+ if [ -z "$(strip $(FULL))" ] && [ ! -L $$dep ] && [ -f $$dep/ebin/dep_built ]; then \
+ :; \
+ else \
+ $(MAKE) -C $$dep IS_DEP=1; \
+ if [ ! -L $$dep ] && [ -d $$dep/ebin ]; then touch $$dep/ebin/dep_built; fi; \
+ fi \
+ done
+endif
+
+shell:: build-shell-deps
+ $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS)
+
+# Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: distclean-sphinx sphinx
+
+# Configuration.
+
+SPHINX_BUILD ?= sphinx-build
+SPHINX_SOURCE ?= doc
+SPHINX_CONFDIR ?=
+SPHINX_FORMATS ?= html
+SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees
+SPHINX_OPTS ?=
+
+#sphinx_html_opts =
+#sphinx_html_output = html
+#sphinx_man_opts =
+#sphinx_man_output = man
+#sphinx_latex_opts =
+#sphinx_latex_output = latex
+
+# Helpers.
+
+sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q
+sphinx_build_1 = $(SPHINX_BUILD) -N
+sphinx_build_2 = set -x; $(SPHINX_BUILD)
+sphinx_build = $(sphinx_build_$(V))
+
+define sphinx.build
+$(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1)
+
+endef
+
+define sphinx.output
+$(if $(sphinx_$1_output),$(sphinx_$1_output),$1)
+endef
+
+# Targets.
+
+ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),)
+docs:: sphinx
+distclean:: distclean-sphinx
+endif
+
+help::
+ $(verbose) printf "%s\n" "" \
+ "Sphinx targets:" \
+ " sphinx Generate Sphinx documentation." \
+ "" \
+	"ReST sources and the 'conf.py' file are expected in the directory pointed to by" \
+	"SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists the formats to build (only" \
+	"the 'html' format is generated by default); the target directory can be specified by" \
+ 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \
+ "Additional Sphinx options can be set in SPHINX_OPTS."
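+# For example, one might build both HTML and man pages and redirect the HTML
+# output (the formats and path below are illustrative):
+#   make sphinx SPHINX_FORMATS="html man" sphinx_html_output=output/html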
+
+# Plugin-specific targets.
+
+sphinx:
+ $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F))
+
+distclean-sphinx:
+ $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F)))
+
+# Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is contributed to erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS
+
+show-ERL_LIBS:
+ @echo $(ERL_LIBS)
+
+show-ERLC_OPTS:
+ @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
+
+show-TEST_ERLC_OPTS:
+ @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";)
+
+# Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq)
+.PHONY: triq
+
+# Targets.
+
+tests:: triq
+
+define triq_check.erl
+ $(call cover.erl)
+ code:add_pathsa([
+ "$(call core_native_path,$(CURDIR)/ebin)",
+ "$(call core_native_path,$(DEPS_DIR)/*/ebin)",
+ "$(call core_native_path,$(TEST_DIR))"]),
+ try begin
+ CoverSetup(),
+ Res = case $(1) of
+ all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]);
+ module -> triq:check($(2));
+ function -> triq:check($(2))
+ end,
+ CoverExport("$(COVER_DATA_DIR)/triq.coverdata"),
+ Res
+ end of
+ true -> halt(0);
+ _ -> halt(1)
+ catch error:undef ->
+ io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]),
+ halt(0)
+ end.
+endef
+
+ifdef t
+ifeq (,$(findstring :,$(t)))
+triq: test-build cover-data-dir
+ $(verbose) $(call erlang,$(call triq_check.erl,module,$(t)))
+else
+triq: test-build cover-data-dir
+ $(verbose) echo Testing $(t)/0
+ $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)()))
+endif
+else
+triq: test-build cover-data-dir
+ $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \
+ $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam))))))
+ $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES)))
+endif
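+# For example, one might check every Triq property, a single module, or a
+# single property (the module and property names below are placeholders):
+#   make triq
+#   make triq t=my_module
+#   make triq t=my_module:my_property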
+endif
+
+# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015, Erlang Solutions Ltd.
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: xref distclean-xref
+
+# Configuration.
+
+ifeq ($(XREF_CONFIG),)
+ XREFR_ARGS :=
+else
+ XREFR_ARGS := -c $(XREF_CONFIG)
+endif
+
+XREFR ?= $(CURDIR)/xrefr
+export XREFR
+
+XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr
+
+# Core targets.
+
+help::
+ $(verbose) printf '%s\n' '' \
+ 'Xref targets:' \
+		' xref Run Xrefr using $$XREF_CONFIG as the config file, if defined'
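+# For example, with a custom xref_runner configuration file (the file name
+# below is a placeholder):
+#   make xref XREF_CONFIG=xref.config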
+
+distclean:: distclean-xref
+
+# Plugin-specific targets.
+
+$(XREFR):
+ $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL))
+ $(verbose) chmod +x $(XREFR)
+
+xref: deps app $(XREFR)
+ $(gen_verbose) $(XREFR) $(XREFR_ARGS)
+
+distclean-xref:
+ $(gen_verbose) rm -rf $(XREFR)
+
+# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+COVER_REPORT_DIR ?= cover
+COVER_DATA_DIR ?= $(COVER_REPORT_DIR)
+
+ifdef COVER
+COVER_APPS ?= $(notdir $(ALL_APPS_DIRS))
+COVER_DEPS ?=
+endif
+
+# Code coverage for Common Test.
+
+ifdef COVER
+ifdef CT_RUN
+ifneq ($(wildcard $(TEST_DIR)),)
+test-build:: $(TEST_DIR)/ct.cover.spec
+
+$(TEST_DIR)/ct.cover.spec: cover-data-dir
+ $(gen_verbose) printf "%s\n" \
+ "{incl_app, '$(PROJECT)', details}." \
+ "{incl_dirs, '$(PROJECT)', [\"$(call core_native_path,$(CURDIR)/ebin)\" \
+ $(foreach a,$(COVER_APPS),$(comma) \"$(call core_native_path,$(APPS_DIR)/$a/ebin)\") \
+ $(foreach d,$(COVER_DEPS),$(comma) \"$(call core_native_path,$(DEPS_DIR)/$d/ebin)\")]}." \
+ '{export,"$(call core_native_path,$(abspath $(COVER_DATA_DIR))/ct.coverdata)"}.' > $@
+
+CT_RUN += -cover $(TEST_DIR)/ct.cover.spec
+endif
+endif
+endif
+
+# Code coverage for other tools.
+
+ifdef COVER
+define cover.erl
+ CoverSetup = fun() ->
+ Dirs = ["$(call core_native_path,$(CURDIR)/ebin)"
+ $(foreach a,$(COVER_APPS),$(comma) "$(call core_native_path,$(APPS_DIR)/$a/ebin)")
+ $(foreach d,$(COVER_DEPS),$(comma) "$(call core_native_path,$(DEPS_DIR)/$d/ebin)")],
+ [begin
+ case filelib:is_dir(Dir) of
+ false -> false;
+ true ->
+ case cover:compile_beam_directory(Dir) of
+ {error, _} -> halt(1);
+ _ -> true
+ end
+ end
+ end || Dir <- Dirs]
+ end,
+ CoverExport = fun(Filename) -> cover:export(Filename) end,
+endef
+else
+define cover.erl
+ CoverSetup = fun() -> ok end,
+ CoverExport = fun(_) -> ok end,
+endef
+endif
+
+# Core targets
+
+ifdef COVER
+ifneq ($(COVER_REPORT_DIR),)
+tests::
+ $(verbose) $(MAKE) --no-print-directory cover-report
+endif
+
+cover-data-dir: | $(COVER_DATA_DIR)
+
+$(COVER_DATA_DIR):
+ $(verbose) mkdir -p $(COVER_DATA_DIR)
+else
+cover-data-dir:
+endif
+
+clean:: coverdata-clean
+
+ifneq ($(COVER_REPORT_DIR),)
+distclean:: cover-report-clean
+endif
+
+help::
+ $(verbose) printf "%s\n" "" \
+ "Cover targets:" \
+		" cover-report Generate an HTML coverage report from previously collected" \
+ " cover data." \
+ " all.coverdata Merge all coverdata files into all.coverdata." \
+ "" \
+ "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \
+		"target tests additionally generates an HTML coverage report from the combined" \
+ "coverdata files from each of these testing tools. HTML reports can be disabled" \
+ "by setting COVER_REPORT_DIR to empty."
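+# For example, one might run the test suites with coverage enabled while
+# keeping the coverdata files but skipping the HTML report:
+#   make tests COVER=1 COVER_REPORT_DIR=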
+
+# Plugin specific targets
+
+COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata))
+
+.PHONY: coverdata-clean
+coverdata-clean:
+ $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec
+
+# Merge all coverdata files into one.
+define cover_export.erl
+ $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+ cover:export("$(COVER_DATA_DIR)/$@"), halt(0).
+endef
+
+all.coverdata: $(COVERDATA) cover-data-dir
+ $(gen_verbose) $(call erlang,$(cover_export.erl))
+
+# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to
+# empty if you want the coverdata files but not the HTML report.
+ifneq ($(COVER_REPORT_DIR),)
+
+.PHONY: cover-report-clean cover-report
+
+cover-report-clean:
+ $(gen_verbose) rm -rf $(COVER_REPORT_DIR)
+ifneq ($(COVER_REPORT_DIR),$(COVER_DATA_DIR))
+ $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR))
+endif
+
+ifeq ($(COVERDATA),)
+cover-report:
+else
+
+# Modules which include eunit.hrl always contain one line without coverage
+# because eunit defines test/0 which is never called. We compensate for this.
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \
+ grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \
+ | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq))
+
+define cover_report.erl
+ $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),)
+ Ms = cover:imported_modules(),
+ [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M)
+ ++ ".COVER.html", [html]) || M <- Ms],
+ Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms],
+ EunitHrlMods = [$(EUNIT_HRL_MODS)],
+ Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of
+ true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report],
+ TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]),
+ TotalN = lists:sum([N || {_, {_, N}} <- Report1]),
+ Perc = fun(Y, N) -> case Y + N of 0 -> 100; S -> round(100 * Y / S) end end,
+ TotalPerc = Perc(TotalY, TotalN),
+ {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]),
+ io:format(F, "<!DOCTYPE html><html>~n"
+ "<head><meta charset=\"UTF-8\">~n"
+ "<title>Coverage report</title></head>~n"
+ "<body>~n", []),
+ io:format(F, "<h1>Coverage</h1>~n<p>Total: ~p%</p>~n", [TotalPerc]),
+ io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+ [io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+ "<td>~p%</td></tr>~n",
+ [M, M, Perc(Y, N)]) || {M, {Y, N}} <- Report1],
+ How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+ Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+ io:format(F, "</table>~n"
+ "<p>Generated using ~s and erlang.mk on ~s.</p>~n"
+ "</body></html>", [How, Date]),
+ halt().
+endef
+
+cover-report:
+ $(verbose) mkdir -p $(COVER_REPORT_DIR)
+ $(gen_verbose) $(call erlang,$(cover_report.erl))
+
+endif
+endif # ifneq ($(COVER_REPORT_DIR),)
+
+# Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+.PHONY: sfx
+
+ifdef RELX_REL
+ifdef SFX
+
+# Configuration.
+
+SFX_ARCHIVE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/$(RELX_REL_NAME)-$(RELX_REL_VSN).tar.gz
+SFX_OUTPUT_FILE ?= $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME).run
+
+# Core targets.
+
+rel:: sfx
+
+# Plugin-specific targets.
+
+define sfx_stub
+#!/bin/sh
+
+TMPDIR=`mktemp -d`
+ARCHIVE=`awk '/^__ARCHIVE_BELOW__$$/ {print NR + 1; exit 0;}' $$0`
+FILENAME=$$(basename $$0)
+REL=$${FILENAME%.*}
+
+tail -n+$$ARCHIVE $$0 | tar -xzf - -C $$TMPDIR
+
+$$TMPDIR/bin/$$REL console
+RET=$$?
+
+rm -rf $$TMPDIR
+
+exit $$RET
+
+__ARCHIVE_BELOW__
+endef
+
+sfx:
+ $(verbose) $(call core_render,sfx_stub,$(SFX_OUTPUT_FILE))
+ $(gen_verbose) cat $(SFX_ARCHIVE) >> $(SFX_OUTPUT_FILE)
+ $(verbose) chmod +x $(SFX_OUTPUT_FILE)
+
+endif
+endif
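+# For example, assuming a relx release (with a tar archive) is configured, a
+# self-extracting script can be produced with:
+#   make sfx SFX=1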
+
+# Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# External plugins.
+
+DEP_PLUGINS ?=
+
+$(foreach p,$(DEP_PLUGINS),\
+ $(eval $(if $(findstring /,$p),\
+ $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\
+ $(call core_dep_plugin,$p/plugins.mk,$p))))
+
+help:: help-plugins
+
+help-plugins::
+ $(verbose) :
+
+# Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu>
+# Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com>
+# This file is part of erlang.mk and subject to the terms of the ISC License.
+
+# Fetch dependencies recursively (without building them).
+
+.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \
+ fetch-shell-deps
+
+.PHONY: $(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+ $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+ $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+ $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+ $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+fetch-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+fetch-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+fetch-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+fetch-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+fetch-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+ifneq ($(SKIP_DEPS),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST):
+ $(verbose) :> $@
+else
+# By default, we fetch "normal" dependencies. They are also included
+# regardless of the type of dependencies requested.
+#
+# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS).
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS)
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): $(LOCAL_DEPS_DIRS) $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS)
+
+# Allow the use of fetch-deps and $(DEP_TYPES) to fetch multiple types of
+# dependencies with a single target.
+ifneq ($(filter doc,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_DOC_DEPS_DIRS)
+endif
+ifneq ($(filter rel,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_REL_DEPS_DIRS)
+endif
+ifneq ($(filter test,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_TEST_DEPS_DIRS)
+endif
+ifneq ($(filter shell,$(DEP_TYPES)),)
+$(ERLANG_MK_RECURSIVE_DEPS_LIST): $(ALL_SHELL_DEPS_DIRS)
+endif
+
+ERLANG_MK_RECURSIVE_TMP_LIST := $(abspath $(ERLANG_MK_TMP)/recursive-tmp-deps-$(shell echo $$PPID).log)
+
+$(ERLANG_MK_RECURSIVE_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \
+$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): | $(ERLANG_MK_TMP)
+ifeq ($(IS_APP)$(IS_DEP),)
+ $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+ $(verbose) touch $(ERLANG_MK_RECURSIVE_TMP_LIST)
+ $(verbose) set -e; for dep in $^ ; do \
+ if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \
+ echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \
+ if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \
+ $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \
+ $(MAKE) -C $$dep fetch-deps \
+ IS_DEP=1 \
+ ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \
+ fi \
+ fi \
+ done
+ifeq ($(IS_APP)$(IS_DEP),)
+ $(verbose) sort < $(ERLANG_MK_RECURSIVE_TMP_LIST) | \
+ uniq > $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
+ $(verbose) cmp -s $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@ \
+ || mv $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted $@
+ $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST).sorted
+ $(verbose) rm $(ERLANG_MK_RECURSIVE_TMP_LIST)
+endif
+endif # ifneq ($(SKIP_DEPS),)
+
+# List dependencies recursively.
+
+.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \
+ list-shell-deps
+
+list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST)
+list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST)
+list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST)
+list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST)
+list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST)
+
+list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps:
+ $(verbose) cat $^
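+# For example, one might also include test dependencies in the listing (any of
+# the $(DEP_TYPES) values doc, rel, test or shell can be combined):
+#   make list-deps DEP_TYPES=test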
+
+# Query dependencies recursively.
+
+.PHONY: query-deps query-doc-deps query-rel-deps query-test-deps \
+ query-shell-deps
+
+QUERY ?= name fetch_method repo version
+
+define query_target
+$(1): $(2) clean-tmp-query.log
+ifeq ($(IS_APP)$(IS_DEP),)
+ $(verbose) rm -f $(4)
+endif
+ $(verbose) $(foreach dep,$(3),\
+ echo $(PROJECT): $(foreach q,$(QUERY),$(call query_$(q),$(dep))) >> $(4) ;)
+ $(if $(filter-out query-deps,$(1)),,\
+ $(verbose) set -e; for dep in $(3) ; do \
+ if grep -qs ^$$$$dep$$$$ $(ERLANG_MK_TMP)/query.log; then \
+ :; \
+ else \
+ echo $$$$dep >> $(ERLANG_MK_TMP)/query.log; \
+ $(MAKE) -C $(DEPS_DIR)/$$$$dep $$@ QUERY="$(QUERY)" IS_DEP=1 || true; \
+ fi \
+ done)
+ifeq ($(IS_APP)$(IS_DEP),)
+ $(verbose) touch $(4)
+ $(verbose) cat $(4)
+endif
+endef
+
+clean-tmp-query.log:
+ifeq ($(IS_DEP),)
+ $(verbose) rm -f $(ERLANG_MK_TMP)/query.log
+endif
+
+$(eval $(call query_target,query-deps,$(ERLANG_MK_RECURSIVE_DEPS_LIST),$(BUILD_DEPS) $(DEPS),$(ERLANG_MK_QUERY_DEPS_FILE)))
+$(eval $(call query_target,query-doc-deps,$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST),$(DOC_DEPS),$(ERLANG_MK_QUERY_DOC_DEPS_FILE)))
+$(eval $(call query_target,query-rel-deps,$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST),$(REL_DEPS),$(ERLANG_MK_QUERY_REL_DEPS_FILE)))
+$(eval $(call query_target,query-test-deps,$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST),$(TEST_DEPS),$(ERLANG_MK_QUERY_TEST_DEPS_FILE)))
+$(eval $(call query_target,query-shell-deps,$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST),$(SHELL_DEPS),$(ERLANG_MK_QUERY_SHELL_DEPS_FILE)))
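+# For example, one might query only the name and repository of every
+# dependency (QUERY accepts a subset of: name fetch_method repo version):
+#   make query-deps QUERY="name repo"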
diff --git a/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_recv.pl b/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_recv.pl
new file mode 100755
index 0000000000..7b8b9cce0c
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_recv.pl
@@ -0,0 +1,13 @@
+#!/usr/bin/perl -w
+# subscribe to messages from the queue 'foo'
+use Net::Stomp;
+my $stomp = Net::Stomp->new({hostname=>'localhost', port=>'61613'});
+$stomp->connect({login=>'guest', passcode=>'guest'});
+$stomp->subscribe({'destination'=>'/queue/foo', 'ack'=>'client'});
+while (1) {
+ my $frame = $stomp->receive_frame;
+ print $frame->body . "\n";
+ $stomp->ack({frame=>$frame});
+ last if $frame->body eq 'QUIT';
+}
+$stomp->disconnect;
diff --git a/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_rpc_client.pl b/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_rpc_client.pl
new file mode 100755
index 0000000000..b3e5ee6fd3
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_rpc_client.pl
@@ -0,0 +1,14 @@
+#!/usr/bin/perl -w
+
+use Net::Stomp;
+my $stomp = Net::Stomp->new({hostname=>'localhost', port=>'61613'});
+$stomp->connect({login=>'guest', passcode=>'guest'});
+
+my $private_q_name = "/temp-queue/test";
+
+$stomp->send({destination => '/queue/rabbitmq_stomp_rpc_service',
+ 'reply-to' => $private_q_name,
+ body => "request from $private_q_name"});
+print "Reply: " . $stomp->receive_frame->body . "\n";
+
+$stomp->disconnect;
diff --git a/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_rpc_service.pl b/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_rpc_service.pl
new file mode 100755
index 0000000000..31e79aea4a
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_rpc_service.pl
@@ -0,0 +1,21 @@
+#!/usr/bin/perl -w
+
+use Net::Stomp;
+
+my $stomp = Net::Stomp->new({hostname=>'localhost', port=>'61613'});
+$stomp->connect({login=>'guest', passcode=>'guest'});
+
+$stomp->subscribe({'destination'=>'/queue/rabbitmq_stomp_rpc_service', 'ack'=>'client'});
+while (1) {
+ print "Waiting for request...\n";
+ my $frame = $stomp->receive_frame;
+ print "Received message, reply_to = " . $frame->headers->{"reply-to"} . "\n";
+ print $frame->body . "\n";
+
+ $stomp->send({destination => $frame->headers->{"reply-to"}, bytes_message => 1,
+ body => "Got body: " . $frame->body});
+ $stomp->ack({frame=>$frame});
+ last if $frame->body eq 'QUIT';
+}
+
+$stomp->disconnect;
diff --git a/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_send.pl b/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_send.pl
new file mode 100755
index 0000000000..4d26b7837e
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_send.pl
@@ -0,0 +1,9 @@
+#!/usr/bin/perl -w
+# send a message to the 'amq.fanout' exchange
+use Net::Stomp;
+my $stomp = Net::Stomp->new({hostname=>'localhost', port=>'61613'});
+$stomp->connect({login=>'guest', passcode=>'guest'});
+$stomp->send({destination=>'/exchange/amq.fanout',
+ bytes_message=>1,
+ body=>($ARGV[0] or "test\0message")});
+$stomp->disconnect;
diff --git a/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_send_many.pl b/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_send_many.pl
new file mode 100755
index 0000000000..f6ff54ed95
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_send_many.pl
@@ -0,0 +1,11 @@
+#!/usr/bin/perl -w
+# send 10000 messages to the queue 'foo'
+use Net::Stomp;
+my $stomp = Net::Stomp->new({hostname=>'localhost', port=>'61613'});
+$stomp->connect({login=>'guest', passcode=>'guest'});
+for (my $i = 0; $i < 10000; $i++) {
+ $stomp->send({destination=>'/queue/foo',
+ bytes_message=>1,
+ body=>($ARGV[0] or "message $i")});
+}
+$stomp->disconnect;
diff --git a/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_slow_recv.pl b/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_slow_recv.pl
new file mode 100755
index 0000000000..043568f348
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/perl/rabbitmq_stomp_slow_recv.pl
@@ -0,0 +1,14 @@
+#!/usr/bin/perl -w
+# subscribe to messages from the queue 'foo'
+use Net::Stomp;
+my $stomp = Net::Stomp->new({hostname=>'localhost', port=>'61613'});
+$stomp->connect({login=>'guest', passcode=>'guest', prefetch=>1});
+$stomp->subscribe({'destination'=>'/queue/foo', 'ack'=>'client'});
+while (1) {
+ my $frame = $stomp->receive_frame;
+ print $frame->body . "\n";
+ sleep 1;
+ $stomp->ack({frame=>$frame});
+ last if $frame->body eq 'QUIT';
+}
+$stomp->disconnect;
diff --git a/deps/rabbitmq_stomp/examples/ruby/amq-sender.rb b/deps/rabbitmq_stomp/examples/ruby/amq-sender.rb
new file mode 100644
index 0000000000..baaab5628c
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/ruby/amq-sender.rb
@@ -0,0 +1,10 @@
+require 'rubygems'
+require 'stomp'
+
+client = Stomp::Client.new("guest", "guest", "localhost", 61613)
+
+# This publishes a message to a queue named 'amq-test', which is managed by the AMQP broker.
+client.publish("/amq/queue/amq-test", "test-message")
+
+# close this connection
+client.close
diff --git a/deps/rabbitmq_stomp/examples/ruby/cb-receiver.rb b/deps/rabbitmq_stomp/examples/ruby/cb-receiver.rb
new file mode 100644
index 0000000000..4e6e26141a
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/ruby/cb-receiver.rb
@@ -0,0 +1,8 @@
+require 'rubygems'
+require 'stomp'
+
+conn = Stomp::Connection.open('guest', 'guest', 'localhost')
+conn.subscribe('/queue/carl')
+while mesg = conn.receive
+ puts mesg.body
+end
diff --git a/deps/rabbitmq_stomp/examples/ruby/cb-sender.rb b/deps/rabbitmq_stomp/examples/ruby/cb-sender.rb
new file mode 100644
index 0000000000..3d7594681f
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/ruby/cb-sender.rb
@@ -0,0 +1,6 @@
+require 'rubygems'
+require 'stomp'
+
+client = Stomp::Client.new("guest", "guest", "localhost", 61613)
+10000.times { |i| client.publish '/queue/carl', "Test Message number #{i}"}
+client.publish '/queue/carl', "All Done!"
diff --git a/deps/rabbitmq_stomp/examples/ruby/cb-slow-receiver.rb b/deps/rabbitmq_stomp/examples/ruby/cb-slow-receiver.rb
new file mode 100644
index 0000000000..d98e5f8170
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/ruby/cb-slow-receiver.rb
@@ -0,0 +1,13 @@
+require 'rubygems'
+require 'stomp'
+
+# Note: requires support for connect_headers hash in the STOMP gem's connection.rb
+conn = Stomp::Connection.open('guest', 'guest', 'localhost', 61613, false, 5, {:prefetch => 1})
+conn.subscribe('/queue/carl', {:ack => 'client'})
+while mesg = conn.receive
+ puts mesg.body
+ puts 'Sleeping...'
+ sleep 0.2
+ puts 'Awake again. Acking.'
+ conn.ack mesg.headers['message-id']
+end
diff --git a/deps/rabbitmq_stomp/examples/ruby/exchange-receiver.rb b/deps/rabbitmq_stomp/examples/ruby/exchange-receiver.rb
new file mode 100644
index 0000000000..76bf4a5c9d
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/ruby/exchange-receiver.rb
@@ -0,0 +1,15 @@
+require 'rubygems'
+require 'stomp'
+
+conn = Stomp::Connection.open("guest", "guest", "localhost")
+conn.subscribe '/exchange/amq.fanout/test'
+
+puts "Waiting for messages..."
+
+begin
+ while mesg = conn.receive
+ puts mesg.body
+ end
+rescue Exception => _
+ conn.disconnect
+end
diff --git a/deps/rabbitmq_stomp/examples/ruby/exchange-sender.rb b/deps/rabbitmq_stomp/examples/ruby/exchange-sender.rb
new file mode 100644
index 0000000000..ed556eacae
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/ruby/exchange-sender.rb
@@ -0,0 +1,12 @@
+require 'rubygems'
+require 'stomp'
+
+client = Stomp::Client.new("guest", "guest", "localhost", 61613)
+
+# This publishes a message to the 'amq.fanout' exchange, which is managed by
+# the AMQP broker, using a routing key of 'test'. You can list other exchanges
+# with the 'list_exchanges' subcommand of the 'rabbitmqctl' utility.
+client.publish("/exchange/amq.fanout/test", "test message")
+
+# close this connection
+client.close
diff --git a/deps/rabbitmq_stomp/examples/ruby/persistent-receiver.rb b/deps/rabbitmq_stomp/examples/ruby/persistent-receiver.rb
new file mode 100644
index 0000000000..5a83df6fb0
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/ruby/persistent-receiver.rb
@@ -0,0 +1,11 @@
+require 'rubygems'
+require 'stomp'
+
+conn = Stomp::Connection.open('guest', 'guest', 'localhost')
+conn.subscribe('/queue/durable', :'auto-delete' => false, :durable => true)
+
+puts "Waiting for messages..."
+
+while mesg = conn.receive
+ puts mesg.body
+end
diff --git a/deps/rabbitmq_stomp/examples/ruby/persistent-sender.rb b/deps/rabbitmq_stomp/examples/ruby/persistent-sender.rb
new file mode 100644
index 0000000000..1be32d6c76
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/ruby/persistent-sender.rb
@@ -0,0 +1,13 @@
+require 'rubygems'
+require 'stomp'
+
+# Use this case to test durable queues
+#
+# Start the sender - 11 messages will be sent to /queue/durable, then the sender exits
+# Stop the server - the 11 messages will be written to disk
+# Start the server
+# Start the receiver - the 11 messages should be received; interrupt the receive loop to stop the receiver
+
+client = Stomp::Client.new("guest", "guest", "localhost", 61613)
+10.times { |i| client.publish '/queue/durable', "Test Message number #{i} sent at #{Time.now}", 'delivery-mode' => '2'}
+client.publish '/queue/durable', "All Done!"
diff --git a/deps/rabbitmq_stomp/examples/ruby/temp-queue-client.rb b/deps/rabbitmq_stomp/examples/ruby/temp-queue-client.rb
new file mode 100644
index 0000000000..39828708e8
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/ruby/temp-queue-client.rb
@@ -0,0 +1,9 @@
+require 'rubygems'
+require 'stomp'
+
+conn = Stomp::Connection.open("guest", "guest", "localhost")
+conn.publish("/queue/rpc-service", "test message", {
+ 'reply-to' => '/temp-queue/test'
+})
+puts conn.receive.body
+conn.disconnect
diff --git a/deps/rabbitmq_stomp/examples/ruby/temp-queue-service.rb b/deps/rabbitmq_stomp/examples/ruby/temp-queue-service.rb
new file mode 100644
index 0000000000..fea4fa7803
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/ruby/temp-queue-service.rb
@@ -0,0 +1,15 @@
+require 'rubygems'
+require 'stomp'
+
+conn = Stomp::Connection.open("guest", "guest", "localhost")
+conn.subscribe '/queue/rpc-service'
+
+begin
+ while mesg = conn.receive
+    puts "received message, replying to #{mesg.headers['reply-to']}"
+
+ conn.publish(mesg.headers['reply-to'], '(reply) ' + mesg.body)
+ end
+rescue Exception => _
+ conn.disconnect
+end
diff --git a/deps/rabbitmq_stomp/examples/ruby/topic-broadcast-receiver.rb b/deps/rabbitmq_stomp/examples/ruby/topic-broadcast-receiver.rb
new file mode 100644
index 0000000000..b338e53c34
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/ruby/topic-broadcast-receiver.rb
@@ -0,0 +1,11 @@
+require 'rubygems'
+require 'stomp'
+
+topic = ARGV[0] || 'x'
+puts "Binding to /topic/#{topic}"
+
+conn = Stomp::Connection.open('guest', 'guest', 'localhost')
+conn.subscribe("/topic/#{topic}")
+while mesg = conn.receive
+ puts mesg.body
+end
diff --git a/deps/rabbitmq_stomp/examples/ruby/topic-broadcast-with-unsubscribe.rb b/deps/rabbitmq_stomp/examples/ruby/topic-broadcast-with-unsubscribe.rb
new file mode 100644
index 0000000000..19f05ee9d2
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/ruby/topic-broadcast-with-unsubscribe.rb
@@ -0,0 +1,13 @@
+require 'rubygems'
+require 'stomp' # this is a gem
+
+conn = Stomp::Connection.open('guest', 'guest', 'localhost')
+puts "Subscribing to /topic/x"
+conn.subscribe('/topic/x')
+puts 'Receiving...'
+mesg = conn.receive
+puts mesg.body
+puts "Unsubscribing from /topic/x"
+conn.unsubscribe('/topic/x')
+puts 'Sleeping 5 seconds...'
+sleep 5
diff --git a/deps/rabbitmq_stomp/examples/ruby/topic-sender.rb b/deps/rabbitmq_stomp/examples/ruby/topic-sender.rb
new file mode 100644
index 0000000000..b0861f9542
--- /dev/null
+++ b/deps/rabbitmq_stomp/examples/ruby/topic-sender.rb
@@ -0,0 +1,7 @@
+require 'rubygems'
+require 'stomp'
+
+client = Stomp::Client.new("guest", "guest", "localhost", 61613)
+client.publish '/topic/x.y', 'first message'
+client.publish '/topic/x.z', 'second message'
+client.publish '/topic/x', 'third message'
diff --git a/deps/rabbitmq_stomp/include/rabbit_stomp.hrl b/deps/rabbitmq_stomp/include/rabbit_stomp.hrl
new file mode 100644
index 0000000000..3d31535d14
--- /dev/null
+++ b/deps/rabbitmq_stomp/include/rabbit_stomp.hrl
@@ -0,0 +1,42 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-record(stomp_configuration, {default_login,
+ default_passcode,
+ force_default_creds = false,
+ implicit_connect,
+ ssl_cert_login}).
+
+-define(SUPPORTED_VERSIONS, ["1.0", "1.1", "1.2"]).
+
+-define(INFO_ITEMS,
+ [conn_name,
+ connection,
+ connection_state,
+ session_id,
+ channel,
+ version,
+ implicit_connect,
+ auth_login,
+ auth_mechanism,
+ peer_addr,
+ host,
+ port,
+ peer_host,
+ peer_port,
+ protocol,
+ channels,
+ channel_max,
+ frame_max,
+ client_properties,
+ ssl,
+ ssl_protocol,
+ ssl_key_exchange,
+ ssl_cipher,
+ ssl_hash]).
+
+-define(STOMP_GUIDE_URL, <<"https://rabbitmq.com/stomp.html">>).
diff --git a/deps/rabbitmq_stomp/include/rabbit_stomp_frame.hrl b/deps/rabbitmq_stomp/include/rabbit_stomp_frame.hrl
new file mode 100644
index 0000000000..13b8b2e94c
--- /dev/null
+++ b/deps/rabbitmq_stomp/include/rabbit_stomp_frame.hrl
@@ -0,0 +1,8 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-record(stomp_frame, {command, headers, body_iolist}).
diff --git a/deps/rabbitmq_stomp/include/rabbit_stomp_headers.hrl b/deps/rabbitmq_stomp/include/rabbit_stomp_headers.hrl
new file mode 100644
index 0000000000..974b5825c8
--- /dev/null
+++ b/deps/rabbitmq_stomp/include/rabbit_stomp_headers.hrl
@@ -0,0 +1,73 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-define(HEADER_ACCEPT_VERSION, "accept-version").
+-define(HEADER_ACK, "ack").
+-define(HEADER_AMQP_MESSAGE_ID, "amqp-message-id").
+-define(HEADER_APP_ID, "app-id").
+-define(HEADER_AUTO_DELETE, "auto-delete").
+-define(HEADER_CONTENT_ENCODING, "content-encoding").
+-define(HEADER_CONTENT_LENGTH, "content-length").
+-define(HEADER_CONTENT_TYPE, "content-type").
+-define(HEADER_CORRELATION_ID, "correlation-id").
+-define(HEADER_DESTINATION, "destination").
+-define(HEADER_DURABLE, "durable").
+-define(HEADER_EXPIRATION, "expiration").
+-define(HEADER_EXCLUSIVE, "exclusive").
+-define(HEADER_HEART_BEAT, "heart-beat").
+-define(HEADER_HOST, "host").
+-define(HEADER_ID, "id").
+-define(HEADER_LOGIN, "login").
+-define(HEADER_MESSAGE_ID, "message-id").
+-define(HEADER_PASSCODE, "passcode").
+-define(HEADER_PERSISTENT, "persistent").
+-define(HEADER_PREFETCH_COUNT, "prefetch-count").
+-define(HEADER_PRIORITY, "priority").
+-define(HEADER_RECEIPT, "receipt").
+-define(HEADER_REDELIVERED, "redelivered").
+-define(HEADER_REPLY_TO, "reply-to").
+-define(HEADER_SERVER, "server").
+-define(HEADER_SESSION, "session").
+-define(HEADER_SUBSCRIPTION, "subscription").
+-define(HEADER_TIMESTAMP, "timestamp").
+-define(HEADER_TRANSACTION, "transaction").
+-define(HEADER_TYPE, "type").
+-define(HEADER_USER_ID, "user-id").
+-define(HEADER_VERSION, "version").
+-define(HEADER_X_DEAD_LETTER_EXCHANGE, "x-dead-letter-exchange").
+-define(HEADER_X_DEAD_LETTER_ROUTING_KEY, "x-dead-letter-routing-key").
+-define(HEADER_X_EXPIRES, "x-expires").
+-define(HEADER_X_MAX_LENGTH, "x-max-length").
+-define(HEADER_X_MAX_LENGTH_BYTES, "x-max-length-bytes").
+-define(HEADER_X_MAX_PRIORITY, "x-max-priority").
+-define(HEADER_X_MESSAGE_TTL, "x-message-ttl").
+-define(HEADER_X_QUEUE_NAME, "x-queue-name").
+-define(HEADER_X_QUEUE_TYPE, "x-queue-type").
+
+-define(MESSAGE_ID_SEPARATOR, "@@").
+
+-define(HEADERS_NOT_ON_SEND, [?HEADER_MESSAGE_ID]).
+
+-define(TEMP_QUEUE_ID_PREFIX, "/temp-queue/").
+
+-define(HEADER_ARGUMENTS, [
+ ?HEADER_X_DEAD_LETTER_EXCHANGE,
+ ?HEADER_X_DEAD_LETTER_ROUTING_KEY,
+ ?HEADER_X_EXPIRES,
+ ?HEADER_X_MAX_LENGTH,
+ ?HEADER_X_MAX_LENGTH_BYTES,
+ ?HEADER_X_MAX_PRIORITY,
+ ?HEADER_X_MESSAGE_TTL,
+ ?HEADER_X_QUEUE_TYPE
+ ]).
+
+-define(HEADER_PARAMS, [
+ ?HEADER_AUTO_DELETE,
+ ?HEADER_DURABLE,
+ ?HEADER_EXCLUSIVE,
+ ?HEADER_PERSISTENT
+ ]).
diff --git a/deps/rabbitmq_stomp/priv/schema/rabbitmq_stomp.schema b/deps/rabbitmq_stomp/priv/schema/rabbitmq_stomp.schema
new file mode 100644
index 0000000000..8b8646ae82
--- /dev/null
+++ b/deps/rabbitmq_stomp/priv/schema/rabbitmq_stomp.schema
@@ -0,0 +1,237 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% ==========================================================================
+%% ----------------------------------------------------------------------------
+%% RabbitMQ Stomp Adapter
+%%
+%% See https://www.rabbitmq.com/stomp.html for details
+%% ----------------------------------------------------------------------------
+
+% {rabbitmq_stomp,
+% [%% Network Configuration - the format is generally the same as for the broker
+
+%% Listen only on localhost (ipv4 & ipv6) on a specific port.
+%% {tcp_listeners, [{"127.0.0.1", 61613},
+%% {"::1", 61613}]},
+
+{mapping, "stomp.listeners.tcp", "rabbitmq_stomp.tcp_listeners",[
+ {datatype, {enum, [none]}}
+]}.
+
+{mapping, "stomp.listeners.tcp.$name", "rabbitmq_stomp.tcp_listeners",[
+ {datatype, [integer, ip]}
+]}.
+
+{translation, "rabbitmq_stomp.tcp_listeners",
+fun(Conf) ->
+ case cuttlefish:conf_get("stomp.listeners.tcp", Conf, undefined) of
+ none -> [];
+ _ ->
+ Settings = cuttlefish_variable:filter_by_prefix("stomp.listeners.tcp", Conf),
+ [ V || {_, V} <- Settings ]
+ end
+end}.
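+%% New-style configuration equivalent of the example above (addresses and
+%% ports are illustrative):
+%%
+%% stomp.listeners.tcp.1 = 127.0.0.1:61613
+%% stomp.listeners.tcp.2 = ::1:61613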
+
+{mapping, "stomp.tcp_listen_options", "rabbitmq_stomp.tcp_listen_options", [
+ {datatype, {enum, [none]}}]}.
+
+{translation, "rabbitmq_stomp.tcp_listen_options",
+fun(Conf) ->
+ case cuttlefish:conf_get("stomp.tcp_listen_options", Conf, undefined) of
+ none -> [];
+ _ -> cuttlefish:invalid("Invalid stomp.tcp_listen_options")
+ end
+end}.
+
+{mapping, "stomp.tcp_listen_options.backlog", "rabbitmq_stomp.tcp_listen_options.backlog", [
+ {datatype, integer}
+]}.
+
+{mapping, "stomp.tcp_listen_options.nodelay", "rabbitmq_stomp.tcp_listen_options.nodelay", [
+ {datatype, {enum, [true, false]}}
+]}.
+
+{mapping, "stomp.tcp_listen_options.buffer", "rabbitmq_stomp.tcp_listen_options.buffer",
+ [{datatype, integer}]}.
+
+{mapping, "stomp.tcp_listen_options.delay_send", "rabbitmq_stomp.tcp_listen_options.delay_send",
+ [{datatype, {enum, [true, false]}}]}.
+
+{mapping, "stomp.tcp_listen_options.dontroute", "rabbitmq_stomp.tcp_listen_options.dontroute",
+ [{datatype, {enum, [true, false]}}]}.
+
+{mapping, "stomp.tcp_listen_options.exit_on_close", "rabbitmq_stomp.tcp_listen_options.exit_on_close",
+ [{datatype, {enum, [true, false]}}]}.
+
+{mapping, "stomp.tcp_listen_options.fd", "rabbitmq_stomp.tcp_listen_options.fd",
+ [{datatype, integer}]}.
+
+{mapping, "stomp.tcp_listen_options.high_msgq_watermark", "rabbitmq_stomp.tcp_listen_options.high_msgq_watermark",
+ [{datatype, integer}]}.
+
+{mapping, "stomp.tcp_listen_options.high_watermark", "rabbitmq_stomp.tcp_listen_options.high_watermark",
+ [{datatype, integer}]}.
+
+{mapping, "stomp.tcp_listen_options.keepalive", "rabbitmq_stomp.tcp_listen_options.keepalive",
+ [{datatype, {enum, [true, false]}}]}.
+
+{mapping, "stomp.tcp_listen_options.low_msgq_watermark", "rabbitmq_stomp.tcp_listen_options.low_msgq_watermark",
+ [{datatype, integer}]}.
+
+{mapping, "stomp.tcp_listen_options.low_watermark", "rabbitmq_stomp.tcp_listen_options.low_watermark",
+ [{datatype, integer}]}.
+
+{mapping, "stomp.tcp_listen_options.port", "rabbitmq_stomp.tcp_listen_options.port",
+ [{datatype, integer}, {validators, ["port"]}]}.
+
+{mapping, "stomp.tcp_listen_options.priority", "rabbitmq_stomp.tcp_listen_options.priority",
+ [{datatype, integer}]}.
+
+{mapping, "stomp.tcp_listen_options.recbuf", "rabbitmq_stomp.tcp_listen_options.recbuf",
+ [{datatype, integer}]}.
+
+{mapping, "stomp.tcp_listen_options.send_timeout", "rabbitmq_stomp.tcp_listen_options.send_timeout",
+ [{datatype, integer}]}.
+
+{mapping, "stomp.tcp_listen_options.send_timeout_close", "rabbitmq_stomp.tcp_listen_options.send_timeout_close",
+ [{datatype, {enum, [true, false]}}]}.
+
+{mapping, "stomp.tcp_listen_options.sndbuf", "rabbitmq_stomp.tcp_listen_options.sndbuf",
+ [{datatype, integer}]}.
+
+{mapping, "stomp.tcp_listen_options.tos", "rabbitmq_stomp.tcp_listen_options.tos",
+ [{datatype, integer}]}.
+
+{mapping, "stomp.tcp_listen_options.linger.on", "rabbitmq_stomp.tcp_listen_options.linger",
+ [{datatype, {enum, [true, false]}}]}.
+
+{mapping, "stomp.tcp_listen_options.linger.timeout", "rabbitmq_stomp.tcp_listen_options.linger",
+ [{datatype, integer}, {validators, ["non_negative_integer"]}]}.
+
+{translation, "rabbitmq_stomp.tcp_listen_options.linger",
+fun(Conf) ->
+ LingerOn = cuttlefish:conf_get("stomp.tcp_listen_options.linger.on", Conf, false),
+ LingerTimeout = cuttlefish:conf_get("stomp.tcp_listen_options.linger.timeout", Conf, 0),
+ {LingerOn, LingerTimeout}
+end}.
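+%% For example (values are illustrative):
+%%
+%% stomp.tcp_listen_options.linger.on      = true
+%% stomp.tcp_listen_options.linger.timeout = 10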
+
+
+%%
+%% TLS
+%%
+
+{mapping, "stomp.listeners.ssl", "rabbitmq_stomp.ssl_listeners",[
+ {datatype, {enum, [none]}}
+]}.
+
+{mapping, "stomp.listeners.ssl.$name", "rabbitmq_stomp.ssl_listeners",[
+ {datatype, [integer, ip]}
+]}.
+
+{translation, "rabbitmq_stomp.ssl_listeners",
+fun(Conf) ->
+ case cuttlefish:conf_get("stomp.listeners.ssl", Conf, undefined) of
+ none -> [];
+ _ ->
+ Settings = cuttlefish_variable:filter_by_prefix("stomp.listeners.ssl", Conf),
+ [ V || {_, V} <- Settings ]
+ end
+end}.
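+%% For example (the port is illustrative):
+%%
+%% stomp.listeners.ssl.default = 61614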
+
+%% Number of Erlang processes that will accept connections for the TCP
+%% and SSL listeners.
+%%
+%% {num_tcp_acceptors, 10},
+%% {num_ssl_acceptors, 10},
+
+{mapping, "stomp.num_acceptors.ssl", "rabbitmq_stomp.num_ssl_acceptors", [
+ {datatype, integer}
+]}.
+
+{mapping, "stomp.num_acceptors.tcp", "rabbitmq_stomp.num_tcp_acceptors", [
+ {datatype, integer}
+]}.
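+%% For example (values are illustrative):
+%%
+%% stomp.num_acceptors.tcp = 10
+%% stomp.num_acceptors.ssl = 10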
+
+%% Additional TLS options
+
+%% Extract a name from the client's certificate when using TLS.
+%%
+%% Defaults to true.
+
+{mapping, "stomp.ssl_cert_login", "rabbitmq_stomp.ssl_cert_login",
+ [{datatype, {enum, [true, false]}}]}.
+
+%% Set a default user name and password. This is used as the default login
+%% whenever a CONNECT frame omits the login and passcode headers.
+%%
+%% Please note that setting this will allow clients to connect without
+%% authenticating!
+%%
+%% {default_user, [{login, "guest"},
+%% {passcode, "guest"}]},
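+%% New-style configuration equivalent (credentials are placeholders):
+%%
+%% stomp.default_user = guest
+%% stomp.default_pass = guest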
+
+{mapping, "stomp.default_vhost", "rabbitmq_stomp.default_vhost", [
+ {datatype, string}
+]}.
+
+{translation, "rabbitmq_stomp.default_vhost",
+fun(Conf) ->
+ list_to_binary(cuttlefish:conf_get("stomp.default_vhost", Conf, "/"))
+end}.
+
+{mapping, "stomp.default_user", "rabbitmq_stomp.default_user.login", [
+ {datatype, string}
+]}.
+
+{mapping, "stomp.default_pass", "rabbitmq_stomp.default_user.passcode", [
+ {datatype, string}
+]}.
+
+{mapping, "stomp.default_topic_exchange", "rabbitmq_stomp.default_topic_exchange", [
+ {datatype, string}
+]}.
+
+{translation, "rabbitmq_stomp.default_topic_exchange",
+fun(Conf) ->
+ list_to_binary(cuttlefish:conf_get("stomp.default_topic_exchange", Conf, "amq.topic"))
+end}.
+
+%% If a default user is configured, or if x.509
+%% certificate-based client authentication is used, use this setting to allow clients to
+%% omit the CONNECT frame entirely. If set to true, the client is
+%% automatically connected as the default user or the user supplied in the
+%% x.509/TLS certificate whenever the first frame sent on a session is not a
+%% CONNECT frame.
+%%
+%% Defaults to true.
+
+{mapping, "stomp.implicit_connect", "rabbitmq_stomp.implicit_connect",
+ [{datatype, {enum, [true, false]}}]}.
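+%% For example:
+%%
+%% stomp.implicit_connect = true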
+
+%% Whether or not to enable proxy protocol support.
+%%
+%% Defaults to false.
+
+{mapping, "stomp.proxy_protocol", "rabbitmq_stomp.proxy_protocol",
+ [{datatype, {enum, [true, false]}}]}.
+
+%% Whether or not to hide server info
+%%
+%% Defaults to false.
+
+{mapping, "stomp.hide_server_info", "rabbitmq_stomp.hide_server_info",
+ [{datatype, {enum, [true, false]}}]}.
+
+%% Whether or not to always requeue the message on NACK.
+%% If not set, requeueing is controlled by the frame's "requeue" header.
+%% Useful when you do not fully control the STOMP consumer implementation.
+%%
+%% Defaults to true.
+
+{mapping, "stomp.default_nack_requeue", "rabbitmq_stomp.default_nack_requeue",
+ [{datatype, {enum, [true, false]}}]}.
diff --git a/deps/rabbitmq_stomp/rabbitmq-components.mk b/deps/rabbitmq_stomp/rabbitmq-components.mk
new file mode 100644
index 0000000000..b2a3be8b35
--- /dev/null
+++ b/deps/rabbitmq_stomp/rabbitmq-components.mk
@@ -0,0 +1,359 @@
+ifeq ($(.DEFAULT_GOAL),)
+# Set the default goal to `all` because this file defines some targets
+# before the inclusion of erlang.mk, which would otherwise lead to the
+# wrong target becoming the default.
+.DEFAULT_GOAL = all
+endif
+
+# PROJECT_VERSION defaults to:
+# 1. the version exported by rabbitmq-server-release;
+# 2. the version stored in `git-revisions.txt`, if it exists;
+# 3. a version based on git-describe(1), if it is a Git clone;
+# 4. 0.0.0
+
+PROJECT_VERSION := $(RABBITMQ_VERSION)
+
+ifeq ($(PROJECT_VERSION),)
+PROJECT_VERSION := $(shell \
+if test -f git-revisions.txt; then \
+ head -n1 git-revisions.txt | \
+ awk '{print $$$(words $(PROJECT_DESCRIPTION) version);}'; \
+else \
+ (git describe --dirty --abbrev=7 --tags --always --first-parent \
+ 2>/dev/null || echo rabbitmq_v0_0_0) | \
+ sed -e 's/^rabbitmq_v//' -e 's/^v//' -e 's/_/./g' -e 's/-/+/' \
+ -e 's/-/./g'; \
+fi)
+endif
+
+# --------------------------------------------------------------------
+# RabbitMQ components.
+# --------------------------------------------------------------------
+
+# For RabbitMQ repositories, we want to check out branches which match
+# the parent project. For instance, if the parent project is on a
+# release tag, dependencies must be on the same release tag. If the
+# parent project is on a topic branch, dependencies must be on the same
+# topic branch, or fall back to `stable` or `master`, whichever was the
+# base of the topic branch.
+
+dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_amqp10_client = git_rmq rabbitmq-amqp1.0-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_amqp10_common = git_rmq rabbitmq-amqp1.0-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_cache = git_rmq rabbitmq-auth-backend-cache $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_backend_oauth2 = git_rmq rabbitmq-auth-backend-oauth2 $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_aws = git_rmq rabbitmq-aws $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_cli = git_rmq rabbitmq-cli $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_client_helpers = git_rmq rabbitmq-ct-client-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_ct_helpers = git_rmq rabbitmq-ct-helpers $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_client = git_rmq rabbitmq-jms-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_cts = git_rmq rabbitmq-jms-cts $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_jms_topic_exchange = git_rmq rabbitmq-jms-topic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_lvc_exchange = git_rmq rabbitmq-lvc-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_objc_client = git_rmq rabbitmq-objc-client $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_peer_discovery_aws = git_rmq rabbitmq-peer-discovery-aws $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_peer_discovery_common = git_rmq rabbitmq-peer-discovery-common $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_peer_discovery_consul = git_rmq rabbitmq-peer-discovery-consul $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_peer_discovery_etcd = git_rmq rabbitmq-peer-discovery-etcd $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_peer_discovery_k8s = git_rmq rabbitmq-peer-discovery-k8s $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_prometheus = git_rmq rabbitmq-prometheus $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_random_exchange = git_rmq rabbitmq-random-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_routing_node_stamp = git_rmq rabbitmq-routing-node-stamp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_server_release = git_rmq rabbitmq-server-release $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_stream = git_rmq rabbitmq-stream $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_trust_store = git_rmq rabbitmq-trust-store $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt = git_rmq rabbitmq-web-mqtt $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_web_mqtt_examples = git_rmq rabbitmq-web-mqtt-examples $(current_rmq_ref) $(base_rmq_ref) master
+dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master
+dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master
+
+dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master
+
+# Third-party dependencies version pinning.
+#
+# We do that in this file, which is copied into all projects, to ensure
+# all projects use the same versions. It avoids conflicts and makes it
+# possible to work with rabbitmq-public-umbrella.
+
+dep_accept = hex 0.3.5
+dep_cowboy = hex 2.8.0
+dep_cowlib = hex 2.9.1
+dep_jsx = hex 2.11.0
+dep_lager = hex 3.8.0
+dep_prometheus = git https://github.com/deadtrickster/prometheus.erl.git master
+dep_ra = git https://github.com/rabbitmq/ra.git master
+dep_ranch = hex 1.7.1
+dep_recon = hex 2.5.1
+dep_observer_cli = hex 1.5.4
+dep_stdout_formatter = hex 0.2.4
+dep_sysmon_handler = hex 1.3.0
+
+RABBITMQ_COMPONENTS = amqp_client \
+ amqp10_common \
+ amqp10_client \
+ rabbit \
+ rabbit_common \
+ rabbitmq_amqp1_0 \
+ rabbitmq_auth_backend_amqp \
+ rabbitmq_auth_backend_cache \
+ rabbitmq_auth_backend_http \
+ rabbitmq_auth_backend_ldap \
+ rabbitmq_auth_backend_oauth2 \
+ rabbitmq_auth_mechanism_ssl \
+ rabbitmq_aws \
+ rabbitmq_boot_steps_visualiser \
+ rabbitmq_cli \
+ rabbitmq_codegen \
+ rabbitmq_consistent_hash_exchange \
+ rabbitmq_ct_client_helpers \
+ rabbitmq_ct_helpers \
+ rabbitmq_delayed_message_exchange \
+ rabbitmq_dotnet_client \
+ rabbitmq_event_exchange \
+ rabbitmq_federation \
+ rabbitmq_federation_management \
+ rabbitmq_java_client \
+ rabbitmq_jms_client \
+ rabbitmq_jms_cts \
+ rabbitmq_jms_topic_exchange \
+ rabbitmq_lvc_exchange \
+ rabbitmq_management \
+ rabbitmq_management_agent \
+ rabbitmq_management_exchange \
+ rabbitmq_management_themes \
+ rabbitmq_message_timestamp \
+ rabbitmq_metronome \
+ rabbitmq_mqtt \
+ rabbitmq_objc_client \
+ rabbitmq_peer_discovery_aws \
+ rabbitmq_peer_discovery_common \
+ rabbitmq_peer_discovery_consul \
+ rabbitmq_peer_discovery_etcd \
+ rabbitmq_peer_discovery_k8s \
+ rabbitmq_prometheus \
+ rabbitmq_random_exchange \
+ rabbitmq_recent_history_exchange \
+ rabbitmq_routing_node_stamp \
+ rabbitmq_rtopic_exchange \
+ rabbitmq_server_release \
+ rabbitmq_sharding \
+ rabbitmq_shovel \
+ rabbitmq_shovel_management \
+ rabbitmq_stomp \
+ rabbitmq_stream \
+ rabbitmq_toke \
+ rabbitmq_top \
+ rabbitmq_tracing \
+ rabbitmq_trust_store \
+ rabbitmq_web_dispatch \
+ rabbitmq_web_mqtt \
+ rabbitmq_web_mqtt_examples \
+ rabbitmq_web_stomp \
+ rabbitmq_web_stomp_examples \
+ rabbitmq_website
+
+# Erlang.mk does not rebuild dependencies after they have been compiled
+# once, except for those listed in the `$(FORCE_REBUILD)` variable.
+#
+# We want all RabbitMQ components to always be rebuilt: this eases
+# working on several components at the same time.
+
+FORCE_REBUILD = $(RABBITMQ_COMPONENTS)
+
+# Several components have a custom erlang.mk/build.config, mainly
+# to disable eunit. Therefore, we can't use the top-level project's
+# erlang.mk copy.
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS)
+
+ifeq ($(origin current_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+current_rmq_ref := $(shell (\
+ ref=$$(LANG=C git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\
+ if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi))
+else
+current_rmq_ref := master
+endif
+endif
+export current_rmq_ref
+
+ifeq ($(origin base_rmq_ref),undefined)
+ifneq ($(wildcard .git),)
+possible_base_rmq_ref := master
+ifeq ($(possible_base_rmq_ref),$(current_rmq_ref))
+base_rmq_ref := $(current_rmq_ref)
+else
+base_rmq_ref := $(shell \
+ (git rev-parse --verify -q master >/dev/null && \
+ git rev-parse --verify -q $(possible_base_rmq_ref) >/dev/null && \
+ git merge-base --is-ancestor $$(git merge-base master HEAD) $(possible_base_rmq_ref) && \
+ echo $(possible_base_rmq_ref)) || \
+ echo master)
+endif
+else
+base_rmq_ref := master
+endif
+endif
+export base_rmq_ref
+
+# Repository URL selection.
+#
+# First, we infer other components' locations from the current project's
+# repository URL, if it's a Git repository:
+# - We take the "origin" remote URL as the base
+# - The current project name and repository name are replaced by the
+#   target's properties:
+#   eg. rabbitmq-common is replaced by rabbitmq-codegen
+#   eg. rabbit_common is replaced by rabbitmq_codegen
+#
+# If cloning from this computed location fails, we fall back to the
+# RabbitMQ upstream, which is GitHub.
+
+# Macro to transform eg. "rabbit_common" to "rabbitmq-common".
+rmq_cmp_repo_name = $(word 2,$(dep_$(1)))
+
+# Upstream URL for the current project.
+RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT))
+RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git
+
+# Current URL for the current project. If this is not a Git clone,
+# default to the upstream Git repository.
+ifneq ($(wildcard .git),)
+git_origin_fetch_url := $(shell git config remote.origin.url)
+git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url)
+RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url)
+RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url)
+else
+RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL)
+RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL)
+endif
+
+# Macro to replace the following pattern:
+# 1. /foo.git -> /bar.git
+# 2. /foo -> /bar
+# 3. /foo/ -> /bar/
+subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3))))
+
+# Macro to replace both the project's name (eg. "rabbit_common") and
+# repository name (eg. "rabbitmq-common") by the target's equivalent.
+#
+# This macro is kept on one line because we don't want whitespaces in
+# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell
+# single-quoted string.
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo))
+
+dep_rmq_commits = $(if $(dep_$(1)), \
+ $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \
+ $(pkg_$(1)_commit))
+
+define dep_fetch_git_rmq
+ fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \
+ fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \
+ if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \
+ git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+ fetch_url="$$$$fetch_url1"; \
+ push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \
+ elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \
+ fetch_url="$$$$fetch_url2"; \
+ push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \
+ fi; \
+ cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \
+ $(foreach ref,$(call dep_rmq_commits,$(1)), \
+ git checkout -q $(ref) >/dev/null 2>&1 || \
+ ) \
+ (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \
+ 1>&2 && false) ) && \
+ (test "$$$$fetch_url" = "$$$$push_url" || \
+ git remote set-url --push origin "$$$$push_url")
+endef
+
+# --------------------------------------------------------------------
+# Component distribution.
+# --------------------------------------------------------------------
+
+list-dist-deps::
+ @:
+
+prepare-dist::
+ @:
+
+# --------------------------------------------------------------------
+# Umbrella-specific settings.
+# --------------------------------------------------------------------
+
+# If the top-level project is a RabbitMQ component, we override
+# $(DEPS_DIR) for this project to point to the top-level's one.
+#
+# We also verify that the guessed DEPS_DIR is actually named `deps`,
+# to rule out any situation where it is a coincidence that we found a
+# `rabbitmq-components.mk` in an upper directory.
+
+possible_deps_dir_1 = $(abspath ..)
+possible_deps_dir_2 = $(abspath ../../..)
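+# For illustration: a plugin checked out at "<umbrella>/deps/rabbitmq_stomp"
+# matches the first case ($(possible_deps_dir_1) is the umbrella's "deps"
+# directory), while a project nested three levels below "deps" would match
+# the second case.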
+
+ifeq ($(notdir $(possible_deps_dir_1)),deps)
+ifneq ($(wildcard $(possible_deps_dir_1)/../rabbitmq-components.mk),)
+deps_dir_overriden = 1
+DEPS_DIR ?= $(possible_deps_dir_1)
+DISABLE_DISTCLEAN = 1
+endif
+endif
+
+ifeq ($(deps_dir_overriden),)
+ifeq ($(notdir $(possible_deps_dir_2)),deps)
+ifneq ($(wildcard $(possible_deps_dir_2)/../rabbitmq-components.mk),)
+deps_dir_overriden = 1
+DEPS_DIR ?= $(possible_deps_dir_2)
+DISABLE_DISTCLEAN = 1
+endif
+endif
+endif
+
+ifneq ($(wildcard UMBRELLA.md),)
+DISABLE_DISTCLEAN = 1
+endif
+
+# We disable `make distclean` so $(DEPS_DIR) is not accidentally removed.
+
+ifeq ($(DISABLE_DISTCLEAN),1)
+ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),)
+SKIP_DEPS = 1
+endif
+endif
diff --git a/deps/rabbitmq_stomp/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStompConnectionsCommand.erl b/deps/rabbitmq_stomp/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStompConnectionsCommand.erl
new file mode 100644
index 0000000000..d26615e99f
--- /dev/null
+++ b/deps/rabbitmq_stomp/src/Elixir.RabbitMQ.CLI.Ctl.Commands.ListStompConnectionsCommand.erl
@@ -0,0 +1,95 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+
+-module('Elixir.RabbitMQ.CLI.Ctl.Commands.ListStompConnectionsCommand').
+
+-include("rabbit_stomp.hrl").
+
+-behaviour('Elixir.RabbitMQ.CLI.CommandBehaviour').
+
+-export([formatter/0,
+ scopes/0,
+ switches/0,
+ aliases/0,
+ usage/0,
+ usage_additional/0,
+ usage_doc_guides/0,
+ banner/2,
+ validate/2,
+ merge_defaults/2,
+ run/2,
+ output/2,
+ description/0,
+ help_section/0]).
+
+formatter() -> 'Elixir.RabbitMQ.CLI.Formatters.Table'.
+
+scopes() -> [ctl, diagnostics].
+
+switches() -> [{verbose, boolean}].
+aliases() -> [{'V', verbose}].
+
+description() -> <<"Lists STOMP connections on the target node">>.
+
+help_section() ->
+ {plugin, stomp}.
+
+validate(Args, _) ->
+ case 'Elixir.RabbitMQ.CLI.Ctl.InfoKeys':validate_info_keys(Args,
+ ?INFO_ITEMS) of
+ {ok, _} -> ok;
+ Error -> Error
+ end.
+
+merge_defaults([], Opts) ->
+ merge_defaults([<<"session_id">>, <<"conn_name">>], Opts);
+merge_defaults(Args, Opts) ->
+ {Args, maps:merge(#{verbose => false}, Opts)}.
+
+usage() ->
+ <<"list_stomp_connections [<column> ...]">>.
+
+usage_additional() ->
+ Prefix = <<" must be one of ">>,
+ InfoItems = 'Elixir.Enum':join(lists:usort(?INFO_ITEMS), <<", ">>),
+ [
+ {<<"<column>">>, <<Prefix/binary, InfoItems/binary>>}
+ ].
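+%% Illustrative invocation, using the default columns applied by
+%% merge_defaults/2:
+%%   rabbitmqctl list_stomp_connections session_id conn_name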
+
+usage_doc_guides() ->
+ [?STOMP_GUIDE_URL].
+
+run(Args, #{node := NodeName,
+ timeout := Timeout,
+ verbose := Verbose}) ->
+ InfoKeys = case Verbose of
+ true -> ?INFO_ITEMS;
+ false -> 'Elixir.RabbitMQ.CLI.Ctl.InfoKeys':prepare_info_keys(Args)
+ end,
+ Nodes = 'Elixir.RabbitMQ.CLI.Core.Helpers':nodes_in_cluster(NodeName),
+
+ 'Elixir.RabbitMQ.CLI.Ctl.RpcStream':receive_list_items(
+ NodeName,
+ rabbit_stomp,
+ emit_connection_info_all,
+ [Nodes, InfoKeys],
+ Timeout,
+ InfoKeys,
+ length(Nodes)).
+
+banner(_, _) -> <<"Listing STOMP connections ...">>.
+
+output(Result, _Opts) ->
+ 'Elixir.RabbitMQ.CLI.DefaultOutput':output(Result).
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp.erl b/deps/rabbitmq_stomp/src/rabbit_stomp.erl
new file mode 100644
index 0000000000..449c2ef92f
--- /dev/null
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp.erl
@@ -0,0 +1,131 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stomp).
+
+-include("rabbit_stomp.hrl").
+
+-behaviour(application).
+-export([start/2, stop/1]).
+-export([parse_default_user/2]).
+-export([connection_info_local/1,
+ emit_connection_info_local/3,
+ emit_connection_info_all/4,
+ list/0,
+ close_all_client_connections/1]).
+
+-define(DEFAULT_CONFIGURATION,
+ #stomp_configuration{
+ default_login = undefined,
+ default_passcode = undefined,
+ implicit_connect = false,
+ ssl_cert_login = false}).
+
+start(normal, []) ->
+ Config = parse_configuration(),
+ Listeners = parse_listener_configuration(),
+ Result = rabbit_stomp_sup:start_link(Listeners, Config),
+ EMPid = case rabbit_event:start_link() of
+ {ok, Pid} -> Pid;
+ {error, {already_started, Pid}} -> Pid
+ end,
+ gen_event:add_handler(EMPid, rabbit_stomp_internal_event_handler, []),
+ Result.
+
+stop(_) ->
+ rabbit_stomp_sup:stop_listeners().
+
+-spec close_all_client_connections(string() | binary()) -> {'ok', non_neg_integer()}.
+close_all_client_connections(Reason) ->
+ Connections = list(),
+ [rabbit_stomp_reader:close_connection(Pid, Reason) || Pid <- Connections],
+ {ok, length(Connections)}.
+
+emit_connection_info_all(Nodes, Items, Ref, AggregatorPid) ->
+ Pids = [spawn_link(Node, rabbit_stomp, emit_connection_info_local,
+ [Items, Ref, AggregatorPid])
+ || Node <- Nodes],
+ rabbit_control_misc:await_emitters_termination(Pids),
+ ok.
+
+emit_connection_info_local(Items, Ref, AggregatorPid) ->
+ rabbit_control_misc:emitting_map_with_exit_handler(
+ AggregatorPid, Ref, fun(Pid) ->
+ rabbit_stomp_reader:info(Pid, Items)
+ end,
+ list()).
+
+connection_info_local(Items) ->
+ Connections = list(),
+ [rabbit_stomp_reader:info(Pid, Items) || Pid <- Connections].
+
+parse_listener_configuration() ->
+ {ok, Listeners} = application:get_env(tcp_listeners),
+ {ok, SslListeners} = application:get_env(ssl_listeners),
+ {Listeners, SslListeners}.
+
+parse_configuration() ->
+ {ok, UserConfig} = application:get_env(default_user),
+ Conf0 = parse_default_user(UserConfig, ?DEFAULT_CONFIGURATION),
+ {ok, SSLLogin} = application:get_env(ssl_cert_login),
+ {ok, ImplicitConnect} = application:get_env(implicit_connect),
+ Conf = Conf0#stomp_configuration{ssl_cert_login = SSLLogin,
+ implicit_connect = ImplicitConnect},
+ report_configuration(Conf),
+ Conf.
+
+parse_default_user([], Configuration) ->
+ Configuration;
+parse_default_user([{login, Login} | Rest], Configuration) ->
+ parse_default_user(Rest, Configuration#stomp_configuration{
+ default_login = Login});
+parse_default_user([{passcode, Passcode} | Rest], Configuration) ->
+ parse_default_user(Rest, Configuration#stomp_configuration{
+ default_passcode = Passcode});
+parse_default_user([Unknown | Rest], Configuration) ->
+ rabbit_log:warning("rabbit_stomp: ignoring invalid default_user "
+ "configuration option: ~p~n", [Unknown]),
+ parse_default_user(Rest, Configuration).
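+%% For illustration, the default_user entries handled above are assumed to
+%% come from a setting such as (classic config format):
+%%   {rabbitmq_stomp, [{default_user, [{login, "guest"}, {passcode, "guest"}]}]}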
+
+report_configuration(#stomp_configuration{
+ default_login = Login,
+ implicit_connect = ImplicitConnect,
+ ssl_cert_login = SSLCertLogin}) ->
+ case Login of
+ undefined -> ok;
+ _ -> rabbit_log:info("rabbit_stomp: default user '~s' "
+ "enabled~n", [Login])
+ end,
+
+ case ImplicitConnect of
+ true -> rabbit_log:info("rabbit_stomp: implicit connect enabled~n");
+ false -> ok
+ end,
+
+ case SSLCertLogin of
+ true -> rabbit_log:info("rabbit_stomp: ssl_cert_login enabled~n");
+ false -> ok
+ end,
+
+ ok.
+
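+%% Walks the listener/Ranch supervision tree on this node and collects the
+%% pid of each rabbit_stomp_reader, i.e. one pid per client connection.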
+list() ->
+ [Client
+ || {_, ListSupPid, _, _} <- supervisor2:which_children(rabbit_stomp_sup),
+ {_, RanchSup, supervisor, _} <- supervisor2:which_children(ListSupPid),
+ {ranch_conns_sup, ConnSup, _, _} <- supervisor:which_children(RanchSup),
+ {_, CliSup, _, _} <- supervisor:which_children(ConnSup),
+ {rabbit_stomp_reader, Client, _, _} <- supervisor:which_children(CliSup)].
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_client_sup.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_client_sup.erl
new file mode 100644
index 0000000000..d40e00f811
--- /dev/null
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_client_sup.erl
@@ -0,0 +1,50 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stomp_client_sup).
+-behaviour(supervisor2).
+-behaviour(ranch_protocol).
+
+-include_lib("rabbit_common/include/rabbit.hrl").
+
+-export([start_link/4, init/1]).
+
+start_link(Ref, _Sock, _Transport, Configuration) ->
+ {ok, SupPid} = supervisor2:start_link(?MODULE, []),
+ {ok, HelperPid} =
+ supervisor2:start_child(SupPid,
+ {rabbit_stomp_heartbeat_sup,
+ {rabbit_connection_helper_sup, start_link, []},
+ intrinsic, infinity, supervisor,
+ [rabbit_connection_helper_sup]}),
+
+ %% We want the reader to be transient since when it exits normally
+ %% the processor may have some work still to do (and the reader
+ %% tells the processor to exit). However, if the reader terminates
+ %% abnormally then we want to take everything down.
+ {ok, ReaderPid} = supervisor2:start_child(
+ SupPid,
+ {rabbit_stomp_reader,
+ {rabbit_stomp_reader,
+ start_link, [HelperPid, Ref, Configuration]},
+ intrinsic, ?WORKER_WAIT, worker,
+ [rabbit_stomp_reader]}),
+
+ {ok, SupPid, ReaderPid}.
+
+init([]) ->
+ {ok, {{one_for_all, 0, 1}, []}}.
+
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_connection_info.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_connection_info.erl
new file mode 100644
index 0000000000..e1562796e3
--- /dev/null
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_connection_info.erl
@@ -0,0 +1,25 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2018-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+-module(rabbit_stomp_connection_info).
+
+%% Note: this is necessary to prevent code:get_object_code from
+%% backing up due to a missing module. See VESC-888.
+
+%% API
+-export([additional_authn_params/4]).
+
+additional_authn_params(_Creds, _VHost, _Pid, _Infos) ->
+ [].
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_frame.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_frame.erl
new file mode 100644
index 0000000000..6b91dc3748
--- /dev/null
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_frame.erl
@@ -0,0 +1,266 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% stomp_frame implements the STOMP framing protocol "version 1.0", as
+%% per https://stomp.codehaus.org/Protocol
+
+-module(rabbit_stomp_frame).
+
+-include("rabbit_stomp_frame.hrl").
+-include("rabbit_stomp_headers.hrl").
+
+-export([parse/2, initial_state/0]).
+-export([header/2, header/3,
+ boolean_header/2, boolean_header/3,
+ integer_header/2, integer_header/3,
+ binary_header/2, binary_header/3]).
+-export([serialize/1, serialize/2]).
+
+initial_state() -> none.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% STOMP 1.1 frames basic syntax
+%% Rabbit modifications:
+%% o CR LF is equivalent to LF in all element terminators (eol).
+%% o Escape codes for header names and values include \r for CR
+%% and CR is not allowed.
+%% o Header names and values are not limited to UTF-8 strings.
+%% o Header values may contain unescaped colons
+%%
+%% frame_seq ::= *(noise frame)
+%% noise ::= *(NUL | eol)
+%% eol ::= LF | CR LF
+%% frame ::= cmd hdrs body NUL
+%% body ::= *OCTET
+%% cmd ::= 1*NOTEOL eol
+%% hdrs ::= *hdr eol
+%% hdr ::= hdrname COLON hdrvalue eol
+%% hdrname ::= 1*esc_char
+%% hdrvalue ::= *esc_char
+%% esc_char ::= HDROCT | BACKSLASH ESCCODE
+%%
+%% Terms in CAPS all represent sets (alternatives) of single octets.
+%% They are defined here using a small extension of BNF, minus (-):
+%%
+%% term1 - term2 denotes any of the possibilities in term1
+%% excluding those in term2.
+%% In this grammar minus is only used for sets of single octets.
+%%
+%% OCTET ::= '00'x..'FF'x % any octet
+%% NUL ::= '00'x % the zero octet
+%% LF ::= '\n' % '0a'x newline or linefeed
+%% CR ::= '\r' % '0d'x carriage return
+%% NOTEOL ::= OCTET - (CR | LF) % any octet except CR or LF
+%% BACKSLASH ::= '\\' % '5c'x
+%% ESCCODE ::= 'c' | 'n' | 'r' | BACKSLASH
+%% COLON ::= ':'
+%% HDROCT ::= NOTEOL - (COLON | BACKSLASH)
+%% % octets allowed in a header
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
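+
+%% For illustration, a minimal SEND frame accepted by this grammar, using
+%% LF line endings, an empty line separating headers from the body, and a
+%% terminating NUL octet:
+%%
+%%   SEND
+%%   destination:/queue/a
+%%
+%%   hello<NUL>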
+
+%% explicit frame characters
+-define(NUL, 0).
+-define(CR, $\r).
+-define(LF, $\n).
+-define(BSL, $\\).
+-define(COLON, $:).
+
+%% header escape codes
+-define(LF_ESC, $n).
+-define(BSL_ESC, $\\).
+-define(COLON_ESC, $c).
+-define(CR_ESC, $r).
+
+%% parser state
+-record(state, {acc, cmd, hdrs, hdrname}).
+
+parse(Content, {resume, Continuation}) -> Continuation(Content);
+parse(Content, none ) -> parser(Content, noframe, #state{}).
+
+more(Continuation) -> {more, {resume, Continuation}}.
+
+%% Single-function parser: Term :: noframe | command | headers | hdrname | hdrvalue
+%% general more and line-end detection
+parser(<<>>, Term , State) -> more(fun(Rest) -> parser(Rest, Term, State) end);
+parser(<<?CR>>, Term , State) -> more(fun(Rest) -> parser(<<?CR, Rest/binary>>, Term, State) end);
+parser(<<?CR, ?LF, Rest/binary>>, Term , State) -> parser(<<?LF, Rest/binary>>, Term, State);
+parser(<<?CR, Ch:8, _Rest/binary>>, Term , _State) -> {error, {unexpected_chars(Term), [?CR, Ch]}};
+%% escape processing (only in hdrname and hdrvalue terms)
+parser(<<?BSL>>, Term , State) -> more(fun(Rest) -> parser(<<?BSL, Rest/binary>>, Term, State) end);
+parser(<<?BSL, Ch:8, Rest/binary>>, Term , State)
+ when Term == hdrname;
+ Term == hdrvalue -> unescape(Ch, fun(Ech) -> parser(Rest, Term, accum(Ech, State)) end);
+%% inter-frame noise
+parser(<<?NUL, Rest/binary>>, noframe , State) -> parser(Rest, noframe, State);
+parser(<<?LF, Rest/binary>>, noframe , State) -> parser(Rest, noframe, State);
+%% detect transitions
+parser( Rest, noframe , State) -> goto(noframe, command, Rest, State);
+parser(<<?LF, Rest/binary>>, command , State) -> goto(command, headers, Rest, State);
+parser(<<?LF, Rest/binary>>, headers , State) -> goto(headers, body, Rest, State);
+parser( Rest, headers , State) -> goto(headers, hdrname, Rest, State);
+parser(<<?COLON, Rest/binary>>, hdrname , State) -> goto(hdrname, hdrvalue, Rest, State);
+parser(<<?LF, Rest/binary>>, hdrname , State) -> goto(hdrname, headers, Rest, State);
+parser(<<?LF, Rest/binary>>, hdrvalue, State) -> goto(hdrvalue, headers, Rest, State);
+%% accumulate
+parser(<<Ch:8, Rest/binary>>, Term , State) -> parser(Rest, Term, accum(Ch, State)).
+
+%% state transitions
+goto(noframe, command, Rest, State ) -> parser(Rest, command, State#state{acc = []});
+goto(command, headers, Rest, State = #state{acc = Acc} ) -> parser(Rest, headers, State#state{cmd = lists:reverse(Acc), hdrs = []});
+goto(headers, body, Rest, #state{cmd = Cmd, hdrs = Hdrs}) -> parse_body(Rest, #stomp_frame{command = Cmd, headers = Hdrs});
+goto(headers, hdrname, Rest, State ) -> parser(Rest, hdrname, State#state{acc = []});
+goto(hdrname, hdrvalue, Rest, State = #state{acc = Acc} ) -> parser(Rest, hdrvalue, State#state{acc = [], hdrname = lists:reverse(Acc)});
+goto(hdrname, headers, _Rest, #state{acc = Acc} ) -> {error, {header_no_value, lists:reverse(Acc)}}; % badly formed header -- fatal error
+goto(hdrvalue, headers, Rest, State = #state{acc = Acc, hdrs = Headers, hdrname = HdrName}) ->
+ parser(Rest, headers, State#state{hdrs = insert_header(Headers, HdrName, lists:reverse(Acc))}).
+
+%% error atom
+unexpected_chars(noframe) -> unexpected_chars_between_frames;
+unexpected_chars(command) -> unexpected_chars_in_command;
+unexpected_chars(hdrname) -> unexpected_chars_in_header;
+unexpected_chars(hdrvalue) -> unexpected_chars_in_header;
+unexpected_chars(_Term) -> unexpected_chars.
+
+%% general accumulation
+accum(Ch, State = #state{acc = Acc}) -> State#state{acc = [Ch | Acc]}.
+
+%% resolve escapes (with error processing)
+unescape(?LF_ESC, Fun) -> Fun(?LF);
+unescape(?BSL_ESC, Fun) -> Fun(?BSL);
+unescape(?COLON_ESC, Fun) -> Fun(?COLON);
+unescape(?CR_ESC, Fun) -> Fun(?CR);
+unescape(Ch, _Fun) -> {error, {bad_escape, [?BSL, Ch]}}.
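+
+%% Illustrative decode examples for the scheme above: the wire sequences
+%% "\c", "\n", "\r" and "\\" decode to ":", LF, CR and a backslash
+%% respectively; escape/1 further down performs the inverse mapping when
+%% serializing frames.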
+
+%% insert header unless already seen
+insert_header(Headers, Name, Value) ->
+ case lists:keymember(Name, 1, Headers) of
+ true -> Headers; % first header only
+ false -> [{Name, Value} | Headers]
+ end.
+
+parse_body(Content, Frame = #stomp_frame{command = Command}) ->
+ case Command of
+ "SEND" -> parse_body(Content, Frame, [], integer_header(Frame, ?HEADER_CONTENT_LENGTH, unknown));
+ _ -> parse_body(Content, Frame, [], unknown)
+ end.
+
+parse_body(Content, Frame, Chunks, unknown) ->
+ parse_body2(Content, Frame, Chunks, case firstnull(Content) of
+ -1 -> {more, unknown};
+ Pos -> {done, Pos}
+ end);
+parse_body(Content, Frame, Chunks, Remaining) ->
+ Size = byte_size(Content),
+ parse_body2(Content, Frame, Chunks, case Remaining >= Size of
+ true -> {more, Remaining - Size};
+ false -> {done, Remaining}
+ end).
+
+parse_body2(Content, Frame, Chunks, {more, Left}) ->
+ Chunks1 = finalize_chunk(Content, Chunks),
+ more(fun(Rest) -> parse_body(Rest, Frame, Chunks1, Left) end);
+parse_body2(Content, Frame, Chunks, {done, Pos}) ->
+ <<Chunk:Pos/binary, 0, Rest/binary>> = Content,
+ Body = lists:reverse(finalize_chunk(Chunk, Chunks)),
+ {ok, Frame#stomp_frame{body_iolist = Body}, Rest}.
+
+finalize_chunk(<<>>, Chunks) -> Chunks;
+finalize_chunk(Chunk, Chunks) -> [Chunk | Chunks].
+
+default_value({ok, Value}, _DefaultValue) -> Value;
+default_value(not_found, DefaultValue) -> DefaultValue.
+
+header(#stomp_frame{headers = Headers}, Key) ->
+ case lists:keysearch(Key, 1, Headers) of
+ {value, {_, Str}} -> {ok, Str};
+ _ -> not_found
+ end.
+
+header(F, K, D) -> default_value(header(F, K), D).
+
+boolean_header(#stomp_frame{headers = Headers}, Key) ->
+ case lists:keysearch(Key, 1, Headers) of
+ {value, {_, "true"}} -> {ok, true};
+ {value, {_, "false"}} -> {ok, false};
+ %% some Python clients serialize True/False as "True"/"False"
+ {value, {_, "True"}} -> {ok, true};
+ {value, {_, "False"}} -> {ok, false};
+ _ -> not_found
+ end.
+
+boolean_header(F, K, D) -> default_value(boolean_header(F, K), D).
+
+internal_integer_header(Headers, Key) ->
+ case lists:keysearch(Key, 1, Headers) of
+ {value, {_, Str}} -> {ok, list_to_integer(string:strip(Str))};
+ _ -> not_found
+ end.
+
+integer_header(#stomp_frame{headers = Headers}, Key) ->
+ internal_integer_header(Headers, Key).
+
+integer_header(F, K, D) -> default_value(integer_header(F, K), D).
+
+binary_header(F, K) ->
+ case header(F, K) of
+ {ok, Str} -> {ok, list_to_binary(Str)};
+ not_found -> not_found
+ end.
+
+binary_header(F, K, D) -> default_value(binary_header(F, K), D).
+
+serialize(Frame) ->
+ serialize(Frame, true).
+
+%% second argument controls whether a trailing linefeed
+%% character should be added, see rabbitmq/rabbitmq-stomp#39.
+serialize(Frame, true) ->
+ serialize(Frame, false) ++ [?LF];
+serialize(#stomp_frame{command = Command,
+ headers = Headers,
+ body_iolist = BodyFragments}, false) ->
+ Len = iolist_size(BodyFragments),
+ [Command, ?LF,
+ lists:map(fun serialize_header/1,
+ lists:keydelete(?HEADER_CONTENT_LENGTH, 1, Headers)),
+ if
+ Len > 0 -> [?HEADER_CONTENT_LENGTH ++ ":", integer_to_list(Len), ?LF];
+ true -> []
+ end,
+ ?LF, BodyFragments, 0].
+
+serialize_header({K, V}) when is_integer(V) -> hdr(escape(K), integer_to_list(V));
+serialize_header({K, V}) when is_boolean(V) -> hdr(escape(K), boolean_to_list(V));
+serialize_header({K, V}) when is_list(V) -> hdr(escape(K), escape(V)).
+
+boolean_to_list(true) -> "true";
+boolean_to_list(_) -> "false".
+
+hdr(K, V) -> [K, ?COLON, V, ?LF].
+
+escape(Str) -> [escape1(Ch) || Ch <- Str].
+
+escape1(?COLON) -> [?BSL, ?COLON_ESC];
+escape1(?BSL) -> [?BSL, ?BSL_ESC];
+escape1(?LF) -> [?BSL, ?LF_ESC];
+escape1(?CR) -> [?BSL, ?CR_ESC];
+escape1(Ch) -> Ch.
+
+firstnull(Content) -> firstnull(Content, 0).
+
+firstnull(<<>>, _N) -> -1;
+firstnull(<<0, _Rest/binary>>, N) -> N;
+firstnull(<<_Ch, Rest/binary>>, N) -> firstnull(Rest, N+1).
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_internal_event_handler.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_internal_event_handler.erl
new file mode 100644
index 0000000000..47331312ce
--- /dev/null
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_internal_event_handler.erl
@@ -0,0 +1,46 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stomp_internal_event_handler).
+
+-behaviour(gen_event).
+
+-export([init/1, handle_event/2, handle_call/2, handle_info/2, terminate/2, code_change/3]).
+
+-import(rabbit_misc, [pget/2]).
+
+init([]) ->
+ {ok, []}.
+
+handle_event({event, maintenance_connections_closed, _Info, _, _}, State) ->
+ %% we should close our connections
+ {ok, NConnections} = rabbit_stomp:close_all_client_connections("node is being put into maintenance mode"),
+ rabbit_log:alert("Closed ~b local STOMP client connections", [NConnections]),
+ {ok, State};
+handle_event(_Event, State) ->
+ {ok, State}.
+
+handle_call(_Request, State) ->
+ {ok, State}.
+
+handle_info(_Info, State) ->
+ {ok, State}.
+
+terminate(_Reason, _State) ->
+ ok.
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_processor.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_processor.erl
new file mode 100644
index 0000000000..570a7a146a
--- /dev/null
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_processor.erl
@@ -0,0 +1,1220 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stomp_processor).
+
+-export([initial_state/2, process_frame/2, flush_and_die/1]).
+-export([flush_pending_receipts/3,
+ handle_exit/3,
+ cancel_consumer/2,
+ send_delivery/5]).
+
+-export([adapter_name/1]).
+-export([info/2]).
+
+-include_lib("amqp_client/include/amqp_client.hrl").
+-include_lib("amqp_client/include/rabbit_routing_prefixes.hrl").
+-include("rabbit_stomp_frame.hrl").
+-include("rabbit_stomp.hrl").
+-include("rabbit_stomp_headers.hrl").
+
+-record(proc_state, {session_id, channel, connection, subscriptions,
+ version, start_heartbeat_fun, pending_receipts,
+ config, route_state, reply_queues, frame_transformer,
+ adapter_info, send_fun, ssl_login_name, peer_addr,
+ %% see rabbitmq/rabbitmq-stomp#39
+ trailing_lf, auth_mechanism, auth_login,
+ default_topic_exchange, default_nack_requeue}).
+
+-record(subscription, {dest_hdr, ack_mode, multi_ack, description}).
+
+-define(FLUSH_TIMEOUT, 60000).
+
+adapter_name(State) ->
+ #proc_state{adapter_info = #amqp_adapter_info{name = Name}} = State,
+ Name.
+
+%%----------------------------------------------------------------------------
+
+-spec initial_state(
+ #stomp_configuration{},
+ {SendFun, AdapterInfo, SSLLoginName, PeerAddr})
+ -> #proc_state{}
+ when SendFun :: fun((atom(), binary()) -> term()),
+ AdapterInfo :: #amqp_adapter_info{},
+ SSLLoginName :: atom() | binary(),
+ PeerAddr :: inet:ip_address().
+
+-type process_frame_result() ::
+ {ok, term(), #proc_state{}} |
+ {stop, term(), #proc_state{}}.
+
+-spec process_frame(#stomp_frame{}, #proc_state{}) ->
+ process_frame_result().
+
+-spec flush_and_die(#proc_state{}) -> #proc_state{}.
+
+-spec command({Command, Frame}, State) -> process_frame_result()
+ when Command :: string(),
+ Frame :: #stomp_frame{},
+ State :: #proc_state{}.
+
+-type process_fun() :: fun((#proc_state{}) ->
+ {ok, #stomp_frame{}, #proc_state{}} |
+ {error, string(), string(), #proc_state{}} |
+ {stop, term(), #proc_state{}}).
+-spec process_request(process_fun(), fun((#proc_state{}) -> #proc_state{}), #proc_state{}) ->
+ process_frame_result().
+
+-spec flush_pending_receipts(DeliveryTag, IsMulti, State) -> State
+ when State :: #proc_state{},
+ DeliveryTag :: term(),
+ IsMulti :: boolean().
+
+-spec handle_exit(From, Reason, State) -> unknown_exit | {stop, Reason, State}
+ when State :: #proc_state{},
+ From :: pid(),
+ Reason :: term().
+
+-spec cancel_consumer(binary(), #proc_state{}) -> process_frame_result().
+
+-spec send_delivery(#'basic.deliver'{}, term(), term(), term(),
+ #proc_state{}) -> #proc_state{}.
+
+%%----------------------------------------------------------------------------
+
+
+%%----------------------------------------------------------------------------
+%% Public API
+%%----------------------------------------------------------------------------
+
+process_frame(Frame = #stomp_frame{command = Command}, State) ->
+ command({Command, Frame}, State).
+
+flush_and_die(State) ->
+ close_connection(State).
+
+info(session_id, #proc_state{session_id = Val}) ->
+ Val;
+info(channel, #proc_state{channel = Val}) -> Val;
+info(version, #proc_state{version = Val}) -> Val;
+info(implicit_connect, #proc_state{config = #stomp_configuration{implicit_connect = Val}}) -> Val;
+info(auth_login, #proc_state{auth_login = Val}) -> Val;
+info(auth_mechanism, #proc_state{auth_mechanism = Val}) -> Val;
+info(peer_addr, #proc_state{peer_addr = Val}) -> Val;
+info(host, #proc_state{adapter_info = #amqp_adapter_info{host = Val}}) -> Val;
+info(port, #proc_state{adapter_info = #amqp_adapter_info{port = Val}}) -> Val;
+info(peer_host, #proc_state{adapter_info = #amqp_adapter_info{peer_host = Val}}) -> Val;
+info(peer_port, #proc_state{adapter_info = #amqp_adapter_info{peer_port = Val}}) -> Val;
+info(protocol, #proc_state{adapter_info = #amqp_adapter_info{protocol = Val}}) ->
+ case Val of
+ {Proto, Version} -> {Proto, rabbit_data_coercion:to_binary(Version)};
+ Other -> Other
+ end;
+info(channels, PState) -> additional_info(channels, PState);
+info(channel_max, PState) -> additional_info(channel_max, PState);
+info(frame_max, PState) -> additional_info(frame_max, PState);
+info(client_properties, PState) -> additional_info(client_properties, PState);
+info(ssl, PState) -> additional_info(ssl, PState);
+info(ssl_protocol, PState) -> additional_info(ssl_protocol, PState);
+info(ssl_key_exchange, PState) -> additional_info(ssl_key_exchange, PState);
+info(ssl_cipher, PState) -> additional_info(ssl_cipher, PState);
+info(ssl_hash, PState) -> additional_info(ssl_hash, PState).
+
+initial_state(Configuration,
+ {SendFun, AdapterInfo0 = #amqp_adapter_info{additional_info = Extra},
+ SSLLoginName, PeerAddr}) ->
+ %% STOMP connections use exactly one channel. The frame max is not
+ %% applicable and there is no way to know what client is used.
+ AdapterInfo = AdapterInfo0#amqp_adapter_info{additional_info=[
+ {channels, 1},
+ {channel_max, 1},
+ {frame_max, 0},
+ %% TODO: can we use a header to make it possible for clients
+ %% to override this value?
+ {client_properties, [{<<"product">>, longstr, <<"STOMP client">>}]}
+ |Extra]},
+ #proc_state {
+ send_fun = SendFun,
+ adapter_info = AdapterInfo,
+ ssl_login_name = SSLLoginName,
+ peer_addr = PeerAddr,
+ session_id = none,
+ channel = none,
+ connection = none,
+ subscriptions = #{},
+ version = none,
+ pending_receipts = undefined,
+ config = Configuration,
+ route_state = rabbit_routing_util:init_state(),
+ reply_queues = #{},
+ frame_transformer = undefined,
+ trailing_lf = application:get_env(rabbitmq_stomp, trailing_lf, true),
+ default_topic_exchange = application:get_env(rabbitmq_stomp, default_topic_exchange, <<"amq.topic">>),
+ default_nack_requeue = application:get_env(rabbitmq_stomp, default_nack_requeue, true)}.
+
+
+command({"STOMP", Frame}, State) ->
+ process_connect(no_implicit, Frame, State);
+
+command({"CONNECT", Frame}, State) ->
+ process_connect(no_implicit, Frame, State);
+
+command(Request, State = #proc_state{channel = none,
+ config = #stomp_configuration{
+ implicit_connect = true}}) ->
+ {ok, State1 = #proc_state{channel = Ch}, _} =
+ process_connect(implicit, #stomp_frame{headers = []}, State),
+ case Ch of
+ none -> {stop, normal, State1};
+ _ -> command(Request, State1)
+ end;
+
+command(_Request, State = #proc_state{channel = none,
+ config = #stomp_configuration{
+ implicit_connect = false}}) ->
+ {ok, send_error("Illegal command",
+ "You must log in using CONNECT first",
+ State), none};
+
+command({Command, Frame}, State = #proc_state{frame_transformer = FT}) ->
+ Frame1 = FT(Frame),
+ process_request(
+ fun(StateN) ->
+ case validate_frame(Command, Frame1, StateN) of
+ R = {error, _, _, _} -> R;
+ _ -> handle_frame(Command, Frame1, StateN)
+ end
+ end,
+ fun(StateM) -> ensure_receipt(Frame1, StateM) end,
+ State).
+
+cancel_consumer(Ctag, State) ->
+ process_request(
+ fun(StateN) -> server_cancel_consumer(Ctag, StateN) end,
+ State).
+
+handle_exit(Conn, {shutdown, {server_initiated_close, Code, Explanation}},
+ State = #proc_state{connection = Conn}) ->
+ amqp_death(Code, Explanation, State);
+handle_exit(Conn, {shutdown, {connection_closing,
+ {server_initiated_close, Code, Explanation}}},
+ State = #proc_state{connection = Conn}) ->
+ amqp_death(Code, Explanation, State);
+handle_exit(Conn, Reason, State = #proc_state{connection = Conn}) ->
+ _ = send_error("AMQP connection died", "Reason: ~p", [Reason], State),
+ {stop, {conn_died, Reason}, State};
+
+handle_exit(Ch, {shutdown, {server_initiated_close, Code, Explanation}},
+ State = #proc_state{channel = Ch}) ->
+ amqp_death(Code, Explanation, State);
+
+handle_exit(Ch, Reason, State = #proc_state{channel = Ch}) ->
+ _ = send_error("AMQP channel died", "Reason: ~p", [Reason], State),
+ {stop, {channel_died, Reason}, State};
+handle_exit(_, _, _) -> unknown_exit.
+
+
+process_request(ProcessFun, State) ->
+ process_request(ProcessFun, fun (StateM) -> StateM end, State).
+
+
+process_request(ProcessFun, SuccessFun, State) ->
+ Res = case catch ProcessFun(State) of
+ {'EXIT',
+ {{shutdown,
+ {server_initiated_close, ReplyCode, Explanation}}, _}} ->
+ amqp_death(ReplyCode, Explanation, State);
+ {'EXIT', {amqp_error, access_refused, Msg, _}} ->
+ amqp_death(access_refused, Msg, State);
+ {'EXIT', Reason} ->
+ priv_error("Processing error", "Processing error",
+ Reason, State);
+ Result ->
+ Result
+ end,
+ case Res of
+ {ok, Frame, NewState = #proc_state{connection = Conn}} ->
+ _ = case Frame of
+ none -> ok;
+ _ -> send_frame(Frame, NewState)
+ end,
+ {ok, SuccessFun(NewState), Conn};
+ {error, Message, Detail, NewState = #proc_state{connection = Conn}} ->
+ {ok, send_error(Message, Detail, NewState), Conn};
+ {stop, normal, NewState} ->
+ {stop, normal, SuccessFun(NewState)};
+ {stop, R, NewState} ->
+ {stop, R, NewState}
+ end.
+
+process_connect(Implicit, Frame,
+ State = #proc_state{channel = none,
+ config = Config,
+ ssl_login_name = SSLLoginName,
+ adapter_info = AdapterInfo}) ->
+ process_request(
+ fun(StateN) ->
+ case negotiate_version(Frame) of
+ {ok, Version} ->
+ FT = frame_transformer(Version),
+ Frame1 = FT(Frame),
+ {Auth, {Username, Passwd}} = creds(Frame1, SSLLoginName, Config),
+ {ok, DefaultVHost} = application:get_env(
+ rabbitmq_stomp, default_vhost),
+ {ProtoName, _} = AdapterInfo#amqp_adapter_info.protocol,
+ Res = do_login(
+ Username, Passwd,
+ login_header(Frame1, ?HEADER_HOST, DefaultVHost),
+ login_header(Frame1, ?HEADER_HEART_BEAT, "0,0"),
+ AdapterInfo#amqp_adapter_info{
+ protocol = {ProtoName, Version}}, Version,
+ StateN#proc_state{frame_transformer = FT,
+ auth_mechanism = Auth,
+ auth_login = Username}),
+ case {Res, Implicit} of
+ {{ok, _, StateN1}, implicit} -> ok(StateN1);
+ _ -> Res
+ end;
+ {error, no_common_version} ->
+ error("Version mismatch",
+ "Supported versions are ~s~n",
+ [string:join(?SUPPORTED_VERSIONS, ",")],
+ StateN)
+ end
+ end,
+ State).
+
+creds(_, _, #stomp_configuration{default_login = DefLogin,
+ default_passcode = DefPasscode,
+ force_default_creds = true}) ->
+ {config, {iolist_to_binary(DefLogin), iolist_to_binary(DefPasscode)}};
+creds(Frame, SSLLoginName,
+ #stomp_configuration{default_login = DefLogin,
+ default_passcode = DefPasscode}) ->
+ PasswordCreds = {login_header(Frame, ?HEADER_LOGIN, DefLogin),
+ login_header(Frame, ?HEADER_PASSCODE, DefPasscode)},
+ case {rabbit_stomp_frame:header(Frame, ?HEADER_LOGIN), SSLLoginName} of
+ {not_found, none} -> {config, PasswordCreds};
+ {not_found, SSLName} -> {ssl, {SSLName, none}};
+ _ -> {stomp_headers, PasswordCreds}
+ end.
+
+login_header(Frame, Key, Default) when is_binary(Default) ->
+ login_header(Frame, Key, binary_to_list(Default));
+login_header(Frame, Key, Default) ->
+ case rabbit_stomp_frame:header(Frame, Key, Default) of
+ undefined -> undefined;
+ Hdr -> list_to_binary(Hdr)
+ end.
+
+%%----------------------------------------------------------------------------
+%% Frame Transformation
+%%----------------------------------------------------------------------------
+
+frame_transformer("1.0") -> fun rabbit_stomp_util:trim_headers/1;
+frame_transformer(_) -> fun(Frame) -> Frame end.
+
+%%----------------------------------------------------------------------------
+%% Frame Validation
+%%----------------------------------------------------------------------------
+
+report_missing_id_header(State) ->
+ error("Missing Header",
+ "Header 'id' is required for durable subscriptions", State).
+
+validate_frame(Command, Frame, State)
+ when Command =:= "SUBSCRIBE" orelse Command =:= "UNSUBSCRIBE" ->
+ Hdr = fun(Name) -> rabbit_stomp_frame:header(Frame, Name) end,
+ case {Hdr(?HEADER_DURABLE), Hdr(?HEADER_PERSISTENT), Hdr(?HEADER_ID)} of
+ {{ok, "true"}, _, not_found} ->
+ report_missing_id_header(State);
+ {_, {ok, "true"}, not_found} ->
+ report_missing_id_header(State);
+ _ ->
+ ok(State)
+ end;
+validate_frame(_Command, _Frame, State) ->
+ ok(State).
+
+%%----------------------------------------------------------------------------
+%% Frame handlers
+%%----------------------------------------------------------------------------
+
+handle_frame("DISCONNECT", _Frame, State) ->
+ {stop, normal, close_connection(State)};
+
+handle_frame("SUBSCRIBE", Frame, State) ->
+ with_destination("SUBSCRIBE", Frame, State, fun do_subscribe/4);
+
+handle_frame("UNSUBSCRIBE", Frame, State) ->
+ ConsumerTag = rabbit_stomp_util:consumer_tag(Frame),
+ cancel_subscription(ConsumerTag, Frame, State);
+
+handle_frame("SEND", Frame, State) ->
+ without_headers(?HEADERS_NOT_ON_SEND, "SEND", Frame, State,
+ fun (_Command, Frame1, State1) ->
+ with_destination("SEND", Frame1, State1, fun do_send/4)
+ end);
+
+handle_frame("ACK", Frame, State) ->
+ ack_action("ACK", Frame, State, fun create_ack_method/3);
+
+handle_frame("NACK", Frame, State) ->
+ ack_action("NACK", Frame, State, fun create_nack_method/3);
+
+handle_frame("BEGIN", Frame, State) ->
+ transactional_action(Frame, "BEGIN", fun begin_transaction/2, State);
+
+handle_frame("COMMIT", Frame, State) ->
+ transactional_action(Frame, "COMMIT", fun commit_transaction/2, State);
+
+handle_frame("ABORT", Frame, State) ->
+ transactional_action(Frame, "ABORT", fun abort_transaction/2, State);
+
+handle_frame(Command, _Frame, State) ->
+ error("Bad command",
+ "Could not interpret command ~p~n",
+ [Command],
+ State).
+
+%%----------------------------------------------------------------------------
+%% Internal helpers for processing frames callbacks
+%%----------------------------------------------------------------------------
+
+ack_action(Command, Frame,
+ State = #proc_state{subscriptions = Subs,
+ channel = Channel,
+ version = Version,
+ default_nack_requeue = DefaultNackRequeue}, MethodFun) ->
+ AckHeader = rabbit_stomp_util:ack_header_name(Version),
+ case rabbit_stomp_frame:header(Frame, AckHeader) of
+ {ok, AckValue} ->
+ case rabbit_stomp_util:parse_message_id(AckValue) of
+ {ok, {ConsumerTag, _SessionId, DeliveryTag}} ->
+ case maps:find(ConsumerTag, Subs) of
+ {ok, Sub} ->
+ Requeue = rabbit_stomp_frame:boolean_header(Frame, "requeue", DefaultNackRequeue),
+ Method = MethodFun(DeliveryTag, Sub, Requeue),
+ case transactional(Frame) of
+ {yes, Transaction} ->
+ extend_transaction(
+ Transaction, {Method}, State);
+ no ->
+ amqp_channel:call(Channel, Method),
+ ok(State)
+ end;
+ error ->
+ error("Subscription not found",
+ "Message with id ~p has no subscription",
+ [AckValue],
+ State)
+ end;
+ _ ->
+ error("Invalid header",
+ "~p must include a valid ~p header~n",
+ [Command, AckHeader],
+ State)
+ end;
+ not_found ->
+ error("Missing header",
+ "~p must include the ~p header~n",
+ [Command, AckHeader],
+ State)
+ end.
+
+%%----------------------------------------------------------------------------
+%% Internal helpers for processing frames callbacks
+%%----------------------------------------------------------------------------
+
+server_cancel_consumer(ConsumerTag, State = #proc_state{subscriptions = Subs}) ->
+ case maps:find(ConsumerTag, Subs) of
+ error ->
+ error("Server cancelled unknown subscription",
+ "Consumer tag ~p is not associated with a subscription.~n",
+ [ConsumerTag],
+ State);
+ {ok, Subscription = #subscription{description = Description}} ->
+ Id = case rabbit_stomp_util:tag_to_id(ConsumerTag) of
+ {ok, {_, Id1}} -> Id1;
+ {error, {_, Id1}} -> "Unknown[" ++ Id1 ++ "]"
+ end,
+ _ = send_error_frame("Server cancelled subscription",
+ [{?HEADER_SUBSCRIPTION, Id}],
+ "The server has canceled a subscription.~n"
+ "No more messages will be delivered for ~p.~n",
+ [Description],
+ State),
+ tidy_canceled_subscription(ConsumerTag, Subscription,
+ undefined, State)
+ end.
+
+cancel_subscription({error, invalid_prefix}, _Frame, State) ->
+ error("Invalid id",
+ "UNSUBSCRIBE 'id' may not start with ~s~n",
+ [?TEMP_QUEUE_ID_PREFIX],
+ State);
+
+cancel_subscription({error, _}, _Frame, State) ->
+ error("Missing destination or id",
+ "UNSUBSCRIBE must include a 'destination' or 'id' header",
+ State);
+
+cancel_subscription({ok, ConsumerTag, Description}, Frame,
+ State = #proc_state{subscriptions = Subs,
+ channel = Channel}) ->
+ case maps:find(ConsumerTag, Subs) of
+ error ->
+ error("No subscription found",
+ "UNSUBSCRIBE must refer to an existing subscription.~n"
+ "Subscription to ~p not found.~n",
+ [Description],
+ State);
+ {ok, Subscription = #subscription{description = Descr}} ->
+ case amqp_channel:call(Channel,
+ #'basic.cancel'{
+ consumer_tag = ConsumerTag}) of
+ #'basic.cancel_ok'{consumer_tag = ConsumerTag} ->
+ tidy_canceled_subscription(ConsumerTag, Subscription,
+ Frame, State);
+ _ ->
+ error("Failed to cancel subscription",
+ "UNSUBSCRIBE to ~p failed.~n",
+ [Descr],
+ State)
+ end
+ end.
+
+%% Server-initiated cancellations will pass an undefined instead of a
+%% STOMP frame. In this case we know that the queue was deleted and
+%% thus we don't have to clean it up.
+tidy_canceled_subscription(ConsumerTag, _Subscription,
+ undefined, State = #proc_state{subscriptions = Subs}) ->
+ Subs1 = maps:remove(ConsumerTag, Subs),
+ ok(State#proc_state{subscriptions = Subs1});
+
+%% Client-initiated cancellations will pass an actual frame
+tidy_canceled_subscription(ConsumerTag, #subscription{dest_hdr = DestHdr},
+ Frame, State = #proc_state{subscriptions = Subs}) ->
+ Subs1 = maps:remove(ConsumerTag, Subs),
+ {ok, Dest} = rabbit_routing_util:parse_endpoint(DestHdr),
+ maybe_delete_durable_sub(Dest, Frame, State#proc_state{subscriptions = Subs1}).
+
+maybe_delete_durable_sub({topic, Name}, Frame,
+ State = #proc_state{channel = Channel}) ->
+ case rabbit_stomp_util:has_durable_header(Frame) of
+ true ->
+ {ok, Id} = rabbit_stomp_frame:header(Frame, ?HEADER_ID),
+ QName = rabbit_stomp_util:subscription_queue_name(Name, Id, Frame),
+ amqp_channel:call(Channel,
+ #'queue.delete'{queue = list_to_binary(QName),
+ nowait = false}),
+ ok(State);
+ false ->
+ ok(State)
+ end;
+maybe_delete_durable_sub(_Destination, _Frame, State) ->
+ ok(State).
+
+with_destination(Command, Frame, State, Fun) ->
+ case rabbit_stomp_frame:header(Frame, ?HEADER_DESTINATION) of
+ {ok, DestHdr} ->
+ case rabbit_routing_util:parse_endpoint(DestHdr) of
+ {ok, Destination} ->
+ case Fun(Destination, DestHdr, Frame, State) of
+ {error, invalid_endpoint} ->
+ error("Invalid destination",
+ "'~s' is not a valid destination for '~s'~n",
+ [DestHdr, Command],
+ State);
+ {error, {invalid_destination, Msg}} ->
+ error("Invalid destination",
+ "~s",
+ [Msg],
+ State);
+ {error, Reason} ->
+ throw(Reason);
+ Result ->
+ Result
+ end;
+ {error, {invalid_destination, Type, Content}} ->
+ error("Invalid destination",
+ "'~s' is not a valid ~p destination~n",
+ [Content, Type],
+ State);
+ {error, {unknown_destination, Content}} ->
+ error("Unknown destination",
+ "'~s' is not a valid destination.~n"
+ "Valid destination types are: ~s.~n",
+ [Content,
+ string:join(rabbit_routing_util:all_dest_prefixes(),
+ ", ")], State)
+ end;
+ not_found ->
+ error("Missing destination",
+ "~p must include a 'destination' header~n",
+ [Command],
+ State)
+ end.
+
+without_headers([Hdr | Hdrs], Command, Frame, State, Fun) ->
+ case rabbit_stomp_frame:header(Frame, Hdr) of
+ {ok, _} ->
+ error("Invalid header",
+ "'~s' is not allowed on '~s'.~n",
+ [Hdr, Command],
+ State);
+ not_found ->
+ without_headers(Hdrs, Command, Frame, State, Fun)
+ end;
+without_headers([], Command, Frame, State, Fun) ->
+ Fun(Command, Frame, State).
+
+do_login(undefined, _, _, _, _, _, State) ->
+ error("Bad CONNECT", "Missing login or passcode header(s)", State);
+do_login(Username, Passwd, VirtualHost, Heartbeat, AdapterInfo, Version,
+ State = #proc_state{peer_addr = Addr}) ->
+ case start_connection(
+ #amqp_params_direct{username = Username,
+ password = Passwd,
+ virtual_host = VirtualHost,
+ adapter_info = AdapterInfo}, Username, Addr) of
+ {ok, Connection} ->
+ link(Connection),
+ {ok, Channel} = amqp_connection:open_channel(Connection),
+ link(Channel),
+ amqp_channel:enable_delivery_flow_control(Channel),
+ SessionId = rabbit_guid:string(rabbit_guid:gen_secure(), "session"),
+ {SendTimeout, ReceiveTimeout} = ensure_heartbeats(Heartbeat),
+
+ Headers = [{?HEADER_SESSION, SessionId},
+ {?HEADER_HEART_BEAT,
+ io_lib:format("~B,~B", [SendTimeout, ReceiveTimeout])},
+ {?HEADER_VERSION, Version}],
+ ok("CONNECTED",
+ case application:get_env(rabbitmq_stomp, hide_server_info, false) of
+ true -> Headers;
+ false -> [{?HEADER_SERVER, server_header()} | Headers]
+ end,
+ "",
+ State#proc_state{session_id = SessionId,
+ channel = Channel,
+ connection = Connection,
+ version = Version});
+ {error, {auth_failure, _}} ->
+ rabbit_log:warning("STOMP login failed for user ~p~n",
+ [binary_to_list(Username)]),
+ error("Bad CONNECT", "Access refused for user '" ++
+ binary_to_list(Username) ++ "'~n", [], State);
+ {error, not_allowed} ->
+ rabbit_log:warning("STOMP login failed - not_allowed "
+ "(vhost access not allowed)~n"),
+ error("Bad CONNECT", "Virtual host '" ++
+ binary_to_list(VirtualHost) ++
+ "' access denied", State);
+ {error, access_refused} ->
+ rabbit_log:warning("STOMP login failed - access_refused "
+ "(vhost access not allowed)~n"),
+ error("Bad CONNECT", "Virtual host '" ++
+ binary_to_list(VirtualHost) ++
+ "' access denied", State);
+ {error, not_loopback} ->
+ rabbit_log:warning("STOMP login failed - access_refused "
+ "(user must access over loopback)~n"),
+ error("Bad CONNECT", "non-loopback access denied", State)
+ end.
+
+start_connection(Params, Username, Addr) ->
+ case amqp_connection:start(Params) of
+ {ok, Conn} -> case rabbit_access_control:check_user_loopback(
+ Username, Addr) of
+ ok -> {ok, Conn};
+ not_allowed -> amqp_connection:close(Conn),
+ {error, not_loopback}
+ end;
+ {error, E} -> {error, E}
+ end.
+
+server_header() ->
+ {ok, Product} = application:get_key(rabbit, description),
+ {ok, Version} = application:get_key(rabbit, vsn),
+ rabbit_misc:format("~s/~s", [Product, Version]).
+
+do_subscribe(Destination, DestHdr, Frame,
+ State = #proc_state{subscriptions = Subs,
+ route_state = RouteState,
+ channel = Channel,
+ default_topic_exchange = DfltTopicEx}) ->
+ check_subscription_access(Destination, State),
+ Prefetch =
+ rabbit_stomp_frame:integer_header(Frame, ?HEADER_PREFETCH_COUNT,
+ undefined),
+ {AckMode, IsMulti} = rabbit_stomp_util:ack_mode(Frame),
+ case ensure_endpoint(source, Destination, Frame, Channel, RouteState) of
+ {ok, Queue, RouteState1} ->
+ {ok, ConsumerTag, Description} =
+ rabbit_stomp_util:consumer_tag(Frame),
+ case Prefetch of
+ undefined -> ok;
+ _ -> amqp_channel:call(
+ Channel, #'basic.qos'{prefetch_count = Prefetch})
+ end,
+ case maps:find(ConsumerTag, Subs) of
+ {ok, _} ->
+ Message = "Duplicated subscription identifier",
+ Detail = "A subscription identified by '~s' already exists.",
+ _ = error(Message, Detail, [ConsumerTag], State),
+ _ = send_error(Message, Detail, [ConsumerTag], State),
+ {stop, normal, close_connection(State)};
+ error ->
+ ExchangeAndKey = parse_routing(Destination, DfltTopicEx),
+ try
+ amqp_channel:subscribe(Channel,
+ #'basic.consume'{
+ queue = Queue,
+ consumer_tag = ConsumerTag,
+ no_local = false,
+ no_ack = (AckMode == auto),
+ exclusive = false,
+ arguments = []},
+ self()),
+ ok = rabbit_routing_util:ensure_binding(
+ Queue, ExchangeAndKey, Channel)
+ catch exit:Err ->
+ %% it's safe to delete this queue, it
+ %% was server-named and declared by us
+ case Destination of
+ {exchange, _} ->
+ ok = maybe_clean_up_queue(Queue, State);
+ {topic, _} ->
+ ok = maybe_clean_up_queue(Queue, State);
+ _ ->
+ ok
+ end,
+ exit(Err)
+ end,
+ ok(State#proc_state{subscriptions =
+ maps:put(
+ ConsumerTag,
+ #subscription{dest_hdr = DestHdr,
+ ack_mode = AckMode,
+ multi_ack = IsMulti,
+ description = Description},
+ Subs),
+ route_state = RouteState1})
+ end;
+ {error, _} = Err ->
+ Err
+ end.
+
+check_subscription_access(Destination = {topic, _Topic},
+ #proc_state{auth_login = _User,
+ connection = Connection,
+ default_topic_exchange = DfltTopicEx}) ->
+ [{amqp_params, AmqpParams}, {internal_user, InternalUser = #user{username = Username}}] =
+ amqp_connection:info(Connection, [amqp_params, internal_user]),
+ #amqp_params_direct{virtual_host = VHost} = AmqpParams,
+ {Exchange, RoutingKey} = parse_routing(Destination, DfltTopicEx),
+ Resource = #resource{virtual_host = VHost,
+ kind = topic,
+ name = rabbit_data_coercion:to_binary(Exchange)},
+ Context = #{routing_key => rabbit_data_coercion:to_binary(RoutingKey),
+ variable_map => #{<<"vhost">> => VHost, <<"username">> => Username}
+ },
+ rabbit_access_control:check_topic_access(InternalUser, Resource, read, Context);
+check_subscription_access(_, _) ->
+ authorized.
+
+maybe_clean_up_queue(Queue, #proc_state{connection = Connection}) ->
+ {ok, Channel} = amqp_connection:open_channel(Connection),
+ catch amqp_channel:call(Channel, #'queue.delete'{queue = Queue}),
+ catch amqp_channel:close(Channel),
+ ok.
+
+do_send(Destination, _DestHdr,
+ Frame = #stomp_frame{body_iolist = BodyFragments},
+ State = #proc_state{channel = Channel,
+ route_state = RouteState,
+ default_topic_exchange = DfltTopicEx}) ->
+ case ensure_endpoint(dest, Destination, Frame, Channel, RouteState) of
+
+ {ok, _Q, RouteState1} ->
+
+ {Frame1, State1} =
+ ensure_reply_to(Frame, State#proc_state{route_state = RouteState1}),
+
+ Props = rabbit_stomp_util:message_properties(Frame1),
+
+ {Exchange, RoutingKey} = parse_routing(Destination, DfltTopicEx),
+
+ Method = #'basic.publish'{
+ exchange = list_to_binary(Exchange),
+ routing_key = list_to_binary(RoutingKey),
+ mandatory = false,
+ immediate = false},
+
+ case transactional(Frame1) of
+ {yes, Transaction} ->
+ extend_transaction(
+ Transaction,
+ fun(StateN) ->
+ maybe_record_receipt(Frame1, StateN)
+ end,
+ {Method, Props, BodyFragments},
+ State1);
+ no ->
+ ok(send_method(Method, Props, BodyFragments,
+ maybe_record_receipt(Frame1, State1)))
+ end;
+
+ {error, _} = Err ->
+
+ Err
+ end.
+
+create_ack_method(DeliveryTag, #subscription{multi_ack = IsMulti}, _) ->
+ #'basic.ack'{delivery_tag = DeliveryTag,
+ multiple = IsMulti}.
+
+create_nack_method(DeliveryTag, #subscription{multi_ack = IsMulti}, Requeue) ->
+ #'basic.nack'{delivery_tag = DeliveryTag,
+ multiple = IsMulti,
+ requeue = Requeue}.
+
+negotiate_version(Frame) ->
+ ClientVers = re:split(rabbit_stomp_frame:header(
+ Frame, ?HEADER_ACCEPT_VERSION, "1.0"),
+ ",", [{return, list}]),
+ rabbit_stomp_util:negotiate_version(ClientVers, ?SUPPORTED_VERSIONS).
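+
+%% A rough illustration, assuming rabbit_stomp_util:negotiate_version/2
+%% picks the highest version common to both lists: a client sending
+%% "accept-version:1.0,1.1,1.2" negotiates "1.2", while no common version
+%% yields {error, no_common_version} (handled in process_connect/3 above).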
+
+
+send_delivery(Delivery = #'basic.deliver'{consumer_tag = ConsumerTag},
+ Properties, Body, DeliveryCtx,
+ State = #proc_state{
+ session_id = SessionId,
+ subscriptions = Subs,
+ version = Version}) ->
+ NewState = case maps:find(ConsumerTag, Subs) of
+ {ok, #subscription{ack_mode = AckMode}} ->
+ send_frame(
+ "MESSAGE",
+ rabbit_stomp_util:headers(SessionId, Delivery, Properties,
+ AckMode, Version),
+ Body,
+ State);
+ error ->
+ send_error("Subscription not found",
+ "There is no current subscription with tag '~s'.",
+ [ConsumerTag],
+ State)
+ end,
+ notify_received(DeliveryCtx),
+ NewState.
+
+notify_received(undefined) ->
+ %% no notification for quorum queues
+ ok;
+notify_received(DeliveryCtx) ->
+ %% notification for flow control
+ amqp_channel:notify_received(DeliveryCtx).
+
+send_method(Method, Channel, State) ->
+ amqp_channel:call(Channel, Method),
+ State.
+
+send_method(Method, State = #proc_state{channel = Channel}) ->
+ send_method(Method, Channel, State).
+
+send_method(Method, Properties, BodyFragments,
+ State = #proc_state{channel = Channel}) ->
+ send_method(Method, Channel, Properties, BodyFragments, State).
+
+send_method(Method = #'basic.publish'{}, Channel, Properties, BodyFragments,
+ State) ->
+ amqp_channel:cast_flow(
+ Channel, Method,
+ #amqp_msg{props = Properties,
+ payload = list_to_binary(BodyFragments)}),
+ State.
+
+close_connection(State = #proc_state{connection = none}) ->
+ State;
+%% Closing the connection will close the channel and subchannels
+close_connection(State = #proc_state{connection = Connection}) ->
+ %% ignore noproc or other exceptions to avoid debris
+ catch amqp_connection:close(Connection),
+ State#proc_state{channel = none, connection = none, subscriptions = none};
+close_connection(undefined) ->
+ rabbit_log:debug("~s:close_connection: undefined state", [?MODULE]),
+ #proc_state{channel = none, connection = none, subscriptions = none}.
+
+%%----------------------------------------------------------------------------
+%% Reply-To
+%%----------------------------------------------------------------------------
+
+ensure_reply_to(Frame = #stomp_frame{headers = Headers}, State) ->
+ case rabbit_stomp_frame:header(Frame, ?HEADER_REPLY_TO) of
+ not_found ->
+ {Frame, State};
+ {ok, ReplyTo} ->
+ {ok, Destination} = rabbit_routing_util:parse_endpoint(ReplyTo),
+ case rabbit_routing_util:dest_temp_queue(Destination) of
+ none ->
+ {Frame, State};
+ TempQueueId ->
+ {ReplyQueue, State1} =
+ ensure_reply_queue(TempQueueId, State),
+ {Frame#stomp_frame{
+ headers = lists:keyreplace(
+ ?HEADER_REPLY_TO, 1, Headers,
+ {?HEADER_REPLY_TO, ReplyQueue})},
+ State1}
+ end
+ end.
+
+ensure_reply_queue(TempQueueId, State = #proc_state{channel = Channel,
+ reply_queues = RQS,
+ subscriptions = Subs}) ->
+ case maps:find(TempQueueId, RQS) of
+ {ok, RQ} ->
+ {binary_to_list(RQ), State};
+ error ->
+ #'queue.declare_ok'{queue = Queue} =
+ amqp_channel:call(Channel,
+ #'queue.declare'{auto_delete = true,
+ exclusive = true}),
+
+ ConsumerTag = rabbit_stomp_util:consumer_tag_reply_to(TempQueueId),
+ #'basic.consume_ok'{} =
+ amqp_channel:subscribe(Channel,
+ #'basic.consume'{
+ queue = Queue,
+ consumer_tag = ConsumerTag,
+ no_ack = true,
+ nowait = false},
+ self()),
+
+ Destination = binary_to_list(Queue),
+
+ %% synthesise a subscription to the reply queue destination
+ Subs1 = maps:put(ConsumerTag,
+ #subscription{dest_hdr = Destination,
+ multi_ack = false},
+ Subs),
+
+ {Destination, State#proc_state{
+ reply_queues = maps:put(TempQueueId, Queue, RQS),
+ subscriptions = Subs1}}
+ end.
+
+%%----------------------------------------------------------------------------
+%% Receipt Handling
+%%----------------------------------------------------------------------------
+
+ensure_receipt(Frame = #stomp_frame{command = Command}, State) ->
+ case rabbit_stomp_frame:header(Frame, ?HEADER_RECEIPT) of
+ {ok, Id} -> do_receipt(Command, Id, State);
+ not_found -> State
+ end.
+
+do_receipt("SEND", _, State) ->
+ %% SEND frame receipts are handled when messages are confirmed
+ State;
+do_receipt(_Frame, ReceiptId, State) ->
+ send_frame("RECEIPT", [{"receipt-id", ReceiptId}], "", State).
+
+maybe_record_receipt(Frame, State = #proc_state{channel = Channel,
+ pending_receipts = PR}) ->
+ case rabbit_stomp_frame:header(Frame, ?HEADER_RECEIPT) of
+ {ok, Id} ->
+ PR1 = case PR of
+ undefined ->
+ amqp_channel:register_confirm_handler(
+ Channel, self()),
+ #'confirm.select_ok'{} =
+ amqp_channel:call(Channel, #'confirm.select'{}),
+ gb_trees:empty();
+ _ ->
+ PR
+ end,
+ SeqNo = amqp_channel:next_publish_seqno(Channel),
+ State#proc_state{pending_receipts = gb_trees:insert(SeqNo, Id, PR1)};
+ not_found ->
+ State
+ end.
+
+flush_pending_receipts(DeliveryTag, IsMulti,
+ State = #proc_state{pending_receipts = PR}) ->
+ {Receipts, PR1} = accumulate_receipts(DeliveryTag, IsMulti, PR),
+ State1 = lists:foldl(fun(ReceiptId, StateN) ->
+ do_receipt(none, ReceiptId, StateN)
+ end, State, Receipts),
+ State1#proc_state{pending_receipts = PR1}.
+
+accumulate_receipts(DeliveryTag, false, PR) ->
+ case gb_trees:lookup(DeliveryTag, PR) of
+ {value, ReceiptId} -> {[ReceiptId], gb_trees:delete(DeliveryTag, PR)};
+ none -> {[], PR}
+ end;
+
+accumulate_receipts(DeliveryTag, true, PR) ->
+ case gb_trees:is_empty(PR) of
+ true -> {[], PR};
+ false -> accumulate_receipts1(DeliveryTag,
+ gb_trees:take_smallest(PR), [])
+ end.
+
+accumulate_receipts1(DeliveryTag, {Key, Value, PR}, Acc)
+ when Key > DeliveryTag ->
+ {lists:reverse(Acc), gb_trees:insert(Key, Value, PR)};
+accumulate_receipts1(DeliveryTag, {_Key, Value, PR}, Acc) ->
+ Acc1 = [Value | Acc],
+ case gb_trees:is_empty(PR) of
+ true -> {lists:reverse(Acc1), PR};
+ false -> accumulate_receipts1(DeliveryTag,
+ gb_trees:take_smallest(PR), Acc1)
+ end.
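+
+%% Worked example: with pending receipts {1 => "r1", 2 => "r2", 3 => "r3"},
+%% a multiple ack up to delivery tag 2 gives
+%%   accumulate_receipts(2, true, PR) =:= {["r1", "r2"], PR'}
+%% where PR' retains only the entry for tag 3; the flushed receipt ids are
+%% returned in delivery-tag order.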
+
+%%----------------------------------------------------------------------------
+%% Transaction Support
+%%----------------------------------------------------------------------------
+
+transactional(Frame) ->
+ case rabbit_stomp_frame:header(Frame, ?HEADER_TRANSACTION) of
+ {ok, Transaction} -> {yes, Transaction};
+ not_found -> no
+ end.
+
+transactional_action(Frame, Name, Fun, State) ->
+ case transactional(Frame) of
+ {yes, Transaction} ->
+ Fun(Transaction, State);
+ no ->
+ error("Missing transaction",
+ "~p must include a 'transaction' header~n",
+ [Name],
+ State)
+ end.
+
+with_transaction(Transaction, State, Fun) ->
+ case get({transaction, Transaction}) of
+ undefined ->
+ error("Bad transaction",
+ "Invalid transaction identifier: ~p~n",
+ [Transaction],
+ State);
+ Actions ->
+ Fun(Actions, State)
+ end.
+
+begin_transaction(Transaction, State) ->
+ put({transaction, Transaction}, []),
+ ok(State).
+
+extend_transaction(Transaction, Callback, Action, State) ->
+ extend_transaction(Transaction, {callback, Callback, Action}, State).
+
+extend_transaction(Transaction, Action, State0) ->
+ with_transaction(
+ Transaction, State0,
+ fun (Actions, State) ->
+ put({transaction, Transaction}, [Action | Actions]),
+ ok(State)
+ end).
+
+commit_transaction(Transaction, State0) ->
+ with_transaction(
+ Transaction, State0,
+ fun (Actions, State) ->
+ FinalState = lists:foldr(fun perform_transaction_action/2,
+ State,
+ Actions),
+ erase({transaction, Transaction}),
+ ok(FinalState)
+ end).
+
+abort_transaction(Transaction, State0) ->
+ with_transaction(
+ Transaction, State0,
+ fun (_Actions, State) ->
+ erase({transaction, Transaction}),
+ ok(State)
+ end).
+
+perform_transaction_action({callback, Callback, Action}, State) ->
+ perform_transaction_action(Action, Callback(State));
+perform_transaction_action({Method}, State) ->
+ send_method(Method, State);
+perform_transaction_action({Method, Props, BodyFragments}, State) ->
+ send_method(Method, Props, BodyFragments, State).
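+
+%% Transactions are kept in the process dictionary: BEGIN stores an empty
+%% action list, frames processed inside the transaction prepend an action,
+%% and COMMIT replays the accumulated actions with lists:foldr/3, i.e. in
+%% the order the client sent them, before erasing the entry.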
+
+%%--------------------------------------------------------------------
+%% Heartbeat Management
+%%--------------------------------------------------------------------
+
+ensure_heartbeats(Heartbeats) ->
+
+ [CX, CY] = [list_to_integer(X) ||
+ X <- re:split(Heartbeats, ",", [{return, list}])],
+
+ {SendTimeout, ReceiveTimeout} =
+ {millis_to_seconds(CY), millis_to_seconds(CX)},
+
+ _ = rabbit_stomp_reader:start_heartbeats(self(), {SendTimeout, ReceiveTimeout}),
+    {SendTimeout * 1000, ReceiveTimeout * 1000}.
+
+millis_to_seconds(M) when M =< 0 -> 0;
+millis_to_seconds(M) when M < 1000 -> 1;
+millis_to_seconds(M) -> M div 1000.
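+
+%% Worked example: a CONNECT heart-beat header of "5000,10000" (cx = 5000,
+%% cy = 10000) gives SendTimeout = 10 and ReceiveTimeout = 5 seconds; the
+%% reader's heartbeaters are started with those values and the function
+%% returns {10000, 5000}, i.e. the negotiated values in milliseconds.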
+
+%%----------------------------------------------------------------------------
+%% Queue Setup
+%%----------------------------------------------------------------------------
+
+ensure_endpoint(_Direction, {queue, []}, _Frame, _Channel, _State) ->
+ {error, {invalid_destination, "Destination cannot be blank"}};
+
+ensure_endpoint(source, EndPoint, {_, _, Headers, _} = Frame, Channel, State) ->
+ Params =
+ [{subscription_queue_name_gen,
+ fun () ->
+ Id = build_subscription_id(Frame),
+ % Note: we discard the exchange here so there's no need to use
+ % the default_topic_exchange configuration key
+ {_, Name} = rabbit_routing_util:parse_routing(EndPoint),
+ list_to_binary(rabbit_stomp_util:subscription_queue_name(Name, Id, Frame))
+ end
+ }] ++ rabbit_stomp_util:build_params(EndPoint, Headers),
+ Arguments = rabbit_stomp_util:build_arguments(Headers),
+ rabbit_routing_util:ensure_endpoint(source, Channel, EndPoint,
+ [Arguments | Params], State);
+
+ensure_endpoint(Direction, EndPoint, {_, _, Headers, _}, Channel, State) ->
+ Params = rabbit_stomp_util:build_params(EndPoint, Headers),
+ Arguments = rabbit_stomp_util:build_arguments(Headers),
+ rabbit_routing_util:ensure_endpoint(Direction, Channel, EndPoint,
+ [Arguments | Params], State).
+
+build_subscription_id(Frame) ->
+ case rabbit_stomp_util:has_durable_header(Frame) of
+ true ->
+ {ok, Id} = rabbit_stomp_frame:header(Frame, ?HEADER_ID),
+ Id;
+ false ->
+ rabbit_guid:gen_secure()
+ end.
+
+%%----------------------------------------------------------------------------
+%% Success/error handling
+%%----------------------------------------------------------------------------
+
+ok(State) ->
+ {ok, none, State}.
+
+ok(Command, Headers, BodyFragments, State) ->
+ {ok, #stomp_frame{command = Command,
+ headers = Headers,
+ body_iolist = BodyFragments}, State}.
+
+amqp_death(access_refused = ErrorName, Explanation, State) ->
+ ErrorDesc = rabbit_misc:format("~s~n", [Explanation]),
+ log_error(ErrorName, ErrorDesc, none),
+ {stop, normal, close_connection(send_error(atom_to_list(ErrorName), ErrorDesc, State))};
+amqp_death(ReplyCode, Explanation, State) ->
+ ErrorName = amqp_connection:error_atom(ReplyCode),
+ ErrorDesc = rabbit_misc:format("~s~n", [Explanation]),
+ log_error(ErrorName, ErrorDesc, none),
+ {stop, normal, close_connection(send_error(atom_to_list(ErrorName), ErrorDesc, State))}.
+
+error(Message, Detail, State) ->
+ priv_error(Message, Detail, none, State).
+
+error(Message, Format, Args, State) ->
+ priv_error(Message, Format, Args, none, State).
+
+priv_error(Message, Detail, ServerPrivateDetail, State) ->
+ log_error(Message, Detail, ServerPrivateDetail),
+ {error, Message, Detail, State}.
+
+priv_error(Message, Format, Args, ServerPrivateDetail, State) ->
+ priv_error(Message, rabbit_misc:format(Format, Args), ServerPrivateDetail,
+ State).
+
+log_error(Message, Detail, ServerPrivateDetail) ->
+ rabbit_log:error("STOMP error frame sent:~n"
+ "Message: ~p~n"
+ "Detail: ~p~n"
+ "Server private detail: ~p~n",
+ [Message, Detail, ServerPrivateDetail]).
+
+%%----------------------------------------------------------------------------
+%% Frame sending utilities
+%%----------------------------------------------------------------------------
+
+send_frame(Command, Headers, BodyFragments, State) ->
+ send_frame(#stomp_frame{command = Command,
+ headers = Headers,
+ body_iolist = BodyFragments},
+ State).
+
+send_frame(Frame, State = #proc_state{send_fun = SendFun,
+ trailing_lf = TrailingLF}) ->
+ SendFun(async, rabbit_stomp_frame:serialize(Frame, TrailingLF)),
+ State.
+
+send_error_frame(Message, ExtraHeaders, Format, Args, State) ->
+ send_error_frame(Message, ExtraHeaders, rabbit_misc:format(Format, Args),
+ State).
+
+send_error_frame(Message, ExtraHeaders, Detail, State) ->
+ send_frame("ERROR", [{"message", Message},
+ {"content-type", "text/plain"},
+ {"version", string:join(?SUPPORTED_VERSIONS, ",")}] ++
+ ExtraHeaders,
+ Detail, State).
+
+send_error(Message, Detail, State) ->
+ send_error_frame(Message, [], Detail, State).
+
+send_error(Message, Format, Args, State) ->
+ send_error(Message, rabbit_misc:format(Format, Args), State).
+
+additional_info(Key,
+ #proc_state{adapter_info =
+ #amqp_adapter_info{additional_info = AddInfo}}) ->
+ proplists:get_value(Key, AddInfo).
+
+parse_routing(Destination, DefaultTopicExchange) ->
+ {Exchange0, RoutingKey} = rabbit_routing_util:parse_routing(Destination),
+ Exchange1 = maybe_apply_default_topic_exchange(Exchange0, DefaultTopicExchange),
+ {Exchange1, RoutingKey}.
+
+maybe_apply_default_topic_exchange("amq.topic"=Exchange, <<"amq.topic">>=_DefaultTopicExchange) ->
+ %% This is the case where the destination is the same
+ %% as the default of amq.topic
+ Exchange;
+maybe_apply_default_topic_exchange("amq.topic"=_Exchange, DefaultTopicExchange) ->
+ %% This is the case where the destination would have been
+ %% amq.topic but we have configured a different default
+ binary_to_list(DefaultTopicExchange);
+maybe_apply_default_topic_exchange(Exchange, _DefaultTopicExchange) ->
+ %% This is the case where the destination is different than
+ %% amq.topic, so it must have been specified in the
+ %% message headers
+ Exchange.
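+
+%% Illustration, assuming rabbit_routing_util:parse_routing/1 maps /topic
+%% destinations to the "amq.topic" exchange: with default_topic_exchange
+%% left at <<"amq.topic">>, parse_routing({topic, "alerts"}, DefaultTopic)
+%% returns {"amq.topic", "alerts"}; with it configured as <<"my.topic">>
+%% the same destination resolves to {"my.topic", "alerts"}, while an
+%% explicitly named exchange is never rewritten.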
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_reader.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_reader.erl
new file mode 100644
index 0000000000..8f081d618f
--- /dev/null
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_reader.erl
@@ -0,0 +1,465 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stomp_reader).
+-behaviour(gen_server2).
+
+-export([start_link/3]).
+-export([conserve_resources/3]).
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
+ code_change/3, terminate/2]).
+-export([start_heartbeats/2]).
+-export([info/2, close_connection/2]).
+-export([ssl_login_name/2]).
+
+-include("rabbit_stomp.hrl").
+-include("rabbit_stomp_frame.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+
+-define(SIMPLE_METRICS, [pid, recv_oct, send_oct, reductions]).
+-define(OTHER_METRICS, [recv_cnt, send_cnt, send_pend, garbage_collection, state,
+ timeout]).
+
+-record(reader_state, {socket, conn_name, parse_state, processor_state, state,
+ conserve_resources, recv_outstanding, stats_timer,
+ parent, connection, heartbeat_sup, heartbeat,
+ timeout_sec %% heartbeat timeout value used, 0 means
+ %% heartbeats are disabled
+ }).
+
+%%----------------------------------------------------------------------------
+
+start_link(SupHelperPid, Ref, Configuration) ->
+ Pid = proc_lib:spawn_link(?MODULE, init,
+ [[SupHelperPid, Ref, Configuration]]),
+ {ok, Pid}.
+
+info(Pid, InfoItems) ->
+ case InfoItems -- ?INFO_ITEMS of
+ [] ->
+ gen_server2:call(Pid, {info, InfoItems});
+ UnknownItems -> throw({bad_argument, UnknownItems})
+ end.
+
+close_connection(Pid, Reason) ->
+ gen_server:cast(Pid, {close_connection, Reason}).
+
+
+init([SupHelperPid, Ref, Configuration]) ->
+ process_flag(trap_exit, true),
+ {ok, Sock} = rabbit_networking:handshake(Ref,
+ application:get_env(rabbitmq_stomp, proxy_protocol, false)),
+ RealSocket = rabbit_net:unwrap_socket(Sock),
+
+ case rabbit_net:connection_string(Sock, inbound) of
+ {ok, ConnStr} ->
+ ProcInitArgs = processor_args(Configuration, Sock),
+ ProcState = rabbit_stomp_processor:initial_state(Configuration,
+ ProcInitArgs),
+
+ rabbit_log_connection:info("accepting STOMP connection ~p (~s)~n",
+ [self(), ConnStr]),
+
+ ParseState = rabbit_stomp_frame:initial_state(),
+ _ = register_resource_alarm(),
+ gen_server2:enter_loop(?MODULE, [],
+ rabbit_event:init_stats_timer(
+ run_socket(control_throttle(
+ #reader_state{socket = RealSocket,
+ conn_name = ConnStr,
+ parse_state = ParseState,
+ processor_state = ProcState,
+ heartbeat_sup = SupHelperPid,
+ heartbeat = {none, none},
+ state = running,
+ conserve_resources = false,
+ recv_outstanding = false})), #reader_state.stats_timer),
+ {backoff, 1000, 1000, 10000});
+ {network_error, Reason} ->
+ rabbit_net:fast_close(RealSocket),
+ terminate({shutdown, Reason}, undefined);
+ {error, enotconn} ->
+ rabbit_net:fast_close(RealSocket),
+ terminate(shutdown, undefined);
+ {error, Reason} ->
+ rabbit_net:fast_close(RealSocket),
+ terminate({network_error, Reason}, undefined)
+ end.
+
+
+handle_call({info, InfoItems}, _From, State) ->
+ Infos = lists:map(
+ fun(InfoItem) ->
+ {InfoItem, info_internal(InfoItem, State)}
+ end,
+ InfoItems),
+ {reply, Infos, State};
+handle_call(Msg, From, State) ->
+ {stop, {stomp_unexpected_call, Msg, From}, State}.
+
+handle_cast({close_connection, Reason}, State) ->
+ {stop, {shutdown, {server_initiated_close, Reason}}, State};
+handle_cast(client_timeout, State) ->
+ {stop, {shutdown, client_heartbeat_timeout}, State};
+handle_cast(Msg, State) ->
+ {stop, {stomp_unexpected_cast, Msg}, State}.
+
+
+handle_info({Tag, Sock, Data}, State=#reader_state{socket=Sock})
+ when Tag =:= tcp; Tag =:= ssl ->
+ case process_received_bytes(Data, State#reader_state{recv_outstanding = false}) of
+ {ok, NewState} ->
+ {noreply, ensure_stats_timer(run_socket(control_throttle(NewState))), hibernate};
+ {stop, Reason, NewState} ->
+ {stop, Reason, NewState}
+ end;
+handle_info({Tag, Sock}, State=#reader_state{socket=Sock})
+ when Tag =:= tcp_closed; Tag =:= ssl_closed ->
+ {stop, normal, State};
+handle_info({Tag, Sock, Reason}, State=#reader_state{socket=Sock})
+ when Tag =:= tcp_error; Tag =:= ssl_error ->
+ {stop, {inet_error, Reason}, State};
+handle_info({inet_reply, _Sock, {error, closed}}, State) ->
+ {stop, normal, State};
+handle_info({inet_reply, _, ok}, State) ->
+ {noreply, State, hibernate};
+handle_info({inet_reply, _, Status}, State) ->
+ {stop, Status, State};
+handle_info(emit_stats, State) ->
+ {noreply, emit_stats(State), hibernate};
+handle_info({conserve_resources, Conserve}, State) ->
+ NewState = State#reader_state{conserve_resources = Conserve},
+ {noreply, run_socket(control_throttle(NewState)), hibernate};
+handle_info({bump_credit, Msg}, State) ->
+ credit_flow:handle_bump_msg(Msg),
+ {noreply, run_socket(control_throttle(State)), hibernate};
+
+%%----------------------------------------------------------------------------
+
+handle_info(client_timeout, State) ->
+ {stop, {shutdown, client_heartbeat_timeout}, State};
+
+%%----------------------------------------------------------------------------
+
+handle_info(#'basic.consume_ok'{}, State) ->
+ {noreply, State, hibernate};
+handle_info(#'basic.cancel_ok'{}, State) ->
+ {noreply, State, hibernate};
+handle_info(#'basic.ack'{delivery_tag = Tag, multiple = IsMulti}, State) ->
+ ProcState = processor_state(State),
+ NewProcState = rabbit_stomp_processor:flush_pending_receipts(Tag,
+ IsMulti,
+ ProcState),
+ {noreply, processor_state(NewProcState, State), hibernate};
+handle_info({Delivery = #'basic.deliver'{},
+ Message = #amqp_msg{}},
+ State) ->
+ %% receiving a message from a quorum queue
+ %% no delivery context
+ handle_info({Delivery, Message, undefined}, State);
+handle_info({Delivery = #'basic.deliver'{},
+ #amqp_msg{props = Props, payload = Payload},
+ DeliveryCtx},
+ State) ->
+ ProcState = processor_state(State),
+ NewProcState = rabbit_stomp_processor:send_delivery(Delivery,
+ Props,
+ Payload,
+ DeliveryCtx,
+ ProcState),
+ {noreply, processor_state(NewProcState, State), hibernate};
+handle_info(#'basic.cancel'{consumer_tag = Ctag}, State) ->
+ ProcState = processor_state(State),
+ case rabbit_stomp_processor:cancel_consumer(Ctag, ProcState) of
+ {ok, NewProcState, _} ->
+ {noreply, processor_state(NewProcState, State), hibernate};
+ {stop, Reason, NewProcState} ->
+ {stop, Reason, processor_state(NewProcState, State)}
+ end;
+
+handle_info({start_heartbeats, {0, 0}}, State) ->
+ {noreply, State#reader_state{timeout_sec = {0, 0}}};
+
+handle_info({start_heartbeats, {SendTimeout, ReceiveTimeout}},
+ State = #reader_state{heartbeat_sup = SupPid, socket = Sock}) ->
+
+ SendFun = fun() -> catch rabbit_net:send(Sock, <<$\n>>) end,
+ Pid = self(),
+ ReceiveFun = fun() -> gen_server2:cast(Pid, client_timeout) end,
+ Heartbeat = rabbit_heartbeat:start(SupPid, Sock, SendTimeout,
+ SendFun, ReceiveTimeout, ReceiveFun),
+ {noreply, State#reader_state{heartbeat = Heartbeat,
+ timeout_sec = {SendTimeout, ReceiveTimeout}}};
+
+
+%%----------------------------------------------------------------------------
+handle_info({'EXIT', From, Reason}, State) ->
+ ProcState = processor_state(State),
+ case rabbit_stomp_processor:handle_exit(From, Reason, ProcState) of
+ {stop, NewReason, NewProcState} ->
+ {stop, NewReason, processor_state(NewProcState, State)};
+ unknown_exit ->
+ {stop, {connection_died, Reason}, State}
+ end.
+%%----------------------------------------------------------------------------
+
+process_received_bytes([], State) ->
+ {ok, State};
+process_received_bytes(Bytes,
+ State = #reader_state{
+ processor_state = ProcState,
+ parse_state = ParseState}) ->
+ case rabbit_stomp_frame:parse(Bytes, ParseState) of
+ {more, ParseState1} ->
+ {ok, State#reader_state{parse_state = ParseState1}};
+ {ok, Frame, Rest} ->
+ case rabbit_stomp_processor:process_frame(Frame, ProcState) of
+ {ok, NewProcState, Conn} ->
+ PS = rabbit_stomp_frame:initial_state(),
+ NextState = maybe_block(State, Frame),
+ process_received_bytes(Rest, NextState#reader_state{
+ processor_state = NewProcState,
+ parse_state = PS,
+ connection = Conn});
+ {stop, Reason, NewProcState} ->
+ {stop, Reason,
+ processor_state(NewProcState, State)}
+ end;
+ {error, Reason} ->
+            %% The parser could not parse the received data. We log the
+            %% reason here and stop with reason 'normal' rather than the
+            %% actual parsing error: an abnormal exit would make the
+            %% supervisor log a crash report (which is not very useful)
+            %% and go through its recovery handling, which is needlessly
+            %% slow and noisy for a malformed frame.
+ log_reason({network_error, Reason}, State),
+ {stop, normal, State}
+ end.
+
+conserve_resources(Pid, _Source, {_, Conserve, _}) ->
+ Pid ! {conserve_resources, Conserve},
+ ok.
+
+register_resource_alarm() ->
+ rabbit_alarm:register(self(), {?MODULE, conserve_resources, []}).
+
+
+control_throttle(State = #reader_state{state = CS,
+ conserve_resources = Mem,
+ heartbeat = Heartbeat}) ->
+ case {CS, Mem orelse credit_flow:blocked()} of
+ {running, true} -> State#reader_state{state = blocking};
+ {blocking, false} -> rabbit_heartbeat:resume_monitor(Heartbeat),
+ State#reader_state{state = running};
+ {blocked, false} -> rabbit_heartbeat:resume_monitor(Heartbeat),
+ State#reader_state{state = running};
+ {_, _} -> State
+ end.
+
+maybe_block(State = #reader_state{state = blocking, heartbeat = Heartbeat},
+ #stomp_frame{command = "SEND"}) ->
+ rabbit_heartbeat:pause_monitor(Heartbeat),
+ State#reader_state{state = blocked};
+maybe_block(State, _) ->
+ State.
+
+run_socket(State = #reader_state{state = blocked}) ->
+ State;
+run_socket(State = #reader_state{recv_outstanding = true}) ->
+ State;
+run_socket(State = #reader_state{socket = Sock}) ->
+ rabbit_net:setopts(Sock, [{active, once}]),
+ State#reader_state{recv_outstanding = true}.
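+
+%% Flow control summary: a resource alarm or exhausted credit moves the
+%% reader from 'running' to 'blocking'; the next SEND frame then pauses the
+%% heartbeat monitor and moves it to 'blocked', in which state run_socket/1
+%% stops re-arming {active, once}, so no further bytes are read until the
+%% alarm clears and control_throttle/1 resumes the connection.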
+
+
+terminate(Reason, undefined) ->
+ log_reason(Reason, undefined),
+ {stop, Reason};
+terminate(Reason, State = #reader_state{processor_state = ProcState}) ->
+ maybe_emit_stats(State),
+ log_reason(Reason, State),
+ _ = rabbit_stomp_processor:flush_and_die(ProcState),
+ {stop, Reason}.
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+
+log_reason({network_error, {ssl_upgrade_error, closed}, ConnStr}, _State) ->
+ rabbit_log_connection:error("STOMP detected TLS upgrade error on ~s: connection closed~n",
+ [ConnStr]);
+
+log_reason({network_error,
+ {ssl_upgrade_error,
+ {tls_alert, "handshake failure"}}, ConnStr}, _State) ->
+ log_tls_alert(handshake_failure, ConnStr);
+log_reason({network_error,
+ {ssl_upgrade_error,
+ {tls_alert, "unknown ca"}}, ConnStr}, _State) ->
+ log_tls_alert(unknown_ca, ConnStr);
+log_reason({network_error,
+ {ssl_upgrade_error,
+ {tls_alert, {Err, _}}}, ConnStr}, _State) ->
+ log_tls_alert(Err, ConnStr);
+log_reason({network_error,
+ {ssl_upgrade_error,
+ {tls_alert, Alert}}, ConnStr}, _State) ->
+ log_tls_alert(Alert, ConnStr);
+log_reason({network_error, {ssl_upgrade_error, Reason}, ConnStr}, _State) ->
+ rabbit_log_connection:error("STOMP detected TLS upgrade error on ~s: ~p~n",
+ [ConnStr, Reason]);
+
+log_reason({network_error, Reason, ConnStr}, _State) ->
+ rabbit_log_connection:error("STOMP detected network error on ~s: ~p~n",
+ [ConnStr, Reason]);
+
+log_reason({network_error, Reason}, _State) ->
+ rabbit_log_connection:error("STOMP detected network error: ~p~n", [Reason]);
+
+log_reason({shutdown, client_heartbeat_timeout},
+ #reader_state{ processor_state = ProcState }) ->
+ AdapterName = rabbit_stomp_processor:adapter_name(ProcState),
+ rabbit_log_connection:warning("STOMP detected missed client heartbeat(s) "
+ "on connection ~s, closing it~n", [AdapterName]);
+
+log_reason({shutdown, {server_initiated_close, Reason}},
+ #reader_state{conn_name = ConnName}) ->
+ rabbit_log_connection:info("closing STOMP connection ~p (~s), reason: ~s~n",
+ [self(), ConnName, Reason]);
+
+log_reason(normal, #reader_state{conn_name = ConnName}) ->
+ rabbit_log_connection:info("closing STOMP connection ~p (~s)~n", [self(), ConnName]);
+
+log_reason(shutdown, undefined) ->
+ rabbit_log_connection:error("closing STOMP connection that never completed connection handshake (negotiation)~n", []);
+
+log_reason(Reason, #reader_state{processor_state = ProcState}) ->
+ AdapterName = rabbit_stomp_processor:adapter_name(ProcState),
+ rabbit_log_connection:warning("STOMP connection ~s terminated"
+ " with reason ~p, closing it~n", [AdapterName, Reason]).
+
+log_tls_alert(handshake_failure, ConnStr) ->
+ rabbit_log_connection:error("STOMP detected TLS upgrade error on ~s: handshake failure~n",
+ [ConnStr]);
+log_tls_alert(unknown_ca, ConnStr) ->
+ rabbit_log_connection:error("STOMP detected TLS certificate verification error on ~s: alert 'unknown CA'~n",
+ [ConnStr]);
+log_tls_alert(Alert, ConnStr) ->
+ rabbit_log_connection:error("STOMP detected TLS upgrade error on ~s: alert ~s~n",
+ [ConnStr, Alert]).
+
+
+%%----------------------------------------------------------------------------
+
+processor_args(Configuration, Sock) ->
+ RealSocket = rabbit_net:unwrap_socket(Sock),
+ SendFun = fun (sync, IoData) ->
+ %% no messages emitted
+ catch rabbit_net:send(RealSocket, IoData);
+ (async, IoData) ->
+ %% {inet_reply, _, _} will appear soon
+ %% We ignore certain errors here, as we will be
+ %% receiving an asynchronous notification of the
+ %% same (or a related) fault shortly anyway. See
+ %% bug 21365.
+ catch rabbit_net:port_command(RealSocket, IoData)
+ end,
+ {ok, {PeerAddr, _PeerPort}} = rabbit_net:sockname(RealSocket),
+ {SendFun, adapter_info(Sock),
+ ssl_login_name(RealSocket, Configuration), PeerAddr}.
+
+adapter_info(Sock) ->
+ amqp_connection:socket_adapter_info(Sock, {'STOMP', 0}).
+
+ssl_login_name(_Sock, #stomp_configuration{ssl_cert_login = false}) ->
+ none;
+ssl_login_name(Sock, #stomp_configuration{ssl_cert_login = true}) ->
+ case rabbit_net:peercert(Sock) of
+ {ok, C} -> case rabbit_ssl:peer_cert_auth_name(C) of
+ unsafe -> none;
+ not_found -> none;
+ Name -> Name
+ end;
+ {error, no_peercert} -> none;
+ nossl -> none
+ end.
+
+%%----------------------------------------------------------------------------
+
+start_heartbeats(_, {0,0})       -> ok;
+start_heartbeats(Pid, Heartbeat) -> Pid ! {start_heartbeats, Heartbeat}.
+
+maybe_emit_stats(State) ->
+ rabbit_event:if_enabled(State, #reader_state.stats_timer,
+ fun() -> emit_stats(State) end).
+
+emit_stats(State=#reader_state{connection = C}) when C == none; C == undefined ->
+    %% Avoid emitting stats on terminate when the connection has not yet been
+    %% established, as this would create orphan entries in the stats database
+ State1 = rabbit_event:reset_stats_timer(State, #reader_state.stats_timer),
+ ensure_stats_timer(State1);
+emit_stats(State) ->
+ [{_, Pid}, {_, Recv_oct}, {_, Send_oct}, {_, Reductions}] = I
+ = infos(?SIMPLE_METRICS, State),
+ Infos = infos(?OTHER_METRICS, State),
+ rabbit_core_metrics:connection_stats(Pid, Infos),
+ rabbit_core_metrics:connection_stats(Pid, Recv_oct, Send_oct, Reductions),
+ rabbit_event:notify(connection_stats, Infos ++ I),
+ State1 = rabbit_event:reset_stats_timer(State, #reader_state.stats_timer),
+ ensure_stats_timer(State1).
+
+ensure_stats_timer(State = #reader_state{}) ->
+ rabbit_event:ensure_stats_timer(State, #reader_state.stats_timer, emit_stats).
+
+%%----------------------------------------------------------------------------
+
+
+processor_state(#reader_state{ processor_state = ProcState }) -> ProcState.
+processor_state(ProcState, #reader_state{} = State) ->
+ State#reader_state{ processor_state = ProcState}.
+
+%%----------------------------------------------------------------------------
+
+infos(Items, State) -> [{Item, info_internal(Item, State)} || Item <- Items].
+
+info_internal(pid, State) -> info_internal(connection, State);
+info_internal(SockStat, #reader_state{socket = Sock}) when SockStat =:= recv_oct;
+ SockStat =:= recv_cnt;
+ SockStat =:= send_oct;
+ SockStat =:= send_cnt;
+ SockStat =:= send_pend ->
+ case rabbit_net:getstat(Sock, [SockStat]) of
+ {ok, [{_, N}]} when is_number(N) -> N;
+ _ -> 0
+ end;
+info_internal(state, State) -> info_internal(connection_state, State);
+info_internal(garbage_collection, _State) ->
+ rabbit_misc:get_gc_info(self());
+info_internal(reductions, _State) ->
+ {reductions, Reductions} = erlang:process_info(self(), reductions),
+ Reductions;
+info_internal(timeout, #reader_state{timeout_sec = {_, Receive}}) ->
+ Receive;
+info_internal(timeout, #reader_state{timeout_sec = undefined}) ->
+ 0;
+info_internal(conn_name, #reader_state{conn_name = Val}) ->
+ rabbit_data_coercion:to_binary(Val);
+info_internal(connection, #reader_state{connection = Val}) ->
+ Val;
+info_internal(connection_state, #reader_state{state = Val}) ->
+ Val;
+info_internal(Key, #reader_state{processor_state = ProcState}) ->
+ rabbit_stomp_processor:info(Key, ProcState).
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_sup.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_sup.erl
new file mode 100644
index 0000000000..ee74569af9
--- /dev/null
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_sup.erl
@@ -0,0 +1,83 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stomp_sup).
+-behaviour(supervisor).
+
+-export([start_link/2, init/1, stop_listeners/0]).
+
+-define(TCP_PROTOCOL, 'stomp').
+-define(TLS_PROTOCOL, 'stomp/ssl').
+
+start_link(Listeners, Configuration) ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE,
+ [Listeners, Configuration]).
+
+init([{Listeners, SslListeners0}, Configuration]) ->
+ NumTcpAcceptors = application:get_env(rabbitmq_stomp, num_tcp_acceptors, 10),
+ {ok, SocketOpts} = application:get_env(rabbitmq_stomp, tcp_listen_options),
+ {SslOpts, NumSslAcceptors, SslListeners}
+ = case SslListeners0 of
+ [] -> {none, 0, []};
+ _ -> {rabbit_networking:ensure_ssl(),
+ application:get_env(rabbitmq_stomp, num_ssl_acceptors, 10),
+ case rabbit_networking:poodle_check('STOMP') of
+ ok -> SslListeners0;
+ danger -> []
+ end}
+ end,
+ Flags = #{
+ strategy => one_for_all,
+ period => 10,
+ intensity => 10
+ },
+ {ok, {Flags,
+ listener_specs(fun tcp_listener_spec/1,
+ [SocketOpts, Configuration, NumTcpAcceptors], Listeners) ++
+ listener_specs(fun ssl_listener_spec/1,
+ [SocketOpts, SslOpts, Configuration, NumSslAcceptors], SslListeners)}}.
+
+stop_listeners() ->
+ rabbit_networking:stop_ranch_listener_of_protocol(?TCP_PROTOCOL),
+ rabbit_networking:stop_ranch_listener_of_protocol(?TLS_PROTOCOL),
+ ok.
+
+%%
+%% Implementation
+%%
+
+listener_specs(Fun, Args, Listeners) ->
+ [Fun([Address | Args]) ||
+ Listener <- Listeners,
+ Address <- rabbit_networking:tcp_listener_addresses(Listener)].
+
+tcp_listener_spec([Address, SocketOpts, Configuration, NumAcceptors]) ->
+ rabbit_networking:tcp_listener_spec(
+ rabbit_stomp_listener_sup, Address, SocketOpts,
+ transport(?TCP_PROTOCOL), rabbit_stomp_client_sup, Configuration,
+ stomp, NumAcceptors, "STOMP TCP listener").
+
+ssl_listener_spec([Address, SocketOpts, SslOpts, Configuration, NumAcceptors]) ->
+ rabbit_networking:tcp_listener_spec(
+ rabbit_stomp_listener_sup, Address, SocketOpts ++ SslOpts,
+ transport(?TLS_PROTOCOL), rabbit_stomp_client_sup, Configuration,
+ 'stomp/ssl', NumAcceptors, "STOMP TLS listener").
+
+transport(Protocol) ->
+ case Protocol of
+ ?TCP_PROTOCOL -> ranch_tcp;
+ ?TLS_PROTOCOL -> ranch_ssl
+ end.
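+
+%% Note: with the default application environment (e.g. tcp_listeners set
+%% to [61613] and no ssl_listeners), init/1 yields a single ranch_tcp child
+%% spec; TLS listener specs are only added when ssl_listeners is non-empty
+%% and the POODLE check passes.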
diff --git a/deps/rabbitmq_stomp/src/rabbit_stomp_util.erl b/deps/rabbitmq_stomp/src/rabbit_stomp_util.erl
new file mode 100644
index 0000000000..6df1affbb7
--- /dev/null
+++ b/deps/rabbitmq_stomp/src/rabbit_stomp_util.erl
@@ -0,0 +1,418 @@
+%% The contents of this file are subject to the Mozilla Public License
+%% Version 1.1 (the "License"); you may not use this file except in
+%% compliance with the License. You may obtain a copy of the License
+%% at https://www.mozilla.org/MPL/
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and
+%% limitations under the License.
+%%
+%% The Original Code is RabbitMQ.
+%%
+%% The Initial Developer of the Original Code is GoPivotal, Inc.
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stomp_util).
+
+-export([parse_message_id/1, subscription_queue_name/3]).
+-export([longstr_field/2]).
+-export([ack_mode/1, consumer_tag_reply_to/1, consumer_tag/1, message_headers/1,
+ headers_post_process/1, headers/5, message_properties/1, tag_to_id/1,
+ msg_header_name/1, ack_header_name/1, build_arguments/1, build_params/2,
+ has_durable_header/1]).
+-export([negotiate_version/2]).
+-export([trim_headers/1]).
+
+-include_lib("amqp_client/include/amqp_client.hrl").
+-include_lib("amqp_client/include/rabbit_routing_prefixes.hrl").
+-include("rabbit_stomp_frame.hrl").
+-include("rabbit_stomp_headers.hrl").
+
+-define(INTERNAL_TAG_PREFIX, "T_").
+-define(QUEUE_TAG_PREFIX, "Q_").
+
+%%--------------------------------------------------------------------
+%% Frame and Header Parsing
+%%--------------------------------------------------------------------
+
+consumer_tag_reply_to(QueueId) ->
+ internal_tag(?TEMP_QUEUE_ID_PREFIX ++ QueueId).
+
+consumer_tag(Frame) ->
+ case rabbit_stomp_frame:header(Frame, ?HEADER_ID) of
+ {ok, Id} ->
+ case lists:prefix(?TEMP_QUEUE_ID_PREFIX, Id) of
+ false -> {ok, internal_tag(Id), "id='" ++ Id ++ "'"};
+ true -> {error, invalid_prefix}
+ end;
+ not_found ->
+ case rabbit_stomp_frame:header(Frame, ?HEADER_DESTINATION) of
+ {ok, DestHdr} ->
+ {ok, queue_tag(DestHdr),
+ "destination='" ++ DestHdr ++ "'"};
+ not_found ->
+ {error, missing_destination_header}
+ end
+ end.
+
+ack_mode(Frame) ->
+ case rabbit_stomp_frame:header(Frame, ?HEADER_ACK, "auto") of
+ "auto" -> {auto, false};
+ "client" -> {client, true};
+ "client-individual" -> {client, false}
+ end.
+
+message_properties(Frame = #stomp_frame{headers = Headers}) ->
+ BinH = fun(K) -> rabbit_stomp_frame:binary_header(Frame, K, undefined) end,
+ IntH = fun(K) -> rabbit_stomp_frame:integer_header(Frame, K, undefined) end,
+
+ DeliveryMode = case rabbit_stomp_frame:boolean_header(
+ Frame, ?HEADER_PERSISTENT, false) of
+ true -> 2;
+ false -> undefined
+ end,
+
+ #'P_basic'{ content_type = BinH(?HEADER_CONTENT_TYPE),
+ content_encoding = BinH(?HEADER_CONTENT_ENCODING),
+ headers = [longstr_field(K, V) ||
+ {K, V} <- Headers, user_header(K)],
+ delivery_mode = DeliveryMode,
+ priority = IntH(?HEADER_PRIORITY),
+ correlation_id = BinH(?HEADER_CORRELATION_ID),
+ reply_to = BinH(?HEADER_REPLY_TO),
+ expiration = BinH(?HEADER_EXPIRATION),
+ message_id = BinH(?HEADER_AMQP_MESSAGE_ID),
+ timestamp = IntH(?HEADER_TIMESTAMP),
+ type = BinH(?HEADER_TYPE),
+ user_id = BinH(?HEADER_USER_ID),
+ app_id = BinH(?HEADER_APP_ID) }.
+
+message_headers(Props = #'P_basic'{headers = Headers}) ->
+ adhoc_convert_headers(
+ Headers,
+ lists:foldl(fun({Header, Index}, Acc) ->
+ maybe_header(Header, element(Index, Props), Acc)
+ end, [],
+ [{?HEADER_CONTENT_TYPE, #'P_basic'.content_type},
+ {?HEADER_CONTENT_ENCODING, #'P_basic'.content_encoding},
+ {?HEADER_PERSISTENT, #'P_basic'.delivery_mode},
+ {?HEADER_PRIORITY, #'P_basic'.priority},
+ {?HEADER_CORRELATION_ID, #'P_basic'.correlation_id},
+ {?HEADER_REPLY_TO, #'P_basic'.reply_to},
+ {?HEADER_EXPIRATION, #'P_basic'.expiration},
+ {?HEADER_AMQP_MESSAGE_ID, #'P_basic'.message_id},
+ {?HEADER_TIMESTAMP, #'P_basic'.timestamp},
+ {?HEADER_TYPE, #'P_basic'.type},
+ {?HEADER_USER_ID, #'P_basic'.user_id},
+ {?HEADER_APP_ID, #'P_basic'.app_id}])).
+
+adhoc_convert_headers(undefined, Existing) ->
+ Existing;
+adhoc_convert_headers(Headers, Existing) ->
+ lists:foldr(fun ({K, longstr, V}, Acc) ->
+ [{binary_to_list(K), binary_to_list(V)} | Acc];
+ ({K, signedint, V}, Acc) ->
+ [{binary_to_list(K), integer_to_list(V)} | Acc];
+ (_, Acc) ->
+ Acc
+ end, Existing, Headers).
+
+headers_extra(SessionId, AckMode, Version,
+ #'basic.deliver'{consumer_tag = ConsumerTag,
+ delivery_tag = DeliveryTag,
+ exchange = ExchangeBin,
+ routing_key = RoutingKeyBin,
+ redelivered = Redelivered}) ->
+ case tag_to_id(ConsumerTag) of
+ {ok, {internal, Id}} -> [{?HEADER_SUBSCRIPTION, Id}];
+ _ -> []
+ end ++
+ [{?HEADER_DESTINATION,
+ format_destination(binary_to_list(ExchangeBin),
+ binary_to_list(RoutingKeyBin))},
+ {?HEADER_MESSAGE_ID,
+ create_message_id(ConsumerTag, SessionId, DeliveryTag)},
+ {?HEADER_REDELIVERED, Redelivered}] ++
+ case AckMode == client andalso Version == "1.2" of
+ true -> [{?HEADER_ACK,
+ create_message_id(ConsumerTag, SessionId, DeliveryTag)}];
+ false -> []
+ end.
+
+headers_post_process(Headers) ->
+ Prefixes = rabbit_routing_util:dest_prefixes(),
+ [case Header of
+ {?HEADER_REPLY_TO, V} ->
+ case lists:any(fun (P) -> lists:prefix(P, V) end, Prefixes) of
+ true -> {?HEADER_REPLY_TO, V};
+ false -> {?HEADER_REPLY_TO, ?REPLY_QUEUE_PREFIX ++ V}
+ end;
+ {_, _} ->
+ Header
+ end || Header <- Headers].
+
+headers(SessionId, Delivery, Properties, AckMode, Version) ->
+ headers_extra(SessionId, AckMode, Version, Delivery) ++
+ headers_post_process(message_headers(Properties)).
+
+tag_to_id(<<?INTERNAL_TAG_PREFIX, Id/binary>>) ->
+ {ok, {internal, binary_to_list(Id)}};
+tag_to_id(<<?QUEUE_TAG_PREFIX, Id/binary>>) ->
+ {ok, {queue, binary_to_list(Id)}};
+tag_to_id(Other) when is_binary(Other) ->
+ {error, {unknown, binary_to_list(Other)}}.
+
+user_header(Hdr)
+ when Hdr =:= ?HEADER_CONTENT_TYPE orelse
+ Hdr =:= ?HEADER_CONTENT_ENCODING orelse
+ Hdr =:= ?HEADER_PERSISTENT orelse
+ Hdr =:= ?HEADER_PRIORITY orelse
+ Hdr =:= ?HEADER_CORRELATION_ID orelse
+ Hdr =:= ?HEADER_REPLY_TO orelse
+ Hdr =:= ?HEADER_EXPIRATION orelse
+ Hdr =:= ?HEADER_AMQP_MESSAGE_ID orelse
+ Hdr =:= ?HEADER_TIMESTAMP orelse
+ Hdr =:= ?HEADER_TYPE orelse
+ Hdr =:= ?HEADER_USER_ID orelse
+ Hdr =:= ?HEADER_APP_ID orelse
+ Hdr =:= ?HEADER_DESTINATION ->
+ false;
+user_header(_) ->
+ true.
+
+parse_message_id(MessageId) ->
+ case split(MessageId, ?MESSAGE_ID_SEPARATOR) of
+ [ConsumerTag, SessionId, DeliveryTag] ->
+ {ok, {list_to_binary(ConsumerTag),
+ SessionId,
+ list_to_integer(DeliveryTag)}};
+ _ ->
+ {error, invalid_message_id}
+ end.
+
+negotiate_version(ClientVers, ServerVers) ->
+ Common = lists:filter(fun(Ver) ->
+ lists:member(Ver, ServerVers)
+ end, ClientVers),
+ case Common of
+ [] ->
+ {error, no_common_version};
+ [H|T] ->
+ {ok, lists:foldl(fun(Ver, AccN) ->
+ max_version(Ver, AccN)
+ end, H, T)}
+ end.
+
+max_version(V, V) ->
+ V;
+max_version(V1, V2) ->
+ Split = fun(X) -> re:split(X, "\\.", [{return, list}]) end,
+ find_max_version({V1, Split(V1)}, {V2, Split(V2)}).
+
+find_max_version({V1, [X|T1]}, {V2, [X|T2]}) ->
+ find_max_version({V1, T1}, {V2, T2});
+find_max_version({V1, [X]}, {V2, [Y]}) ->
+ case list_to_integer(X) >= list_to_integer(Y) of
+ true -> V1;
+ false -> V2
+ end;
+find_max_version({_V1, []}, {V2, Y}) when length(Y) > 0 ->
+ V2;
+find_max_version({V1, X}, {_V2, []}) when length(X) > 0 ->
+ V1.
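+
+%% Worked example: negotiate_version(["1.0", "1.1"], ["1.0", "1.1", "1.2"])
+%% returns {ok, "1.1"}, the highest version common to both sides; disjoint
+%% lists give {error, no_common_version}.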
+
+%% ---- Header processing helpers ----
+
+longstr_field(K, V) ->
+ {list_to_binary(K), longstr, list_to_binary(V)}.
+
+maybe_header(_Key, undefined, Acc) ->
+ Acc;
+maybe_header(?HEADER_PERSISTENT, 2, Acc) ->
+ [{?HEADER_PERSISTENT, "true"} | Acc];
+maybe_header(Key, Value, Acc) when is_binary(Value) ->
+ [{Key, binary_to_list(Value)} | Acc];
+maybe_header(Key, Value, Acc) when is_integer(Value) ->
+ [{Key, integer_to_list(Value)}| Acc];
+maybe_header(_Key, _Value, Acc) ->
+ Acc.
+
+create_message_id(ConsumerTag, SessionId, DeliveryTag) ->
+ [ConsumerTag,
+ ?MESSAGE_ID_SEPARATOR,
+ SessionId,
+ ?MESSAGE_ID_SEPARATOR,
+ integer_to_list(DeliveryTag)].
+
+trim_headers(Frame = #stomp_frame{headers = Hdrs}) ->
+ Frame#stomp_frame{headers = [{K, string:strip(V, left)} || {K, V} <- Hdrs]}.
+
+internal_tag(Base) ->
+ list_to_binary(?INTERNAL_TAG_PREFIX ++ Base).
+
+queue_tag(Base) ->
+ list_to_binary(?QUEUE_TAG_PREFIX ++ Base).
+
+ack_header_name("1.2") -> ?HEADER_ID;
+ack_header_name("1.1") -> ?HEADER_MESSAGE_ID;
+ack_header_name("1.0") -> ?HEADER_MESSAGE_ID.
+
+msg_header_name("1.2") -> ?HEADER_ACK;
+msg_header_name("1.1") -> ?HEADER_MESSAGE_ID;
+msg_header_name("1.0") -> ?HEADER_MESSAGE_ID.
+
+build_arguments(Headers) ->
+ Arguments =
+ lists:foldl(fun({K, V}, Acc) ->
+ case lists:member(K, ?HEADER_ARGUMENTS) of
+ true -> [build_argument(K, V) | Acc];
+ false -> Acc
+ end
+ end,
+ [],
+ Headers),
+ {arguments, Arguments}.
+
+build_argument(?HEADER_X_DEAD_LETTER_EXCHANGE, Val) ->
+ {list_to_binary(?HEADER_X_DEAD_LETTER_EXCHANGE), longstr,
+ list_to_binary(string:strip(Val))};
+build_argument(?HEADER_X_DEAD_LETTER_ROUTING_KEY, Val) ->
+ {list_to_binary(?HEADER_X_DEAD_LETTER_ROUTING_KEY), longstr,
+ list_to_binary(string:strip(Val))};
+build_argument(?HEADER_X_EXPIRES, Val) ->
+ {list_to_binary(?HEADER_X_EXPIRES), long,
+ list_to_integer(string:strip(Val))};
+build_argument(?HEADER_X_MAX_LENGTH, Val) ->
+ {list_to_binary(?HEADER_X_MAX_LENGTH), long,
+ list_to_integer(string:strip(Val))};
+build_argument(?HEADER_X_MAX_LENGTH_BYTES, Val) ->
+ {list_to_binary(?HEADER_X_MAX_LENGTH_BYTES), long,
+ list_to_integer(string:strip(Val))};
+build_argument(?HEADER_X_MAX_PRIORITY, Val) ->
+ {list_to_binary(?HEADER_X_MAX_PRIORITY), long,
+ list_to_integer(string:strip(Val))};
+build_argument(?HEADER_X_MESSAGE_TTL, Val) ->
+ {list_to_binary(?HEADER_X_MESSAGE_TTL), long,
+ list_to_integer(string:strip(Val))};
+build_argument(?HEADER_X_QUEUE_TYPE, Val) ->
+ {list_to_binary(?HEADER_X_QUEUE_TYPE), longstr,
+ list_to_binary(string:strip(Val))}.
+
+build_params(EndPoint, Headers) ->
+ Params = lists:foldl(fun({K, V}, Acc) ->
+ case lists:member(K, ?HEADER_PARAMS) of
+ true -> [build_param(K, V) | Acc];
+ false -> Acc
+ end
+ end,
+ [],
+ Headers),
+ rabbit_misc:plmerge(Params, default_params(EndPoint)).
+
+build_param(?HEADER_PERSISTENT, Val) ->
+ {durable, string_to_boolean(Val)};
+
+build_param(?HEADER_DURABLE, Val) ->
+ {durable, string_to_boolean(Val)};
+
+build_param(?HEADER_AUTO_DELETE, Val) ->
+ {auto_delete, string_to_boolean(Val)};
+
+build_param(?HEADER_EXCLUSIVE, Val) ->
+ {exclusive, string_to_boolean(Val)}.
+
+default_params({queue, _}) ->
+ [{durable, true}];
+
+default_params({exchange, _}) ->
+ [{exclusive, false}, {auto_delete, true}];
+
+default_params({topic, _}) ->
+ [{exclusive, false}, {auto_delete, true}];
+
+default_params(_) ->
+ [{durable, false}].
+
+string_to_boolean("True") ->
+ true;
+string_to_boolean("true") ->
+ true;
+string_to_boolean("False") ->
+ false;
+string_to_boolean("false") ->
+ false;
+string_to_boolean(_) ->
+ undefined.
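+
+%% Example (assuming "durable" is listed in ?HEADER_PARAMS and that
+%% rabbit_misc:plmerge/2 lets explicit headers win over defaults):
+%% build_params({queue, "orders"}, [{"durable", "false"}]) yields
+%% durable = false, overriding the /queue default of durable = true
+%% from default_params/1.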
+
+has_durable_header(Frame) ->
+ rabbit_stomp_frame:boolean_header(
+ Frame, ?HEADER_DURABLE, false) or
+ rabbit_stomp_frame:boolean_header(
+ Frame, ?HEADER_PERSISTENT, false).
+
+%%--------------------------------------------------------------------
+%% Destination Formatting
+%%--------------------------------------------------------------------
+
+format_destination("", RoutingKey) ->
+ ?QUEUE_PREFIX ++ "/" ++ escape(RoutingKey);
+format_destination("amq.topic", RoutingKey) ->
+ ?TOPIC_PREFIX ++ "/" ++ escape(RoutingKey);
+format_destination(Exchange, "") ->
+ ?EXCHANGE_PREFIX ++ "/" ++ escape(Exchange);
+format_destination(Exchange, RoutingKey) ->
+ ?EXCHANGE_PREFIX ++ "/" ++ escape(Exchange) ++ "/" ++ escape(RoutingKey).
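+
+%% Examples, assuming the usual prefix macros from
+%% rabbit_routing_prefixes.hrl ("/queue", "/topic", "/exchange"):
+%%   format_destination("", "orders")        -> "/queue/orders"
+%%   format_destination("amq.topic", "x.y")  -> "/topic/x.y"
+%%   format_destination("my.exchange", "rk") -> "/exchange/my.exchange/rk"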
+
+%%--------------------------------------------------------------------
+%% Destination Parsing
+%%--------------------------------------------------------------------
+
+subscription_queue_name(Destination, SubscriptionId, Frame) ->
+ case rabbit_stomp_frame:header(Frame, ?HEADER_X_QUEUE_NAME, undefined) of
+ undefined ->
+ %% We need a queue name that a) can be derived from the
+ %% Destination and SubscriptionId, and b) meets the constraints on
+ %% AMQP queue names. It doesn't need to be secure; we use md5 here
+ %% simply as a convenient means to bound the length.
+ rabbit_guid:string(
+ erlang:md5(
+ term_to_binary_compat:term_to_binary_1(
+ {Destination, SubscriptionId})),
+ "stomp-subscription");
+ Name ->
+ Name
+ end.
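+
+%% If the client supplies an "x-queue-name" header its value is used
+%% verbatim; otherwise the generated name has the form
+%% "stomp-subscription-<opaque suffix>", where the suffix is derived from
+%% the md5 hash above (the exact encoding is up to rabbit_guid:string/2).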
+
+%% ---- Helpers ----
+
+split([], _Splitter) -> [];
+split(Content, Splitter) -> split(Content, [], [], Splitter).
+
+split([], RPart, RParts, _Splitter) ->
+ lists:reverse([lists:reverse(RPart) | RParts]);
+split(Content = [Elem | Rest1], RPart, RParts, Splitter) ->
+ case take_prefix(Splitter, Content) of
+ {ok, Rest2} ->
+ split(Rest2, [], [lists:reverse(RPart) | RParts], Splitter);
+ not_found ->
+ split(Rest1, [Elem | RPart], RParts, Splitter)
+ end.
+
+take_prefix([Char | Prefix], [Char | List]) -> take_prefix(Prefix, List);
+take_prefix([], List) -> {ok, List};
+take_prefix(_Prefix, _List) -> not_found.
+
+escape(Str) -> escape(Str, []).
+
+escape([$/ | Str], Acc) -> escape(Str, "F2%" ++ Acc); %% $/ == '2F'x
+escape([$% | Str], Acc) -> escape(Str, "52%" ++ Acc); %% $% == '25'x
+escape([X | Str], Acc) when X < 32 orelse X > 127 ->
+ escape(Str, revhex(X) ++ "%" ++ Acc);
+escape([C | Str], Acc) -> escape(Str, [C | Acc]);
+escape([], Acc) -> lists:reverse(Acc).
+
+revhex(I) -> hexdig(I) ++ hexdig(I bsr 4).
+
+hexdig(I) -> erlang:integer_to_list(I band 15, 16).
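+
+%% Worked example: escape("a/b%c") =:= "a%2Fb%25c". The accumulator is
+%% built in reverse, which is why the hex digits above ("F2%", "52%" and
+%% revhex/1) are written back to front; the final lists:reverse/1 restores
+%% the conventional "%2F" / "%25" percent-encoding.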
diff --git a/deps/rabbitmq_stomp/test/amqqueue_SUITE.erl b/deps/rabbitmq_stomp/test/amqqueue_SUITE.erl
new file mode 100644
index 0000000000..0474fd67d6
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/amqqueue_SUITE.erl
@@ -0,0 +1,319 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(amqqueue_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+-include("rabbit_stomp.hrl").
+-include("rabbit_stomp_frame.hrl").
+-include("rabbit_stomp_headers.hrl").
+
+-define(QUEUE, <<"TestQueue">>).
+-define(DESTINATION, "/amq/queue/TestQueue").
+
+all() ->
+ [{group, version_to_group_name(V)} || V <- ?SUPPORTED_VERSIONS].
+
+groups() ->
+ Tests = [
+ publish_no_dest_error,
+ publish_unauthorized_error,
+ subscribe_error,
+ subscribe,
+ unsubscribe_ack,
+ subscribe_ack,
+ send,
+ delete_queue_subscribe,
+ temp_destination_queue,
+ temp_destination_in_send,
+ blank_destination_in_send
+ ],
+
+ [{version_to_group_name(V), [sequence], Tests}
+ || V <- ?SUPPORTED_VERSIONS].
+
+version_to_group_name(V) ->
+ list_to_atom(re:replace("version_" ++ V,
+ "\\.",
+ "_",
+ [global, {return, list}])).
+
+init_per_suite(Config) ->
+ Config1 = rabbit_ct_helpers:set_config(Config,
+ [{rmq_nodename_suffix, ?MODULE}]),
+ rabbit_ct_helpers:log_environment(),
+ rabbit_ct_helpers:run_setup_steps(Config1,
+ rabbit_ct_broker_helpers:setup_steps()).
+
+end_per_suite(Config) ->
+ rabbit_ct_helpers:run_teardown_steps(Config,
+ rabbit_ct_broker_helpers:teardown_steps()).
+
+init_per_group(Group, Config) ->
+ Suffix = string:sub_string(atom_to_list(Group), 9),
+ Version = re:replace(Suffix, "_", ".", [global, {return, list}]),
+ rabbit_ct_helpers:set_config(Config, [{version, Version}]).
+
+end_per_group(_Group, Config) -> Config.
+
+init_per_testcase(TestCase, Config) ->
+ Version = ?config(version, Config),
+ StompPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp),
+ {ok, Connection} = amqp_connection:start(#amqp_params_direct{
+ node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename)
+ }),
+ {ok, Channel} = amqp_connection:open_channel(Connection),
+ {ok, Client} = rabbit_stomp_client:connect(Version, StompPort),
+ Config1 = rabbit_ct_helpers:set_config(Config, [
+ {amqp_connection, Connection},
+ {amqp_channel, Channel},
+ {stomp_client, Client}
+ ]),
+ init_per_testcase0(TestCase, Config1).
+
+end_per_testcase(TestCase, Config) ->
+ Connection = ?config(amqp_connection, Config),
+ Channel = ?config(amqp_channel, Config),
+ Client = ?config(stomp_client, Config),
+ rabbit_stomp_client:disconnect(Client),
+ amqp_channel:close(Channel),
+ amqp_connection:close(Connection),
+ end_per_testcase0(TestCase, Config).
+
+init_per_testcase0(publish_unauthorized_error, Config) ->
+ Channel = ?config(amqp_channel, Config),
+ #'queue.declare_ok'{} =
+ amqp_channel:call(Channel, #'queue.declare'{queue = <<"RestrictedQueue">>,
+ auto_delete = true}),
+
+ rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, add_user,
+ [<<"user">>, <<"pass">>, <<"acting-user">>]),
+ rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, set_permissions, [
+ <<"user">>, <<"/">>, <<"nothing">>, <<"nothing">>, <<"nothing">>, <<"acting-user">>]),
+ Version = ?config(version, Config),
+ StompPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp),
+ {ok, ClientFoo} = rabbit_stomp_client:connect(Version, "user", "pass", StompPort),
+ rabbit_ct_helpers:set_config(Config, [{client_foo, ClientFoo}]);
+init_per_testcase0(_, Config) ->
+ Config.
+
+end_per_testcase0(publish_unauthorized_error, Config) ->
+ ClientFoo = ?config(client_foo, Config),
+ rabbit_stomp_client:disconnect(ClientFoo),
+ rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, delete_user,
+ [<<"user">>, <<"acting-user">>]),
+ Config;
+end_per_testcase0(_, Config) ->
+ Config.
+
+publish_no_dest_error(Config) ->
+ Client = ?config(stomp_client, Config),
+ rabbit_stomp_client:send(
+ Client, "SEND", [{"destination", "/exchange/non-existent"}], ["hello"]),
+ {ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
+ "not_found" = proplists:get_value("message", Hdrs),
+ ok.
+
+publish_unauthorized_error(Config) ->
+ ClientFoo = ?config(client_foo, Config),
+ rabbit_stomp_client:send(
+ ClientFoo, "SEND", [{"destination", "/amq/queue/RestrictedQueue"}], ["hello"]),
+ {ok, _Client1, Hdrs, _} = stomp_receive(ClientFoo, "ERROR"),
+ "access_refused" = proplists:get_value("message", Hdrs),
+ ok.
+
+subscribe_error(Config) ->
+ Client = ?config(stomp_client, Config),
+ %% SUBSCRIBE to missing queue
+ rabbit_stomp_client:send(
+ Client, "SUBSCRIBE", [{"destination", ?DESTINATION}]),
+ {ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
+ "not_found" = proplists:get_value("message", Hdrs),
+ ok.
+
+subscribe(Config) ->
+ Channel = ?config(amqp_channel, Config),
+ Client = ?config(stomp_client, Config),
+ #'queue.declare_ok'{} =
+ amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
+ auto_delete = true}),
+
+ %% subscribe and wait for receipt
+ rabbit_stomp_client:send(
+ Client, "SUBSCRIBE", [{"destination", ?DESTINATION}, {"receipt", "foo"}]),
+ {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
+
+ %% send from amqp
+ Method = #'basic.publish'{exchange = <<"">>, routing_key = ?QUEUE},
+
+ amqp_channel:call(Channel, Method, #amqp_msg{props = #'P_basic'{},
+ payload = <<"hello">>}),
+
+ {ok, _Client2, _, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
+ ok.
+
+unsubscribe_ack(Config) ->
+ Channel = ?config(amqp_channel, Config),
+ Client = ?config(stomp_client, Config),
+ Version = ?config(version, Config),
+ #'queue.declare_ok'{} =
+ amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
+ auto_delete = true}),
+ %% subscribe and wait for receipt
+ rabbit_stomp_client:send(
+ Client, "SUBSCRIBE", [{"destination", ?DESTINATION},
+ {"receipt", "rcpt1"},
+ {"ack", "client"},
+ {"id", "subscription-id"}]),
+ {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
+
+ %% send from amqp
+ Method = #'basic.publish'{exchange = <<"">>, routing_key = ?QUEUE},
+
+ amqp_channel:call(Channel, Method, #amqp_msg{props = #'P_basic'{},
+ payload = <<"hello">>}),
+
+ {ok, Client2, Hdrs1, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
+
+ rabbit_stomp_client:send(
+ Client2, "UNSUBSCRIBE", [{"destination", ?DESTINATION},
+ {"id", "subscription-id"}]),
+
+ rabbit_stomp_client:send(
+ Client2, "ACK", [{rabbit_stomp_util:ack_header_name(Version),
+ proplists:get_value(
+ rabbit_stomp_util:msg_header_name(Version), Hdrs1)},
+ {"receipt", "rcpt2"}]),
+
+ {ok, _Client3, Hdrs2, _Body2} = stomp_receive(Client2, "ERROR"),
+ ?assertEqual("Subscription not found",
+ proplists:get_value("message", Hdrs2)),
+ ok.
+
+subscribe_ack(Config) ->
+ Channel = ?config(amqp_channel, Config),
+ Client = ?config(stomp_client, Config),
+ Version = ?config(version, Config),
+ #'queue.declare_ok'{} =
+ amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
+ auto_delete = true}),
+
+ %% subscribe and wait for receipt
+ rabbit_stomp_client:send(
+ Client, "SUBSCRIBE", [{"destination", ?DESTINATION},
+ {"receipt", "foo"},
+ {"ack", "client"}]),
+ {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
+
+ %% send from amqp
+ Method = #'basic.publish'{exchange = <<"">>, routing_key = ?QUEUE},
+
+ amqp_channel:call(Channel, Method, #amqp_msg{props = #'P_basic'{},
+ payload = <<"hello">>}),
+
+ {ok, _Client2, Headers, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
+ false = (Version == "1.2") xor proplists:is_defined(?HEADER_ACK, Headers),
+
+ MsgHeader = rabbit_stomp_util:msg_header_name(Version),
+ AckValue = proplists:get_value(MsgHeader, Headers),
+ AckHeader = rabbit_stomp_util:ack_header_name(Version),
+
+ rabbit_stomp_client:send(Client, "ACK", [{AckHeader, AckValue}]),
+ #'basic.get_empty'{} =
+ amqp_channel:call(Channel, #'basic.get'{queue = ?QUEUE}),
+ ok.
+
+send(Config) ->
+ Channel = ?config(amqp_channel, Config),
+ Client = ?config(stomp_client, Config),
+ #'queue.declare_ok'{} =
+ amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
+ auto_delete = true}),
+
+ %% subscribe and wait for receipt
+ rabbit_stomp_client:send(
+ Client, "SUBSCRIBE", [{"destination", ?DESTINATION}, {"receipt", "foo"}]),
+ {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
+
+ %% send from stomp
+ rabbit_stomp_client:send(
+ Client1, "SEND", [{"destination", ?DESTINATION}], ["hello"]),
+
+ {ok, _Client2, _, [<<"hello">>]} = stomp_receive(Client1, "MESSAGE"),
+ ok.
+
+delete_queue_subscribe(Config) ->
+ Channel = ?config(amqp_channel, Config),
+ Client = ?config(stomp_client, Config),
+ #'queue.declare_ok'{} =
+ amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
+ auto_delete = true}),
+
+ %% subscribe and wait for receipt
+ rabbit_stomp_client:send(
+ Client, "SUBSCRIBE", [{"destination", ?DESTINATION}, {"receipt", "bah"}]),
+ {ok, Client1, _, _} = stomp_receive(Client, "RECEIPT"),
+
+ %% delete queue while subscribed
+ #'queue.delete_ok'{} =
+ amqp_channel:call(Channel, #'queue.delete'{queue = ?QUEUE}),
+
+ {ok, _Client2, Headers, _} = stomp_receive(Client1, "ERROR"),
+
+ ?DESTINATION = proplists:get_value("subscription", Headers),
+
+ % server closes connection
+ ok.
+
+temp_destination_queue(Config) ->
+ Channel = ?config(amqp_channel, Config),
+ Client = ?config(stomp_client, Config),
+ #'queue.declare_ok'{} =
+ amqp_channel:call(Channel, #'queue.declare'{queue = ?QUEUE,
+ auto_delete = true}),
+    rabbit_stomp_client:send(Client, "SEND", [{"destination", ?DESTINATION},
+                                              {"reply-to", "/temp-queue/foo"}],
+                             ["ping"]),
+ amqp_channel:call(Channel,#'basic.consume'{queue = ?QUEUE, no_ack = true}),
+ receive #'basic.consume_ok'{consumer_tag = _Tag} -> ok end,
+ ReplyTo = receive {#'basic.deliver'{delivery_tag = _DTag},
+ #'amqp_msg'{payload = <<"ping">>,
+ props = #'P_basic'{reply_to = RT}}} -> RT
+ end,
+ ok = amqp_channel:call(Channel,
+ #'basic.publish'{routing_key = ReplyTo},
+ #amqp_msg{payload = <<"pong">>}),
+ {ok, _Client1, _, [<<"pong">>]} = stomp_receive(Client, "MESSAGE"),
+ ok.
+
+temp_destination_in_send(Config) ->
+ Client = ?config(stomp_client, Config),
+    rabbit_stomp_client:send(Client, "SEND", [{"destination", "/temp-queue/foo"}],
+                             ["poing"]),
+ {ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
+ "Invalid destination" = proplists:get_value("message", Hdrs),
+ ok.
+
+blank_destination_in_send(Config) ->
+ Client = ?config(stomp_client, Config),
+    rabbit_stomp_client:send(Client, "SEND", [{"destination", ""}],
+                             ["poing"]),
+ {ok, _Client1, Hdrs, _} = stomp_receive(Client, "ERROR"),
+ "Invalid destination" = proplists:get_value("message", Hdrs),
+ ok.
+
+stomp_receive(Client, Command) ->
+ {#stomp_frame{command = Command,
+ headers = Hdrs,
+ body_iolist = Body}, Client1} =
+ rabbit_stomp_client:recv(Client),
+ {ok, Client1, Hdrs, Body}.
+
diff --git a/deps/rabbitmq_stomp/test/command_SUITE.erl b/deps/rabbitmq_stomp/test/command_SUITE.erl
new file mode 100644
index 0000000000..8fe9fa0d0f
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/command_SUITE.erl
@@ -0,0 +1,127 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(command_SUITE).
+-compile([export_all]).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+-include("rabbit_stomp.hrl").
+
+
+-define(COMMAND, 'Elixir.RabbitMQ.CLI.Ctl.Commands.ListStompConnectionsCommand').
+
+all() ->
+ [
+ {group, non_parallel_tests}
+ ].
+
+groups() ->
+ [
+ {non_parallel_tests, [], [
+ merge_defaults,
+ run
+ ]}
+ ].
+
+init_per_suite(Config) ->
+ Config1 = rabbit_ct_helpers:set_config(Config,
+ [{rmq_nodename_suffix, ?MODULE}]),
+ rabbit_ct_helpers:log_environment(),
+ rabbit_ct_helpers:run_setup_steps(Config1,
+ rabbit_ct_broker_helpers:setup_steps()).
+
+end_per_suite(Config) ->
+ rabbit_ct_helpers:run_teardown_steps(Config,
+ rabbit_ct_broker_helpers:teardown_steps()).
+
+init_per_group(_, Config) ->
+ Config.
+
+end_per_group(_, Config) ->
+ Config.
+
+init_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_started(Config, Testcase).
+
+end_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_finished(Config, Testcase).
+
+merge_defaults(_Config) ->
+ {[<<"session_id">>, <<"conn_name">>], #{verbose := false}} =
+ ?COMMAND:merge_defaults([], #{}),
+
+ {[<<"other_key">>], #{verbose := true}} =
+ ?COMMAND:merge_defaults([<<"other_key">>], #{verbose => true}),
+
+ {[<<"other_key">>], #{verbose := false}} =
+ ?COMMAND:merge_defaults([<<"other_key">>], #{verbose => false}).
+
+
+run(Config) ->
+
+ Node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
+ Opts = #{node => Node, timeout => 10000, verbose => false},
+
+ %% No connections
+ [] = 'Elixir.Enum':to_list(?COMMAND:run([], Opts)),
+
+ StompPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp),
+
+ {ok, _Client} = rabbit_stomp_client:connect(StompPort),
+ ct:sleep(100),
+
+ [[{session_id, _}]] =
+ 'Elixir.Enum':to_list(?COMMAND:run([<<"session_id">>], Opts)),
+
+
+ {ok, _Client2} = rabbit_stomp_client:connect(StompPort),
+ ct:sleep(100),
+
+ [[{session_id, _}], [{session_id, _}]] =
+ 'Elixir.Enum':to_list(?COMMAND:run([<<"session_id">>], Opts)),
+
+ Port = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_amqp),
+ start_amqp_connection(network, Node, Port),
+
+ %% There are still just two connections
+ [[{session_id, _}], [{session_id, _}]] =
+ 'Elixir.Enum':to_list(?COMMAND:run([<<"session_id">>], Opts)),
+
+ start_amqp_connection(direct, Node, Port),
+
+    %% Still just the two STOMP connections; the direct AMQP 0-9-1 connection is not listed
+ [[{session_id, _}], [{session_id, _}]] =
+ 'Elixir.Enum':to_list(?COMMAND:run([<<"session_id">>], Opts)),
+
+ %% Verbose returns all keys
+ Infos = lists:map(fun(El) -> atom_to_binary(El, utf8) end, ?INFO_ITEMS),
+ AllKeys = 'Elixir.Enum':to_list(?COMMAND:run(Infos, Opts)),
+ AllKeys = 'Elixir.Enum':to_list(?COMMAND:run([], Opts#{verbose => true})),
+
+ %% There are two connections
+ [First, _Second] = AllKeys,
+
+ %% Keys are INFO_ITEMS
+ KeysCount = length(?INFO_ITEMS),
+ KeysCount = length(First),
+
+ {Keys, _} = lists:unzip(First),
+
+ [] = Keys -- ?INFO_ITEMS,
+ [] = ?INFO_ITEMS -- Keys.
+
+
+start_amqp_connection(Type, Node, Port) ->
+ Params = amqp_params(Type, Node, Port),
+ {ok, _Connection} = amqp_connection:start(Params).
+
+amqp_params(network, _, Port) ->
+ #amqp_params_network{port = Port};
+amqp_params(direct, Node, _) ->
+ #amqp_params_direct{node = Node}.
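
A rough sketch of driving the command module exercised above directly from an Erlang shell on the broker node (assuming the CLI modules are loaded and at least one STOMP client is connected); run/2 returns an Elixir stream, which is why the suite wraps it in 'Elixir.Enum':to_list/1:

    Opts = #{node => node(), timeout => 10000, verbose => false},
    Rows = 'Elixir.Enum':to_list(
             'Elixir.RabbitMQ.CLI.Ctl.Commands.ListStompConnectionsCommand':run(
               [<<"session_id">>, <<"conn_name">>], Opts)),
    %% each element is a proplist of the requested keys,
    %% e.g. [{session_id, ...}, {conn_name, ...}]
    io:format("~p~n", [Rows]).
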
diff --git a/deps/rabbitmq_stomp/test/config_schema_SUITE.erl b/deps/rabbitmq_stomp/test/config_schema_SUITE.erl
new file mode 100644
index 0000000000..8d340810f7
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/config_schema_SUITE.erl
@@ -0,0 +1,55 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(config_schema_SUITE).
+
+-compile(export_all).
+
+all() ->
+ [
+ run_snippets
+ ].
+
+%% -------------------------------------------------------------------
+%% Testsuite setup/teardown.
+%% -------------------------------------------------------------------
+
+init_per_suite(Config) ->
+ rabbit_ct_helpers:log_environment(),
+ Config1 = rabbit_ct_helpers:run_setup_steps(Config),
+ rabbit_ct_config_schema:init_schemas(rabbitmq_stomp, Config1).
+
+
+end_per_suite(Config) ->
+ rabbit_ct_helpers:run_teardown_steps(Config).
+
+init_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_started(Config, Testcase),
+ Config1 = rabbit_ct_helpers:set_config(Config, [
+ {rmq_nodename_suffix, Testcase}
+ ]),
+ rabbit_ct_helpers:run_steps(Config1,
+ rabbit_ct_broker_helpers:setup_steps() ++
+ rabbit_ct_client_helpers:setup_steps()).
+
+end_per_testcase(Testcase, Config) ->
+ Config1 = rabbit_ct_helpers:run_steps(Config,
+ rabbit_ct_client_helpers:teardown_steps() ++
+ rabbit_ct_broker_helpers:teardown_steps()),
+ rabbit_ct_helpers:testcase_finished(Config1, Testcase).
+
+%% -------------------------------------------------------------------
+%% Testcases.
+%% -------------------------------------------------------------------
+
+run_snippets(Config) ->
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0,
+ ?MODULE, run_snippets1, [Config]).
+
+run_snippets1(Config) ->
+ rabbit_ct_config_schema:run_snippets(Config).
+
diff --git a/deps/rabbitmq_stomp/test/config_schema_SUITE_data/certs/cacert.pem b/deps/rabbitmq_stomp/test/config_schema_SUITE_data/certs/cacert.pem
new file mode 100644
index 0000000000..eaf6b67806
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/config_schema_SUITE_data/certs/cacert.pem
@@ -0,0 +1 @@
+I'm not a certificate
diff --git a/deps/rabbitmq_stomp/test/config_schema_SUITE_data/certs/cert.pem b/deps/rabbitmq_stomp/test/config_schema_SUITE_data/certs/cert.pem
new file mode 100644
index 0000000000..eaf6b67806
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/config_schema_SUITE_data/certs/cert.pem
@@ -0,0 +1 @@
+I'm not a certificate
diff --git a/deps/rabbitmq_stomp/test/config_schema_SUITE_data/certs/key.pem b/deps/rabbitmq_stomp/test/config_schema_SUITE_data/certs/key.pem
new file mode 100644
index 0000000000..eaf6b67806
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/config_schema_SUITE_data/certs/key.pem
@@ -0,0 +1 @@
+I'm not a certificate
diff --git a/deps/rabbitmq_stomp/test/config_schema_SUITE_data/rabbitmq_stomp.snippets b/deps/rabbitmq_stomp/test/config_schema_SUITE_data/rabbitmq_stomp.snippets
new file mode 100644
index 0000000000..6081240c68
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/config_schema_SUITE_data/rabbitmq_stomp.snippets
@@ -0,0 +1,97 @@
+[{listener_port,
+ "stomp.listeners.tcp.1 = 12345",
+ [{rabbitmq_stomp,[{tcp_listeners,[12345]}]}],
+ [rabbitmq_stomp]},
+ {listeners_ip,
+ "stomp.listeners.tcp.1 = 127.0.0.1:61613
+ stomp.listeners.tcp.2 = ::1:61613",
+ [{rabbitmq_stomp,[{tcp_listeners,[{"127.0.0.1",61613},{"::1",61613}]}]}],
+ [rabbitmq_stomp]},
+
+ {listener_tcp_options,
+ "stomp.listeners.tcp.1 = 127.0.0.1:61613
+ stomp.listeners.tcp.2 = ::1:61613
+
+ stomp.tcp_listen_options.backlog = 2048
+ stomp.tcp_listen_options.recbuf = 8192
+ stomp.tcp_listen_options.sndbuf = 8192
+
+ stomp.tcp_listen_options.keepalive = true
+ stomp.tcp_listen_options.nodelay = true
+
+ stomp.tcp_listen_options.exit_on_close = true
+
+ stomp.tcp_listen_options.send_timeout = 120
+",
+ [{rabbitmq_stomp,[
+ {tcp_listeners,[
+ {"127.0.0.1",61613},
+ {"::1",61613}
+ ]}
+ , {tcp_listen_options, [
+ {backlog, 2048},
+ {exit_on_close, true},
+
+ {recbuf, 8192},
+ {sndbuf, 8192},
+
+ {send_timeout, 120},
+
+ {keepalive, true},
+ {nodelay, true}
+ ]}
+ ]}],
+ [rabbitmq_stomp]},
+
+ {ssl,
+ "ssl_options.cacertfile = test/config_schema_SUITE_data/certs/cacert.pem
+ ssl_options.certfile = test/config_schema_SUITE_data/certs/cert.pem
+ ssl_options.keyfile = test/config_schema_SUITE_data/certs/key.pem
+ ssl_options.verify = verify_peer
+ ssl_options.fail_if_no_peer_cert = true
+
+ stomp.listeners.tcp.1 = 61613
+ stomp.listeners.ssl.1 = 61614",
+ [{rabbit,
+ [{ssl_options,
+ [{cacertfile,"test/config_schema_SUITE_data/certs/cacert.pem"},
+ {certfile,"test/config_schema_SUITE_data/certs/cert.pem"},
+ {keyfile,"test/config_schema_SUITE_data/certs/key.pem"},
+ {verify,verify_peer},
+ {fail_if_no_peer_cert,true}]}]},
+ {rabbitmq_stomp,[{tcp_listeners,[61613]},{ssl_listeners,[61614]}]}],
+ [rabbitmq_stomp]},
+ {defaults,
+ "stomp.default_user = guest
+ stomp.default_pass = guest
+ stomp.proxy_protocol = false
+ stomp.hide_server_info = false",
+ [{rabbitmq_stomp,[{default_user,[{login,"guest"},{passcode,"guest"}]},
+ {proxy_protocol,false},{hide_server_info,false}]}],
+ [rabbitmq_stomp]},
+ {ssl_cert_login,
+ "stomp.ssl_cert_login = true",
+ [{rabbitmq_stomp,[{ssl_cert_login,true}]}],
+ [rabbitmq_stomp]},
+ {proxy_protocol,
+ "stomp.default_user = guest
+ stomp.default_pass = guest
+ stomp.implicit_connect = true
+ stomp.proxy_protocol = true",
+ [{rabbitmq_stomp,[{default_user,[{login,"guest"},{passcode,"guest"}]},
+ {implicit_connect,true},
+ {proxy_protocol,true}]}],
+ [rabbitmq_stomp]},
+ {default_vhost,
+ "stomp.default_vhost = /",
+ [{rabbitmq_stomp,[{default_vhost,<<"/">>}]}],
+ [rabbitmq_stomp]},
+ {default_topic_exchange,
+ "stomp.default_topic_exchange = my.fancy.topic",
+ [{rabbitmq_stomp,[{default_topic_exchange,<<"my.fancy.topic">>}]}],
+ [rabbitmq_stomp]},
+ {hide_server_info,
+ "stomp.hide_server_info = true",
+ [{rabbitmq_stomp,[{hide_server_info,true}]}],
+ [rabbitmq_stomp]}
+].
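
Each entry above pairs a rabbitmq.conf fragment with the application environment it is expected to translate to. As a rough illustration (a hypothetical shell check, not part of the suite), the first snippet's result would be visible on a running node as:

    %% after booting a node configured with "stomp.listeners.tcp.1 = 12345"
    {ok, Listeners} = application:get_env(rabbitmq_stomp, tcp_listeners),
    [12345] = Listeners.
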
diff --git a/deps/rabbitmq_stomp/test/connections_SUITE.erl b/deps/rabbitmq_stomp/test/connections_SUITE.erl
new file mode 100644
index 0000000000..4f9b027bb9
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/connections_SUITE.erl
@@ -0,0 +1,160 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(connections_SUITE).
+-compile(export_all).
+
+-import(rabbit_misc, [pget/2]).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+-include("rabbit_stomp_frame.hrl").
+-define(DESTINATION, "/queue/bulk-test").
+
+all() ->
+ [
+ messages_not_dropped_on_disconnect,
+ direct_client_connections_are_not_leaked,
+ stats_are_not_leaked,
+ stats,
+ heartbeat
+ ].
+
+merge_app_env(Config) ->
+ rabbit_ct_helpers:merge_app_env(Config,
+ {rabbit, [
+ {collect_statistics, basic},
+ {collect_statistics_interval, 100}
+ ]}).
+
+init_per_suite(Config) ->
+ Config1 = rabbit_ct_helpers:set_config(Config,
+ [{rmq_nodename_suffix, ?MODULE}]),
+ rabbit_ct_helpers:log_environment(),
+ rabbit_ct_helpers:run_setup_steps(Config1,
+ [ fun merge_app_env/1 ] ++
+ rabbit_ct_broker_helpers:setup_steps()).
+
+
+end_per_suite(Config) ->
+ rabbit_ct_helpers:run_teardown_steps(Config,
+ rabbit_ct_broker_helpers:teardown_steps()).
+
+-define(GARBAGE, <<"bdaf63dda9d78b075c748b740e7c3510ad203b07\nbdaf63dd">>).
+
+count_connections(Config) ->
+ StompPort = get_stomp_port(Config),
+    %% The CT broker helpers assign the STOMP port for this node; the default
+    %% port, 61613, sits in the middle of the ephemeral port range on many
+    %% operating systems and may therefore already be in use.
+ IPv4Count = try
+        %% Count IPv4 connections. On some platforms, the IPv6 listener
+        %% implicitly accepts IPv4 connections too, so there is no separate
+        %% IPv4 listener; hence this try/catch. This is the case on Linux
+        %% when net.ipv6.bindv6only is disabled (the default in most cases).
+ rpc_count_connections(Config, {acceptor, {0,0,0,0}, StompPort})
+ catch
+ _:{badarg, _} -> 0;
+ _:Other -> exit({foo, Other})
+ end,
+ IPv6Count = try
+ %% Count IPv6 connections. We also use a try/catch block in case
+ %% the host is not configured for IPv6.
+ rpc_count_connections(Config, {acceptor, {0,0,0,0,0,0,0,0}, StompPort})
+ catch
+ _:{badarg, _} -> 0;
+ _:Other1 -> exit({foo, Other1})
+ end,
+ IPv4Count + IPv6Count.
+
+rpc_count_connections(Config, ConnSpec) ->
+ rabbit_ct_broker_helpers:rpc(Config, 0,
+ ranch_server, count_connections, [ConnSpec]).
+
+direct_client_connections_are_not_leaked(Config) ->
+ StompPort = get_stomp_port(Config),
+ N = count_connections(Config),
+ lists:foreach(fun (_) ->
+ {ok, Client = {Socket, _}} = rabbit_stomp_client:connect(StompPort),
+ %% send garbage which trips up the parser
+ gen_tcp:send(Socket, ?GARBAGE),
+ rabbit_stomp_client:send(
+ Client, "LOL", [{"", ""}])
+ end,
+ lists:seq(1, 100)),
+ timer:sleep(5000),
+ N = count_connections(Config),
+ ok.
+
+messages_not_dropped_on_disconnect(Config) ->
+ StompPort = get_stomp_port(Config),
+ N = count_connections(Config),
+ {ok, Client} = rabbit_stomp_client:connect(StompPort),
+ N1 = N + 1,
+ N1 = count_connections(Config),
+ [rabbit_stomp_client:send(
+ Client, "SEND", [{"destination", ?DESTINATION}],
+ [integer_to_list(Count)]) || Count <- lists:seq(1, 1000)],
+ rabbit_stomp_client:disconnect(Client),
+ QName = rabbit_misc:r(<<"/">>, queue, <<"bulk-test">>),
+ timer:sleep(3000),
+ N = count_connections(Config),
+ {ok, Q} = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue, lookup, [QName]),
+ Messages = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_amqqueue, info, [Q, [messages]]),
+ 1000 = pget(messages, Messages),
+ ok.
+
+get_stomp_port(Config) ->
+ rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp).
+
+stats_are_not_leaked(Config) ->
+ StompPort = get_stomp_port(Config),
+ N = rabbit_ct_broker_helpers:rpc(Config, 0, ets, info, [connection_metrics, size]),
+ {ok, C} = gen_tcp:connect("localhost", StompPort, []),
+ Bin = <<"GET / HTTP/1.1\r\nHost: www.rabbitmq.com\r\nUser-Agent: curl/7.43.0\r\nAccept: */*\n\n">>,
+ gen_tcp:send(C, Bin),
+ gen_tcp:close(C),
+    timer:sleep(1000), %% wait for stats, which are emitted every 100ms
+ N = rabbit_ct_broker_helpers:rpc(Config, 0, ets, info, [connection_metrics, size]),
+ ok.
+
+stats(Config) ->
+ StompPort = get_stomp_port(Config),
+ {ok, Client} = rabbit_stomp_client:connect(StompPort),
+    timer:sleep(1000), %% wait for stats, which are emitted every 100ms
+ %% Retrieve the connection Pid
+ [Reader] = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_stomp, list, []),
+ [{_, Pid}] = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_stomp_reader,
+ info, [Reader, [connection]]),
+    %% Verify the content of the metrics: garbage_collection must be present
+ [{Pid, Props}] = rabbit_ct_broker_helpers:rpc(Config, 0, ets, lookup,
+ [connection_metrics, Pid]),
+ true = proplists:is_defined(garbage_collection, Props),
+ 0 = proplists:get_value(timeout, Props),
+ %% If the coarse entry is present, stats were successfully emitted
+ [{Pid, _, _, _, _}] = rabbit_ct_broker_helpers:rpc(Config, 0, ets, lookup,
+ [connection_coarse_metrics, Pid]),
+ rabbit_stomp_client:disconnect(Client),
+ ok.
+
+heartbeat(Config) ->
+ StompPort = get_stomp_port(Config),
+ {ok, Client} = rabbit_stomp_client:connect("1.2", "guest", "guest", StompPort,
+ [{"heart-beat", "5000,7000"}]),
+    timer:sleep(1000), %% wait for stats, which are emitted every 100ms
+ %% Retrieve the connection Pid
+ [Reader] = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_stomp, list, []),
+ [{_, Pid}] = rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_stomp_reader,
+ info, [Reader, [connection]]),
+ %% Verify the content of the heartbeat timeout
+ [{Pid, Props}] = rabbit_ct_broker_helpers:rpc(Config, 0, ets, lookup,
+ [connection_metrics, Pid]),
+ 5 = proplists:get_value(timeout, Props),
+ rabbit_stomp_client:disconnect(Client),
+ ok.
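
The heartbeat case negotiates keepalives via the CONNECT heart-beat header. Roughly, the helper client sends a frame like the sketch below (illustrative; the exact header set is an assumption); the first value, 5000 ms, matches the 5 second timeout the test then reads back from connection_metrics:

    Connect = <<"CONNECT\n",
                "accept-version:1.2\n",
                "login:guest\n",
                "passcode:guest\n",
                "heart-beat:5000,7000\n",
                "\n", 0>>.
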
diff --git a/deps/rabbitmq_stomp/test/frame_SUITE.erl b/deps/rabbitmq_stomp/test/frame_SUITE.erl
new file mode 100644
index 0000000000..da191ac12a
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/frame_SUITE.erl
@@ -0,0 +1,191 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(frame_SUITE).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+-include("rabbit_stomp_frame.hrl").
+-include("rabbit_stomp_headers.hrl").
+-compile(export_all).
+
+all() ->
+ [
+ parse_simple_frame,
+ parse_simple_frame_crlf,
+ parse_command_only,
+ parse_command_prefixed_with_newline,
+ parse_ignore_empty_frames,
+ parse_heartbeat_interframe,
+ parse_crlf_interframe,
+ parse_carriage_return_not_ignored_interframe,
+ parse_carriage_return_mid_command,
+ parse_carriage_return_end_command,
+ parse_resume_mid_command,
+ parse_resume_mid_header_key,
+ parse_resume_mid_header_val,
+ parse_resume_mid_body,
+ parse_no_header_stripping,
+ parse_multiple_headers,
+ header_no_colon,
+ no_nested_escapes,
+ header_name_with_cr,
+ header_value_with_cr,
+ header_value_with_colon,
+ headers_escaping_roundtrip,
+ headers_escaping_roundtrip_without_trailing_lf
+ ].
+
+parse_simple_frame(_) ->
+ parse_simple_frame_gen("\n").
+
+parse_simple_frame_crlf(_) ->
+ parse_simple_frame_gen("\r\n").
+
+parse_simple_frame_gen(Term) ->
+ Headers = [{"header1", "value1"}, {"header2", "value2"}],
+ Content = frame_string("COMMAND",
+ Headers,
+ "Body Content",
+ Term),
+ {"COMMAND", Frame, _State} = parse_complete(Content),
+ [?assertEqual({ok, Value},
+ rabbit_stomp_frame:header(Frame, Key)) ||
+ {Key, Value} <- Headers],
+ #stomp_frame{body_iolist = Body} = Frame,
+ ?assertEqual(<<"Body Content">>, iolist_to_binary(Body)).
+
+parse_command_only(_) ->
+ {ok, #stomp_frame{command = "COMMAND"}, _Rest} = parse("COMMAND\n\n\0").
+
+parse_command_prefixed_with_newline(_) ->
+ {ok, #stomp_frame{command = "COMMAND"}, _Rest} = parse("\nCOMMAND\n\n\0").
+
+parse_ignore_empty_frames(_) ->
+ {ok, #stomp_frame{command = "COMMAND"}, _Rest} = parse("\0\0COMMAND\n\n\0").
+
+parse_heartbeat_interframe(_) ->
+ {ok, #stomp_frame{command = "COMMAND"}, _Rest} = parse("\nCOMMAND\n\n\0").
+
+parse_crlf_interframe(_) ->
+ {ok, #stomp_frame{command = "COMMAND"}, _Rest} = parse("\r\nCOMMAND\n\n\0").
+
+parse_carriage_return_not_ignored_interframe(_) ->
+ {error, {unexpected_chars_between_frames, "\rC"}} = parse("\rCOMMAND\n\n\0").
+
+parse_carriage_return_mid_command(_) ->
+ {error, {unexpected_chars_in_command, "\rA"}} = parse("COMM\rAND\n\n\0").
+
+parse_carriage_return_end_command(_) ->
+ {error, {unexpected_chars_in_command, "\r\r"}} = parse("COMMAND\r\r\n\n\0").
+
+parse_resume_mid_command(_) ->
+ First = "COMM",
+ Second = "AND\n\n\0",
+ {more, Resume} = parse(First),
+ {ok, #stomp_frame{command = "COMMAND"}, _Rest} = parse(Second, Resume).
+
+parse_resume_mid_header_key(_) ->
+ First = "COMMAND\nheade",
+ Second = "r1:value1\n\n\0",
+ {more, Resume} = parse(First),
+ {ok, Frame = #stomp_frame{command = "COMMAND"}, _Rest} =
+ parse(Second, Resume),
+ ?assertEqual({ok, "value1"},
+ rabbit_stomp_frame:header(Frame, "header1")).
+
+parse_resume_mid_header_val(_) ->
+ First = "COMMAND\nheader1:val",
+ Second = "ue1\n\n\0",
+ {more, Resume} = parse(First),
+ {ok, Frame = #stomp_frame{command = "COMMAND"}, _Rest} =
+ parse(Second, Resume),
+ ?assertEqual({ok, "value1"},
+ rabbit_stomp_frame:header(Frame, "header1")).
+
+parse_resume_mid_body(_) ->
+ First = "COMMAND\n\nABC",
+ Second = "DEF\0",
+ {more, Resume} = parse(First),
+ {ok, #stomp_frame{command = "COMMAND", body_iolist = Body}, _Rest} =
+ parse(Second, Resume),
+ ?assertEqual([<<"ABC">>, <<"DEF">>], Body).
+
+parse_no_header_stripping(_) ->
+ Content = "COMMAND\nheader: foo \n\n\0",
+ {ok, Frame, _} = parse(Content),
+ {ok, Val} = rabbit_stomp_frame:header(Frame, "header"),
+ ?assertEqual(" foo ", Val).
+
+parse_multiple_headers(_) ->
+ Content = "COMMAND\nheader:correct\nheader:incorrect\n\n\0",
+ {ok, Frame, _} = parse(Content),
+ {ok, Val} = rabbit_stomp_frame:header(Frame, "header"),
+ ?assertEqual("correct", Val).
+
+header_no_colon(_) ->
+ Content = "COMMAND\n"
+ "hdr1:val1\n"
+ "hdrerror\n"
+ "hdr2:val2\n"
+ "\n\0",
+ ?assertEqual(parse(Content), {error, {header_no_value, "hdrerror"}}).
+
+no_nested_escapes(_) ->
+ Content = "COM\\\\rAND\n" % no escapes
+ "hdr\\\\rname:" % one escape
+ "hdr\\\\rval\n\n\0", % one escape
+ {ok, Frame, _} = parse(Content),
+ ?assertEqual(Frame,
+ #stomp_frame{command = "COM\\\\rAND",
+ headers = [{"hdr\\rname", "hdr\\rval"}],
+ body_iolist = []}).
+
+header_name_with_cr(_) ->
+ Content = "COMMAND\nhead\rer:val\n\n\0",
+ {error, {unexpected_chars_in_header, "\re"}} = parse(Content).
+
+header_value_with_cr(_) ->
+ Content = "COMMAND\nheader:val\rue\n\n\0",
+ {error, {unexpected_chars_in_header, "\ru"}} = parse(Content).
+
+header_value_with_colon(_) ->
+ Content = "COMMAND\nheader:val:ue\n\n\0",
+ {ok, Frame, _} = parse(Content),
+ ?assertEqual(Frame,
+ #stomp_frame{ command = "COMMAND",
+ headers = [{"header", "val:ue"}],
+ body_iolist = []}).
+
+test_frame_serialization(Expected, TrailingLF) ->
+ {ok, Frame, _} = parse(Expected),
+ {ok, Val} = rabbit_stomp_frame:header(Frame, "head\r:\ner"),
+ ?assertEqual(":\n\r\\", Val),
+ Serialized = lists:flatten(rabbit_stomp_frame:serialize(Frame, TrailingLF)),
+ ?assertEqual(Expected, rabbit_misc:format("~s", [Serialized])).
+
+headers_escaping_roundtrip(_) ->
+ test_frame_serialization("COMMAND\nhead\\r\\c\\ner:\\c\\n\\r\\\\\n\n\0\n", true).
+
+headers_escaping_roundtrip_without_trailing_lf(_) ->
+ test_frame_serialization("COMMAND\nhead\\r\\c\\ner:\\c\\n\\r\\\\\n\n\0", false).
+
+parse(Content) ->
+ parse(Content, rabbit_stomp_frame:initial_state()).
+parse(Content, State) ->
+ rabbit_stomp_frame:parse(list_to_binary(Content), State).
+
+parse_complete(Content) ->
+ {ok, Frame = #stomp_frame{command = Command}, State} = parse(Content),
+ {Command, Frame, State}.
+
+frame_string(Command, Headers, BodyContent, Term) ->
+ HeaderString =
+ lists:flatten([Key ++ ":" ++ Value ++ Term || {Key, Value} <- Headers]),
+ Command ++ Term ++ HeaderString ++ Term ++ BodyContent ++ "\0" ++ "\n".
+
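
The parse_resume_* cases rely on the parser being resumable across arbitrary split points. Condensed, the calling pattern the suite exercises looks like this (a sketch using the same rabbit_stomp_frame API as the helpers above):

    State0 = rabbit_stomp_frame:initial_state(),
    {more, Resume} = rabbit_stomp_frame:parse(<<"SEND\ndestination:/queue/a\n\nhel">>, State0),
    {ok, Frame, _Rest} = rabbit_stomp_frame:parse(<<"lo", 0>>, Resume),
    {ok, "/queue/a"} = rabbit_stomp_frame:header(Frame, "destination").
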
diff --git a/deps/rabbitmq_stomp/test/proxy_protocol_SUITE.erl b/deps/rabbitmq_stomp/test/proxy_protocol_SUITE.erl
new file mode 100644
index 0000000000..46c1c6c743
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/proxy_protocol_SUITE.erl
@@ -0,0 +1,104 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(proxy_protocol_SUITE).
+-compile([export_all]).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+-define(TIMEOUT, 5000).
+
+all() ->
+ [
+ {group, non_parallel_tests}
+ ].
+
+groups() ->
+ [
+ {non_parallel_tests, [], [
+ proxy_protocol,
+ proxy_protocol_tls
+ ]}
+ ].
+
+init_per_suite(Config) ->
+ rabbit_ct_helpers:log_environment(),
+ Suffix = rabbit_ct_helpers:testcase_absname(Config, "", "-"),
+ Config1 = rabbit_ct_helpers:set_config(Config, [
+ {rmq_nodename_suffix, Suffix},
+ {rmq_certspwd, "bunnychow"},
+ {rabbitmq_ct_tls_verify, verify_none}
+ ]),
+    StompConfig = stomp_config(),
+    rabbit_ct_helpers:run_setup_steps(Config1,
+      [ fun(Conf) -> merge_app_env(StompConfig, Conf) end ] ++
+ rabbit_ct_broker_helpers:setup_steps() ++
+ rabbit_ct_client_helpers:setup_steps()).
+
+stomp_config() ->
+ {rabbitmq_stomp, [
+ {proxy_protocol, true}
+ ]}.
+
+end_per_suite(Config) ->
+ rabbit_ct_helpers:run_teardown_steps(Config,
+ rabbit_ct_client_helpers:teardown_steps() ++
+ rabbit_ct_broker_helpers:teardown_steps()).
+
+init_per_group(_, Config) -> Config.
+end_per_group(_, Config) -> Config.
+
+init_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_started(Config, Testcase).
+
+end_per_testcase(Testcase, Config) ->
+ rabbit_ct_helpers:testcase_finished(Config, Testcase).
+
+proxy_protocol(Config) ->
+ Port = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp),
+ {ok, Socket} = gen_tcp:connect({127,0,0,1}, Port,
+ [binary, {active, false}, {packet, raw}]),
+ ok = inet:send(Socket, "PROXY TCP4 192.168.1.1 192.168.1.2 80 81\r\n"),
+ ok = inet:send(Socket, stomp_connect_frame()),
+ {ok, _Packet} = gen_tcp:recv(Socket, 0, ?TIMEOUT),
+ ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0,
+ ?MODULE, connection_name, []),
+ match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]),
+ gen_tcp:close(Socket),
+ ok.
+
+proxy_protocol_tls(Config) ->
+ app_utils:start_applications([asn1, crypto, public_key, ssl]),
+ Port = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp_tls),
+ {ok, Socket} = gen_tcp:connect({127,0,0,1}, Port,
+ [binary, {active, false}, {packet, raw}]),
+ ok = inet:send(Socket, "PROXY TCP4 192.168.1.1 192.168.1.2 80 81\r\n"),
+ {ok, SslSocket} = ssl:connect(Socket, [], ?TIMEOUT),
+ ok = ssl:send(SslSocket, stomp_connect_frame()),
+ {ok, _Packet} = ssl:recv(SslSocket, 0, ?TIMEOUT),
+ ConnectionName = rabbit_ct_broker_helpers:rpc(Config, 0,
+ ?MODULE, connection_name, []),
+ match = re:run(ConnectionName, <<"^192.168.1.1:80 ">>, [{capture, none}]),
+ gen_tcp:close(Socket),
+ ok.
+
+connection_name() ->
+ Connections = ets:tab2list(connection_created),
+ {_Key, Values} = lists:nth(1, Connections),
+ {_, Name} = lists:keyfind(name, 1, Values),
+ Name.
+
+merge_app_env(StompConfig, Config) ->
+    rabbit_ct_helpers:merge_app_env(Config, StompConfig).
+
+stomp_connect_frame() ->
+ <<"CONNECT\n",
+ "login:guest\n",
+ "passcode:guest\n",
+ "\n",
+      0>>.
\ No newline at end of file
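
Stripped of the test plumbing, the plain-TCP case amounts to sending a PROXY protocol v1 preamble before the first STOMP frame; a minimal sketch, assuming a local broker with proxy_protocol enabled on port 61613:

    {ok, Sock} = gen_tcp:connect({127,0,0,1}, 61613, [binary, {active, false}, {packet, raw}]),
    ok = gen_tcp:send(Sock, <<"PROXY TCP4 192.168.1.1 192.168.1.2 80 81\r\n">>),
    ok = gen_tcp:send(Sock, <<"CONNECT\nlogin:guest\npasscode:guest\n\n", 0>>),
    %% the broker should now report 192.168.1.1:80 as the client address
    {ok, _Connected} = gen_tcp:recv(Sock, 0, 5000).
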
diff --git a/deps/rabbitmq_stomp/test/python_SUITE.erl b/deps/rabbitmq_stomp/test/python_SUITE.erl
new file mode 100644
index 0000000000..9613b25032
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE.erl
@@ -0,0 +1,72 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(python_SUITE).
+-compile(export_all).
+-include_lib("common_test/include/ct.hrl").
+
+all() ->
+ [
+ common,
+ ssl,
+ connect_options
+ ].
+
+init_per_testcase(TestCase, Config) ->
+ Suffix = rabbit_ct_helpers:testcase_absname(Config, TestCase, "-"),
+ Config1 = rabbit_ct_helpers:set_config(Config,
+ [{rmq_certspwd, "bunnychow"},
+ {rmq_nodename_suffix, Suffix}]),
+ rabbit_ct_helpers:log_environment(),
+ Config2 = rabbit_ct_helpers:run_setup_steps(
+ Config1,
+ rabbit_ct_broker_helpers:setup_steps()),
+ DataDir = ?config(data_dir, Config2),
+ PikaDir = filename:join([DataDir, "deps", "pika"]),
+ StomppyDir = filename:join([DataDir, "deps", "stomppy"]),
+ rabbit_ct_helpers:make(Config2, PikaDir, []),
+ rabbit_ct_helpers:make(Config2, StomppyDir, []),
+ Config2.
+
+end_per_testcase(_, Config) ->
+ rabbit_ct_helpers:run_teardown_steps(Config,
+ rabbit_ct_broker_helpers:teardown_steps()).
+
+
+common(Config) ->
+ run(Config, filename:join("src", "test.py")).
+
+connect_options(Config) ->
+ run(Config, filename:join("src", "test_connect_options.py")).
+
+ssl(Config) ->
+ run(Config, filename:join("src", "test_ssl.py")).
+
+run(Config, Test) ->
+ DataDir = ?config(data_dir, Config),
+ CertsDir = rabbit_ct_helpers:get_config(Config, rmq_certsdir),
+ StompPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp),
+ StompPortTls = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp_tls),
+ AmqpPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_amqp),
+ NodeName = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename),
+    PythonPath = os:getenv("PYTHONPATH", ""),
+ os:putenv("PYTHONPATH", filename:join([DataDir, "deps", "pika","pika"])
+ ++":"++
+ filename:join([DataDir, "deps", "stomppy", "stomppy"])
+ ++ ":" ++
+ PythonPath),
+ os:putenv("AMQP_PORT", integer_to_list(AmqpPort)),
+ os:putenv("STOMP_PORT", integer_to_list(StompPort)),
+ os:putenv("STOMP_PORT_TLS", integer_to_list(StompPortTls)),
+ os:putenv("RABBITMQ_NODENAME", atom_to_list(NodeName)),
+ os:putenv("SSL_CERTS_PATH", CertsDir),
+ {ok, _} = rabbit_ct_helpers:exec([filename:join(DataDir, Test)]).
+
+
+cur_dir() ->
+ {ok, Src} = filelib:find_source(?MODULE),
+ filename:dirname(Src).
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/deps/pika/Makefile b/deps/rabbitmq_stomp/test/python_SUITE_data/deps/pika/Makefile
new file mode 100644
index 0000000000..10aa6f0212
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/deps/pika/Makefile
@@ -0,0 +1,27 @@
+UPSTREAM_GIT=https://github.com/pika/pika.git
+REVISION=1.1.0
+
+LIB_DIR=pika
+CHECKOUT_DIR=pika-$(REVISION)
+
+TARGETS=$(LIB_DIR)
+
+all: $(TARGETS)
+
+clean:
+ rm -rf $(LIB_DIR)
+
+distclean: clean
+ rm -rf $(CHECKOUT_DIR)
+
+$(LIB_DIR) : $(CHECKOUT_DIR)
+ rm -rf $@
+ cp -R $< $@
+
+$(CHECKOUT_DIR):
+ git clone --depth 1 --branch $(REVISION) $(UPSTREAM_GIT) $@ || \
+ (rm -rf $@; exit 1)
+
+echo-revision:
+ @echo $(REVISION)
+
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/deps/stomppy/Makefile b/deps/rabbitmq_stomp/test/python_SUITE_data/deps/stomppy/Makefile
new file mode 100644
index 0000000000..40f5bd1db7
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/deps/stomppy/Makefile
@@ -0,0 +1,27 @@
+UPSTREAM_GIT=https://github.com/jasonrbriggs/stomp.py.git
+REVISION=v4.0.16
+
+LIB_DIR=stomppy
+CHECKOUT_DIR=stomppy-git
+
+TARGETS=$(LIB_DIR)
+
+all: $(TARGETS)
+
+clean:
+ rm -rf $(LIB_DIR)
+
+distclean: clean
+ rm -rf $(CHECKOUT_DIR)
+
+$(LIB_DIR) : $(CHECKOUT_DIR)
+ rm -rf $@
+ cp -R $< $@
+
+$(CHECKOUT_DIR):
+ git clone $(UPSTREAM_GIT) $@
+ (cd $@ && git checkout $(REVISION)) || rm -rf $@
+
+echo-revision:
+ @echo $(REVISION)
+
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/ack.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/ack.py
new file mode 100644
index 0000000000..9103bc76ea
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/ack.py
@@ -0,0 +1,252 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import stomp
+import base
+import time
+import os
+
+class TestAck(base.BaseTest):
+
+ def test_ack_client(self):
+ destination = "/queue/ack-test"
+
+ # subscribe and send message
+ self.listener.reset(2) ## expecting 2 messages
+ self.subscribe_dest(self.conn, destination, None,
+ ack='client',
+ headers={'prefetch-count': '10'})
+ self.conn.send(destination, "test1")
+ self.conn.send(destination, "test2")
+ self.assertTrue(self.listener.wait(4), "initial message not received")
+ self.assertEquals(2, len(self.listener.messages))
+
+ # disconnect with no ack
+ self.conn.disconnect()
+
+ # now reconnect
+ conn2 = self.create_connection()
+ try:
+ listener2 = base.WaitableListener()
+ listener2.reset(2)
+ conn2.set_listener('', listener2)
+ self.subscribe_dest(conn2, destination, None,
+ ack='client',
+ headers={'prefetch-count': '10'})
+ self.assertTrue(listener2.wait(), "message not received again")
+ self.assertEquals(2, len(listener2.messages))
+
+ # now ack only the last message - expecting cumulative behaviour
+ mid = listener2.messages[1]['headers'][self.ack_id_source_header]
+ self.ack_message(conn2, mid, None)
+ finally:
+ conn2.disconnect()
+
+ # now reconnect again, shouldn't see the message
+ conn3 = self.create_connection()
+ try:
+ listener3 = base.WaitableListener()
+ conn3.set_listener('', listener3)
+ self.subscribe_dest(conn3, destination, None)
+ self.assertFalse(listener3.wait(3),
+ "unexpected message. ACK not working?")
+ finally:
+ conn3.disconnect()
+
+ def test_ack_client_individual(self):
+ destination = "/queue/ack-test-individual"
+
+ # subscribe and send message
+ self.listener.reset(2) ## expecting 2 messages
+ self.subscribe_dest(self.conn, destination, None,
+ ack='client-individual',
+ headers={'prefetch-count': '10'})
+ self.conn.send(destination, "test1")
+ self.conn.send(destination, "test2")
+ self.assertTrue(self.listener.wait(4), "Both initial messages not received")
+ self.assertEquals(2, len(self.listener.messages))
+
+ # disconnect without acks
+ self.conn.disconnect()
+
+ # now reconnect
+ conn2 = self.create_connection()
+ try:
+ listener2 = base.WaitableListener()
+ listener2.reset(2) ## expect 2 messages
+ conn2.set_listener('', listener2)
+ self.subscribe_dest(conn2, destination, None,
+ ack='client-individual',
+ headers={'prefetch-count': '10'})
+ self.assertTrue(listener2.wait(2.5), "Did not receive 2 messages")
+ self.assertEquals(2, len(listener2.messages), "Not exactly 2 messages received")
+
+ # now ack only the 'test2' message - expecting individual behaviour
+ nummsgs = len(listener2.messages)
+ mid = None
+ for ind in range(nummsgs):
+ if listener2.messages[ind]['message']=="test2":
+ mid = listener2.messages[ind]['headers'][self.ack_id_source_header]
+ self.assertEquals(1, ind, 'Expecting test2 to be second message')
+ break
+ self.assertTrue(mid, "Did not find test2 message id.")
+ self.ack_message(conn2, mid, None)
+ finally:
+ conn2.disconnect()
+
+ # now reconnect again, shouldn't see the message
+ conn3 = self.create_connection()
+ try:
+ listener3 = base.WaitableListener()
+ listener3.reset(2) ## expecting a single message, but wait for two
+ conn3.set_listener('', listener3)
+ self.subscribe_dest(conn3, destination, None)
+ self.assertFalse(listener3.wait(2.5),
+ "Expected to see only one message. ACK not working?")
+ self.assertEquals(1, len(listener3.messages), "Expecting exactly one message")
+ self.assertEquals("test1", listener3.messages[0]['message'], "Unexpected message remains")
+ finally:
+ conn3.disconnect()
+
+ def test_ack_client_tx(self):
+ destination = "/queue/ack-test-tx"
+
+ # subscribe and send message
+ self.listener.reset()
+ self.subscribe_dest(self.conn, destination, None, ack='client')
+ self.conn.send(destination, "test")
+ self.assertTrue(self.listener.wait(3), "initial message not received")
+ self.assertEquals(1, len(self.listener.messages))
+
+ # disconnect with no ack
+ self.conn.disconnect()
+
+ # now reconnect
+ conn2 = self.create_connection()
+ try:
+ tx = "abc"
+ listener2 = base.WaitableListener()
+ conn2.set_listener('', listener2)
+ conn2.begin(transaction=tx)
+ self.subscribe_dest(conn2, destination, None, ack='client')
+ self.assertTrue(listener2.wait(), "message not received again")
+ self.assertEquals(1, len(listener2.messages))
+
+ # now ack
+ mid = listener2.messages[0]['headers'][self.ack_id_source_header]
+ self.ack_message(conn2, mid, None, transaction=tx)
+
+ #now commit
+ conn2.commit(transaction=tx)
+ finally:
+ conn2.disconnect()
+
+ # now reconnect again, shouldn't see the message
+ conn3 = self.create_connection()
+ try:
+ listener3 = base.WaitableListener()
+ conn3.set_listener('', listener3)
+ self.subscribe_dest(conn3, destination, None)
+ self.assertFalse(listener3.wait(3),
+ "unexpected message. TX ACK not working?")
+ finally:
+ conn3.disconnect()
+
+ def test_topic_prefetch(self):
+ destination = "/topic/prefetch-test"
+
+ # subscribe and send message
+ self.listener.reset(6) ## expect 6 messages
+ self.subscribe_dest(self.conn, destination, None,
+ ack='client',
+ headers={'prefetch-count': '5'})
+
+ for x in range(10):
+ self.conn.send(destination, "test" + str(x))
+
+ self.assertFalse(self.listener.wait(3),
+ "Should not have been able to see 6 messages")
+ self.assertEquals(5, len(self.listener.messages))
+
+ def test_nack(self):
+ destination = "/queue/nack-test"
+
+ #subscribe and send
+ self.subscribe_dest(self.conn, destination, None,
+ ack='client-individual')
+ self.conn.send(destination, "nack-test")
+
+ self.assertTrue(self.listener.wait(), "Not received message")
+ message_id = self.listener.messages[0]['headers'][self.ack_id_source_header]
+ self.listener.reset()
+
+ self.nack_message(self.conn, message_id, None)
+ self.assertTrue(self.listener.wait(), "Not received message after NACK")
+ message_id = self.listener.messages[0]['headers'][self.ack_id_source_header]
+ self.ack_message(self.conn, message_id, None)
+
+ def test_nack_multi(self):
+ destination = "/queue/nack-multi"
+
+ self.listener.reset(2)
+
+ #subscribe and send
+ self.subscribe_dest(self.conn, destination, None,
+ ack='client',
+ headers = {'prefetch-count' : '10'})
+ self.conn.send(destination, "nack-test1")
+ self.conn.send(destination, "nack-test2")
+
+ self.assertTrue(self.listener.wait(), "Not received messages")
+ message_id = self.listener.messages[1]['headers'][self.ack_id_source_header]
+ self.listener.reset(2)
+
+ self.nack_message(self.conn, message_id, None)
+ self.assertTrue(self.listener.wait(), "Not received message again")
+ message_id = self.listener.messages[1]['headers'][self.ack_id_source_header]
+ self.ack_message(self.conn, message_id, None)
+
+ def test_nack_without_requeueing(self):
+ destination = "/queue/nack-test-no-requeue"
+
+ self.subscribe_dest(self.conn, destination, None,
+ ack='client-individual')
+ self.conn.send(destination, "nack-test")
+
+ self.assertTrue(self.listener.wait(), "Not received message")
+ message_id = self.listener.messages[0]['headers'][self.ack_id_source_header]
+ self.listener.reset()
+
+ self.conn.send_frame("NACK", {self.ack_id_header: message_id, "requeue": False})
+ self.assertFalse(self.listener.wait(4), "Received message after NACK with requeue = False")
+
+class TestAck11(TestAck):
+
+ def create_connection_obj(self, version='1.1', vhost='/', heartbeats=(0, 0)):
+ conn = stomp.StompConnection11(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))],
+ vhost=vhost,
+ heartbeats=heartbeats)
+ self.ack_id_source_header = 'message-id'
+ self.ack_id_header = 'message-id'
+ return conn
+
+ def test_version(self):
+ self.assertEquals('1.1', self.conn.version)
+
+class TestAck12(TestAck):
+
+ def create_connection_obj(self, version='1.2', vhost='/', heartbeats=(0, 0)):
+ conn = stomp.StompConnection12(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))],
+ vhost=vhost,
+ heartbeats=heartbeats)
+ self.ack_id_source_header = 'ack'
+ self.ack_id_header = 'id'
+ return conn
+
+ def test_version(self):
+ self.assertEquals('1.2', self.conn.version)
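
The ack suites above cover the STOMP acknowledgement modes: 'client' ACKs are cumulative (they acknowledge the identified message and every earlier unacknowledged one on the subscription), while 'client-individual' ACKs apply to a single message; the ack id is taken from the message-id header in STOMP 1.0/1.1 and from the ack header in STOMP 1.2. A sketch of the 1.2 frames involved (illustrative values, shown as Erlang binaries for consistency with the broker-side suites):

    %% server -> client: under STOMP 1.2 the MESSAGE frame carries an "ack" header
    Message = <<"MESSAGE\nsubscription:ctag\nmessage-id:1\nack:12345\n"
                "destination:/queue/ack-test\n\nbody", 0>>,
    %% client -> server: ACK (or NACK) echoes that value in its "id" header
    Ack  = <<"ACK\nid:12345\n\n", 0>>,
    Nack = <<"NACK\nid:12345\n\n", 0>>.
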
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/amqp_headers.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/amqp_headers.py
new file mode 100644
index 0000000000..2c5ee45a8e
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/amqp_headers.py
@@ -0,0 +1,42 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import pika
+import base
+import os
+
+class TestAmqpHeaders(base.BaseTest):
+ def test_headers_to_stomp(self):
+ self.listener.reset(1)
+ queueName='test-amqp-headers-to-stomp'
+
+ # Set up STOMP subscription
+ self.subscribe_dest(self.conn, '/topic/test', None, headers={'x-queue-name': queueName})
+
+ # Set up AMQP connection
+ amqp_params = pika.ConnectionParameters(host='localhost', port=int(os.environ["AMQP_PORT"]))
+ amqp_conn = pika.BlockingConnection(amqp_params)
+ amqp_chan = amqp_conn.channel()
+
+ # publish a message with headers to the named AMQP queue
+ amqp_headers = { 'x-custom-hdr-1': 'value1',
+ 'x-custom-hdr-2': 'value2',
+ 'custom-hdr-3': 'value3' }
+ amqp_props = pika.BasicProperties(headers=amqp_headers)
+ amqp_chan.basic_publish(exchange='', routing_key=queueName, body='Hello World!', properties=amqp_props)
+
+ # check if we receive the message from the STOMP subscription
+ self.assertTrue(self.listener.wait(2), "initial message not received")
+ self.assertEquals(1, len(self.listener.messages))
+ msg = self.listener.messages[0]
+ self.assertEquals('Hello World!', msg['message'])
+ self.assertEquals('value1', msg['headers']['x-custom-hdr-1'])
+ self.assertEquals('value2', msg['headers']['x-custom-hdr-2'])
+ self.assertEquals('value3', msg['headers']['custom-hdr-3'])
+
+ self.conn.disconnect()
+ amqp_conn.close()
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/base.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/base.py
new file mode 100644
index 0000000000..a8f7ef59b9
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/base.py
@@ -0,0 +1,259 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import stomp
+import sys
+import threading
+import os
+
+
+class BaseTest(unittest.TestCase):
+
+ def create_connection_obj(self, version='1.0', vhost='/', heartbeats=(0, 0)):
+ if version == '1.0':
+ conn = stomp.StompConnection10(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))])
+ self.ack_id_source_header = 'message-id'
+ self.ack_id_header = 'message-id'
+ elif version == '1.1':
+ conn = stomp.StompConnection11(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))],
+ vhost=vhost,
+ heartbeats=heartbeats)
+ self.ack_id_source_header = 'message-id'
+ self.ack_id_header = 'message-id'
+ elif version == '1.2':
+ conn = stomp.StompConnection12(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))],
+ vhost=vhost,
+ heartbeats=heartbeats)
+ self.ack_id_source_header = 'ack'
+ self.ack_id_header = 'id'
+ else:
+ conn = stomp.StompConnection12(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))],
+ vhost=vhost,
+ heartbeats=heartbeats)
+ conn.version = version
+ return conn
+
+ def create_connection(self, user='guest', passcode='guest', wait=True, **kwargs):
+ conn = self.create_connection_obj(**kwargs)
+ conn.start()
+ conn.connect(user, passcode, wait=wait)
+ return conn
+
+ def subscribe_dest(self, conn, destination, sub_id, **kwargs):
+ if type(conn) is stomp.StompConnection10:
+ # 'id' is optional in STOMP 1.0.
+ if sub_id != None:
+ kwargs['id'] = sub_id
+ conn.subscribe(destination, **kwargs)
+ else:
+ # 'id' is required in STOMP 1.1+.
+ if sub_id == None:
+ sub_id = 'ctag'
+ conn.subscribe(destination, sub_id, **kwargs)
+
+ def unsubscribe_dest(self, conn, destination, sub_id, **kwargs):
+ if type(conn) is stomp.StompConnection10:
+ # 'id' is optional in STOMP 1.0.
+ if sub_id != None:
+ conn.unsubscribe(id=sub_id, **kwargs)
+ else:
+ conn.unsubscribe(destination=destination, **kwargs)
+ else:
+ # 'id' is required in STOMP 1.1+.
+ if sub_id == None:
+ sub_id = 'ctag'
+ conn.unsubscribe(sub_id, **kwargs)
+
+ def ack_message(self, conn, msg_id, sub_id, **kwargs):
+ if type(conn) is stomp.StompConnection10:
+ conn.ack(msg_id, **kwargs)
+ elif type(conn) is stomp.StompConnection11:
+ if sub_id == None:
+ sub_id = 'ctag'
+ conn.ack(msg_id, sub_id, **kwargs)
+ elif type(conn) is stomp.StompConnection12:
+ conn.ack(msg_id, **kwargs)
+
+ def nack_message(self, conn, msg_id, sub_id, **kwargs):
+ if type(conn) is stomp.StompConnection10:
+ # Normally unsupported by STOMP 1.0.
+ conn.send_frame("NACK", {"message-id": msg_id})
+ elif type(conn) is stomp.StompConnection11:
+ if sub_id == None:
+ sub_id = 'ctag'
+ conn.nack(msg_id, sub_id, **kwargs)
+ elif type(conn) is stomp.StompConnection12:
+ conn.nack(msg_id, **kwargs)
+
+ def create_subscriber_connection(self, dest):
+ conn = self.create_connection()
+ listener = WaitableListener()
+ conn.set_listener('', listener)
+ self.subscribe_dest(conn, dest, None, receipt="sub.receipt")
+ listener.wait()
+ self.assertEquals(1, len(listener.receipts))
+ listener.reset()
+ return conn, listener
+
+ def setUp(self):
+ # Note: useful for debugging
+ # import stomp.listener
+ self.conn = self.create_connection()
+ self.listener = WaitableListener()
+ self.conn.set_listener('waitable', self.listener)
+ # Note: useful for debugging
+ # self.printing_listener = stomp.listener.PrintingListener()
+ # self.conn.set_listener('printing', self.printing_listener)
+
+ def tearDown(self):
+ if self.conn.is_connected():
+ self.conn.disconnect()
+ self.conn.stop()
+
+ def simple_test_send_rec(self, dest, headers={}):
+ self.listener.reset()
+
+ self.subscribe_dest(self.conn, dest, None)
+ self.conn.send(dest, "foo", headers=headers)
+
+ self.assertTrue(self.listener.wait(), "Timeout, no message received")
+
+ # assert no errors
+ if len(self.listener.errors) > 0:
+ self.fail(self.listener.errors[0]['message'])
+
+ # check header content
+ msg = self.listener.messages[0]
+ self.assertEquals("foo", msg['message'])
+ self.assertEquals(dest, msg['headers']['destination'])
+ return msg['headers']
+
+ def assertListener(self, errMsg, numMsgs=0, numErrs=0, numRcts=0, timeout=10):
+ if numMsgs + numErrs + numRcts > 0:
+ self._assertTrue(self.listener.wait(timeout), errMsg + " (#awaiting)")
+ else:
+ self._assertFalse(self.listener.wait(timeout), errMsg + " (#awaiting)")
+ self._assertEquals(numMsgs, len(self.listener.messages), errMsg + " (#messages)")
+ self._assertEquals(numErrs, len(self.listener.errors), errMsg + " (#errors)")
+ self._assertEquals(numRcts, len(self.listener.receipts), errMsg + " (#receipts)")
+
+ def _assertTrue(self, bool, msg):
+ if not bool:
+ self.listener.print_state(msg, True)
+ self.assertTrue(bool, msg)
+
+ def _assertFalse(self, bool, msg):
+ if bool:
+ self.listener.print_state(msg, True)
+ self.assertFalse(bool, msg)
+
+ def _assertEquals(self, expected, actual, msg):
+ if expected != actual:
+ self.listener.print_state(msg, True)
+ self.assertEquals(expected, actual, msg)
+
+ def assertListenerAfter(self, verb, errMsg="", numMsgs=0, numErrs=0, numRcts=0, timeout=5):
+ num = numMsgs + numErrs + numRcts
+ self.listener.reset(num if num>0 else 1)
+ verb()
+ self.assertListener(errMsg=errMsg, numMsgs=numMsgs, numErrs=numErrs, numRcts=numRcts, timeout=timeout)
+
+class WaitableListener(object):
+
+ def __init__(self):
+ self.debug = False
+ if self.debug:
+ print('(listener) init')
+ self.messages = []
+ self.errors = []
+ self.receipts = []
+ self.latch = Latch(1)
+ self.msg_no = 0
+
+ def _next_msg_no(self):
+ self.msg_no += 1
+ return self.msg_no
+
+ def _append(self, array, msg, hdrs):
+ mno = self._next_msg_no()
+ array.append({'message' : msg, 'headers' : hdrs, 'msg_no' : mno})
+ self.latch.countdown()
+
+ def on_receipt(self, headers, message):
+ if self.debug:
+ print('(on_receipt) message: {}, headers: {}'.format(message, headers))
+ self._append(self.receipts, message, headers)
+
+ def on_error(self, headers, message):
+ if self.debug:
+ print('(on_error) message: {}, headers: {}'.format(message, headers))
+ self._append(self.errors, message, headers)
+
+ def on_message(self, headers, message):
+ if self.debug:
+ print('(on_message) message: {}, headers: {}'.format(message, headers))
+ self._append(self.messages, message, headers)
+
+ def reset(self, count=1):
+ if self.debug:
+ self.print_state('(reset listener--old state)')
+ self.messages = []
+ self.errors = []
+ self.receipts = []
+ self.latch = Latch(count)
+ self.msg_no = 0
+ if self.debug:
+ self.print_state('(reset listener--new state)')
+
+ def wait(self, timeout=10):
+ return self.latch.wait(timeout)
+
+ def print_state(self, hdr="", full=False):
+ print(hdr)
+ print('#messages: {}'.format(len(self.messages)))
+        print('#errors: {}'.format(len(self.errors)))
+ print('#receipts: {}'.format(len(self.receipts)))
+ print('Remaining count: {}'.format(self.latch.get_count()))
+ if full:
+ if len(self.messages) != 0: print('Messages: {}'.format(self.messages))
+            if len(self.errors) != 0: print('Errors: {}'.format(self.errors))
+            if len(self.receipts) != 0: print('Receipts: {}'.format(self.receipts))
+
+class Latch(object):
+
+ def __init__(self, count=1):
+ self.cond = threading.Condition()
+ self.cond.acquire()
+ self.count = count
+ self.cond.release()
+
+ def countdown(self):
+ self.cond.acquire()
+ if self.count > 0:
+ self.count -= 1
+ if self.count == 0:
+ self.cond.notify_all()
+ self.cond.release()
+
+ def wait(self, timeout=None):
+ try:
+ self.cond.acquire()
+ if self.count == 0:
+ return True
+ else:
+ self.cond.wait(timeout)
+ return self.count == 0
+ finally:
+ self.cond.release()
+
+ def get_count(self):
+ try:
+ self.cond.acquire()
+ return self.count
+ finally:
+ self.cond.release()
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/connect_options.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/connect_options.py
new file mode 100644
index 0000000000..f71c4acf70
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/connect_options.py
@@ -0,0 +1,51 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import stomp
+import base
+import test_util
+import os
+
+class TestConnectOptions(base.BaseTest):
+
+ def test_implicit_connect(self):
+ ''' Implicit connect with receipt on first command '''
+ self.conn.disconnect()
+ test_util.enable_implicit_connect()
+ listener = base.WaitableListener()
+ new_conn = stomp.Connection(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))])
+ new_conn.set_listener('', listener)
+
+        new_conn.start() # deliberately no CONNECT frame; the first command relies on implicit connect
+ self.subscribe_dest(new_conn, "/topic/implicit", 'sub_implicit',
+ receipt='implicit')
+
+ try:
+ self.assertTrue(listener.wait(5))
+ self.assertEquals(1, len(listener.receipts),
+ 'Missing receipt. Likely not connected')
+ self.assertEquals('implicit', listener.receipts[0]['headers']['receipt-id'])
+ finally:
+ new_conn.disconnect()
+ test_util.disable_implicit_connect()
+
+ def test_default_user(self):
+ ''' Default user connection '''
+ self.conn.disconnect()
+ test_util.enable_default_user()
+ listener = base.WaitableListener()
+ new_conn = stomp.Connection(host_and_ports=[('localhost', int(os.environ["STOMP_PORT"]))])
+ new_conn.set_listener('', listener)
+ new_conn.start()
+ new_conn.connect()
+ try:
+ self.assertFalse(listener.wait(3)) # no error back
+ self.assertTrue(new_conn.is_connected())
+ finally:
+ new_conn.disconnect()
+ test_util.disable_default_user()
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/destinations.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/destinations.py
new file mode 100644
index 0000000000..76e5402686
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/destinations.py
@@ -0,0 +1,536 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import stomp
+import base
+import time
+
+class TestExchange(base.BaseTest):
+
+
+ def test_amq_direct(self):
+ ''' Test basic send/receive for /exchange/amq.direct '''
+ self.__test_exchange_send_rec("amq.direct", "route")
+
+ def test_amq_topic(self):
+ ''' Test basic send/receive for /exchange/amq.topic '''
+ self.__test_exchange_send_rec("amq.topic", "route")
+
+ def test_amq_fanout(self):
+ ''' Test basic send/receive for /exchange/amq.fanout '''
+ self.__test_exchange_send_rec("amq.fanout", "route")
+
+ def test_amq_fanout_no_route(self):
+        ''' Test basic send/receive for /exchange/amq.fanout with no routing key '''
+ self.__test_exchange_send_rec("amq.fanout")
+
+ def test_invalid_exchange(self):
+ ''' Test invalid exchange error '''
+ self.listener.reset(1)
+ self.subscribe_dest(self.conn, "/exchange/does.not.exist", None,
+ ack="auto")
+ self.assertListener("Expecting an error", numErrs=1)
+ err = self.listener.errors[0]
+ self.assertEquals("not_found", err['headers']['message'])
+ self.assertEquals(
+ "NOT_FOUND - no exchange 'does.not.exist' in vhost '/'\n",
+ err['message'])
+ time.sleep(1)
+ self.assertFalse(self.conn.is_connected())
+
+ def __test_exchange_send_rec(self, exchange, route = None):
+ if exchange != "amq.topic":
+ dest = "/exchange/" + exchange
+ else:
+ dest = "/topic"
+ if route != None:
+ dest += "/" + route
+
+ self.simple_test_send_rec(dest)
+
+class TestQueue(base.BaseTest):
+
+ def test_send_receive(self):
+ ''' Test basic send/receive for /queue '''
+ destination = '/queue/test'
+ self.simple_test_send_rec(destination)
+
+ def test_send_receive_in_other_conn(self):
+ ''' Test send in one connection, receive in another '''
+ destination = '/queue/test2'
+
+ # send
+ self.conn.send(destination, "hello")
+
+ # now receive
+ conn2 = self.create_connection()
+ try:
+ listener2 = base.WaitableListener()
+ conn2.set_listener('', listener2)
+
+ self.subscribe_dest(conn2, destination, None, ack="auto")
+ self.assertTrue(listener2.wait(10), "no receive")
+ finally:
+ conn2.disconnect()
+
+ def test_send_receive_in_other_conn_with_disconnect(self):
+ ''' Test send, disconnect, receive '''
+ destination = '/queue/test3'
+
+ # send
+ self.conn.send(destination, "hello thar", receipt="foo")
+ self.listener.wait(3)
+ self.conn.disconnect()
+
+ # now receive
+ conn2 = self.create_connection()
+ try:
+ listener2 = base.WaitableListener()
+ conn2.set_listener('', listener2)
+
+ self.subscribe_dest(conn2, destination, None, ack="auto")
+ self.assertTrue(listener2.wait(10), "no receive")
+ finally:
+ conn2.disconnect()
+
+
+ def test_multi_subscribers(self):
+ ''' Test multiple subscribers against a single /queue destination '''
+ destination = '/queue/test-multi'
+
+ ## set up two subscribers
+ conn1, listener1 = self.create_subscriber_connection(destination)
+ conn2, listener2 = self.create_subscriber_connection(destination)
+
+ try:
+ ## now send
+ self.conn.send(destination, "test1")
+ self.conn.send(destination, "test2")
+
+            ## expect each consumer to get exactly one of the two messages
+ self.assertTrue(listener1.wait(2))
+ self.assertEquals(1, len(listener1.messages),
+ "unexpected message count")
+ self.assertTrue(listener2.wait(2))
+ self.assertEquals(1, len(listener2.messages),
+ "unexpected message count")
+ finally:
+ conn1.disconnect()
+ conn2.disconnect()
+
+ def test_send_with_receipt(self):
+ destination = '/queue/test-receipt'
+ def noop(): pass
+ self.__test_send_receipt(destination, noop, noop)
+
+ def test_send_with_receipt_tx(self):
+ destination = '/queue/test-receipt-tx'
+ tx = 'receipt.tx'
+
+ def before():
+ self.conn.begin(transaction=tx)
+
+ def after():
+ self.assertFalse(self.listener.wait(1))
+ self.conn.commit(transaction=tx)
+
+ self.__test_send_receipt(destination, before, after, {'transaction': tx})
+
+ def test_interleaved_receipt_no_receipt(self):
+        ''' Test interleaved receipt/no-receipt sends: a receipt-less send bracketed by sends with receipts '''
+
+ destination = '/queue/ir'
+
+ self.listener.reset(5)
+
+ self.subscribe_dest(self.conn, destination, None, ack="auto")
+ self.conn.send(destination, 'first', receipt='a')
+ self.conn.send(destination, 'second')
+ self.conn.send(destination, 'third', receipt='b')
+
+ self.assertListener("Missing messages/receipts", numMsgs=3, numRcts=2, timeout=3)
+
+ self.assertEquals(set(['a','b']), self.__gather_receipts())
+
+ def test_interleaved_receipt_no_receipt_tx(self):
+        ''' Test interleaved receipt/no-receipt sends inside a transaction: a receipt-less send bracketed by sends with receipts '''
+
+ destination = '/queue/ir'
+ tx = 'tx.ir'
+
+ # three messages and two receipts
+ self.listener.reset(5)
+
+ self.subscribe_dest(self.conn, destination, None, ack="auto")
+ self.conn.begin(transaction=tx)
+
+ self.conn.send(destination, 'first', receipt='a', transaction=tx)
+ self.conn.send(destination, 'second', transaction=tx)
+ self.conn.send(destination, 'third', receipt='b', transaction=tx)
+ self.conn.commit(transaction=tx)
+
+ self.assertListener("Missing messages/receipts", numMsgs=3, numRcts=2, timeout=40)
+
+ expected = set(['a', 'b'])
+ missing = expected.difference(self.__gather_receipts())
+
+ self.assertEquals(set(), missing, "Missing receipts: " + str(missing))
+
+ def test_interleaved_receipt_no_receipt_inverse(self):
+        ''' Test interleaved receipt/no-receipt sends: a send with a receipt bracketed by receipt-less sends '''
+
+ destination = '/queue/ir'
+
+ self.listener.reset(4)
+
+ self.subscribe_dest(self.conn, destination, None, ack="auto")
+ self.conn.send(destination, 'first')
+ self.conn.send(destination, 'second', receipt='a')
+ self.conn.send(destination, 'third')
+
+ self.assertListener("Missing messages/receipt", numMsgs=3, numRcts=1, timeout=3)
+
+ self.assertEquals(set(['a']), self.__gather_receipts())
+
+ def __test_send_receipt(self, destination, before, after, headers = {}):
+ count = 50
+ self.listener.reset(count)
+
+ before()
+ expected_receipts = set()
+
+ for x in range(0, count):
+ receipt = "test" + str(x)
+ expected_receipts.add(receipt)
+ self.conn.send(destination, "test receipt",
+ receipt=receipt, headers=headers)
+ after()
+
+ self.assertTrue(self.listener.wait(5))
+
+ missing_receipts = expected_receipts.difference(
+ self.__gather_receipts())
+
+ self.assertEquals(set(), missing_receipts,
+ "missing receipts: " + str(missing_receipts))
+
+ def __gather_receipts(self):
+ result = set()
+ for r in self.listener.receipts:
+ result.add(r['headers']['receipt-id'])
+ return result
+
+class TestTopic(base.BaseTest):
+
+ def test_send_receive(self):
+ ''' Test basic send/receive for /topic '''
+ destination = '/topic/test'
+ self.simple_test_send_rec(destination)
+
+ def test_send_multiple(self):
+ ''' Test /topic with multiple consumers '''
+ destination = '/topic/multiple'
+
+ ## set up two subscribers
+ conn1, listener1 = self.create_subscriber_connection(destination)
+ conn2, listener2 = self.create_subscriber_connection(destination)
+
+ try:
+ ## listeners are expecting 2 messages
+ listener1.reset(2)
+ listener2.reset(2)
+
+ ## now send
+ self.conn.send(destination, "test1")
+ self.conn.send(destination, "test2")
+
+ ## expect both consumers to get both messages
+ self.assertTrue(listener1.wait(5))
+ self.assertEquals(2, len(listener1.messages),
+ "unexpected message count")
+ self.assertTrue(listener2.wait(5))
+ self.assertEquals(2, len(listener2.messages),
+ "unexpected message count")
+ finally:
+ conn1.disconnect()
+ conn2.disconnect()
+
+ def test_send_multiple_with_a_large_message(self):
+        ''' Test /topic with multiple consumers and a large (16 MiB) message '''
+ destination = '/topic/16mb'
+ # payload size
+ s = 1024 * 1024 * 16
+ message = 'x' * s
+
+ conn1, listener1 = self.create_subscriber_connection(destination)
+ conn2, listener2 = self.create_subscriber_connection(destination)
+
+ try:
+ listener1.reset(2)
+ listener2.reset(2)
+
+ self.conn.send(destination, message)
+ self.conn.send(destination, message)
+
+ self.assertTrue(listener1.wait(10))
+ self.assertEquals(2, len(listener1.messages),
+ "unexpected message count")
+ self.assertTrue(len(listener2.messages[0]['message']) == s,
+ "unexpected message size")
+
+ self.assertTrue(listener2.wait(10))
+ self.assertEquals(2, len(listener2.messages),
+ "unexpected message count")
+ finally:
+ conn1.disconnect()
+ conn2.disconnect()
+
+class TestReplyQueue(base.BaseTest):
+
+ def test_reply_queue(self):
+        ''' Test with two separate clients. Client 1 sends a
+        message to a known destination with a defined reply
+        queue. Client 2 receives on the known destination and replies
+        on the reply destination. Client 1 then gets the reply message. '''
+
+ known = '/queue/known'
+ reply = '/temp-queue/0'
+
+ ## Client 1 uses pre-supplied connection and listener
+ ## Set up client 2
+ conn2, listener2 = self.create_subscriber_connection(known)
+
+ try:
+ self.conn.send(known, "test",
+ headers = {"reply-to": reply})
+
+ self.assertTrue(listener2.wait(5))
+ self.assertEquals(1, len(listener2.messages))
+
+ reply_to = listener2.messages[0]['headers']['reply-to']
+ self.assertTrue(reply_to.startswith('/reply-queue/'))
+
+ conn2.send(reply_to, "reply")
+ self.assertTrue(self.listener.wait(5))
+ self.assertEquals("reply", self.listener.messages[0]['message'])
+ finally:
+ conn2.disconnect()
+
+ def test_reuse_reply_queue(self):
+ ''' Test re-use of reply-to queue '''
+
+ known2 = '/queue/known2'
+ known3 = '/queue/known3'
+ reply = '/temp-queue/foo'
+
+ def respond(cntn, listna):
+ self.assertTrue(listna.wait(5))
+ self.assertEquals(1, len(listna.messages))
+ reply_to = listna.messages[0]['headers']['reply-to']
+ self.assertTrue(reply_to.startswith('/reply-queue/'))
+ cntn.send(reply_to, "reply")
+
+ ## Client 1 uses pre-supplied connection and listener
+ ## Set up clients 2 and 3
+ conn2, listener2 = self.create_subscriber_connection(known2)
+ conn3, listener3 = self.create_subscriber_connection(known3)
+ try:
+ self.listener.reset(2)
+ self.conn.send(known2, "test2",
+ headers = {"reply-to": reply})
+ self.conn.send(known3, "test3",
+ headers = {"reply-to": reply})
+ respond(conn2, listener2)
+ respond(conn3, listener3)
+
+ self.assertTrue(self.listener.wait(5))
+ self.assertEquals(2, len(self.listener.messages))
+ self.assertEquals("reply", self.listener.messages[0]['message'])
+ self.assertEquals("reply", self.listener.messages[1]['message'])
+ finally:
+ conn2.disconnect()
+ conn3.disconnect()
+
+ def test_perm_reply_queue(self):
+ '''As test_reply_queue, but with a non-temp reply queue'''
+
+ known = '/queue/known'
+ reply = '/queue/reply'
+
+        ## Set up client 1 (subscribed to the reply queue) and
+        ## client 2 (subscribed to the known destination)
+ conn1, listener1 = self.create_subscriber_connection(reply)
+ conn2, listener2 = self.create_subscriber_connection(known)
+
+ try:
+ conn1.send(known, "test",
+ headers = {"reply-to": reply})
+
+ self.assertTrue(listener2.wait(5))
+ self.assertEquals(1, len(listener2.messages))
+
+ reply_to = listener2.messages[0]['headers']['reply-to']
+ self.assertTrue(reply_to == reply)
+
+ conn2.send(reply_to, "reply")
+ self.assertTrue(listener1.wait(5))
+ self.assertEquals("reply", listener1.messages[0]['message'])
+ finally:
+ conn1.disconnect()
+ conn2.disconnect()
+
+class TestDurableSubscription(base.BaseTest):
+
+ ID = 'test.subscription'
+
+ def __subscribe(self, dest, conn=None, id=None):
+ if not conn:
+ conn = self.conn
+ if not id:
+ id = TestDurableSubscription.ID
+
+ self.subscribe_dest(conn, dest, id, ack="auto",
+ headers = {'durable': 'true',
+ 'receipt': 1,
+ 'auto-delete': False})
+
+ def __assert_receipt(self, listener=None, pos=None):
+ if not listener:
+ listener = self.listener
+
+ self.assertTrue(listener.wait(5))
+        self.assertEquals(1, len(listener.receipts))
+        if pos is not None:
+            self.assertEquals(pos, listener.receipts[0]['msg_no'])
+
+ def __assert_message(self, msg, listener=None, pos=None):
+ if not listener:
+ listener = self.listener
+
+ self.assertTrue(listener.wait(5))
+ self.assertEquals(1, len(listener.messages))
+ self.assertEquals(msg, listener.messages[0]['message'])
+ if pos is not None:
+            self.assertEquals(pos, listener.messages[0]['msg_no'])
+
+ def do_test_durable_subscription(self, durability_header):
+ destination = '/topic/durable'
+
+ self.__subscribe(destination)
+ self.__assert_receipt()
+
+ # send first message without unsubscribing
+ self.listener.reset(1)
+ self.conn.send(destination, "first")
+ self.__assert_message("first")
+
+ # now unsubscribe (disconnect only)
+ self.unsubscribe_dest(self.conn, destination, TestDurableSubscription.ID)
+
+ # send again
+ self.listener.reset(2)
+ self.conn.send(destination, "second")
+
+ # resubscribe and expect receipt
+ self.__subscribe(destination)
+ self.__assert_receipt(pos=1)
+ # and message
+ self.__assert_message("second", pos=2)
+
+ # now unsubscribe (cancel)
+ self.unsubscribe_dest(self.conn, destination, TestDurableSubscription.ID,
+ headers={durability_header: 'true'})
+
+ # send again
+ self.listener.reset(1)
+ self.conn.send(destination, "third")
+
+ # resubscribe and expect no message
+ self.__subscribe(destination)
+ self.assertTrue(self.listener.wait(3))
+ self.assertEquals(0, len(self.listener.messages))
+ self.assertEquals(1, len(self.listener.receipts))
+
+ def test_durable_subscription(self):
+ self.do_test_durable_subscription('durable')
+
+ def test_durable_subscription_and_legacy_header(self):
+ self.do_test_durable_subscription('persistent')
+
+ def test_share_subscription(self):
+ destination = '/topic/durable-shared'
+
+ conn2 = self.create_connection()
+ conn2.set_listener('', self.listener)
+
+ try:
+ self.__subscribe(destination)
+ self.__assert_receipt()
+ self.listener.reset(1)
+ self.__subscribe(destination, conn2)
+ self.__assert_receipt()
+
+ self.listener.reset(100)
+
+ # send 100 messages
+ for x in range(0, 100):
+ self.conn.send(destination, "msg" + str(x))
+
+ self.assertTrue(self.listener.wait(5))
+ self.assertEquals(100, len(self.listener.messages))
+ finally:
+ conn2.disconnect()
+
+ def test_separate_ids(self):
+ destination = '/topic/durable-separate'
+
+ conn2 = self.create_connection()
+ listener2 = base.WaitableListener()
+ conn2.set_listener('', listener2)
+
+ try:
+ # ensure durable subscription exists for each ID
+ self.__subscribe(destination)
+ self.__assert_receipt()
+ self.__subscribe(destination, conn2, "other.id")
+ self.__assert_receipt(listener2)
+ self.unsubscribe_dest(self.conn, destination, TestDurableSubscription.ID)
+ self.unsubscribe_dest(conn2, destination, "other.id")
+
+ self.listener.reset(101)
+ listener2.reset(101) ## 100 messages and 1 receipt
+
+ # send 100 messages
+ for x in range(0, 100):
+ self.conn.send(destination, "msg" + str(x))
+
+ self.__subscribe(destination)
+ self.__subscribe(destination, conn2, "other.id")
+
+ for l in [self.listener, listener2]:
+ self.assertTrue(l.wait(20))
+ self.assertTrue(len(l.messages) >= 90)
+ self.assertTrue(len(l.messages) <= 100)
+
+ finally:
+ conn2.disconnect()
+
+ def do_test_durable_subscribe_no_id_and_header(self, header):
+ destination = '/topic/durable-invalid'
+
+ self.conn.send_frame('SUBSCRIBE',
+ {'destination': destination, 'ack': 'auto', header: 'true'})
+ self.listener.wait(3)
+ self.assertEquals(1, len(self.listener.errors))
+ self.assertEquals("Missing Header", self.listener.errors[0]['headers']['message'])
+
+ def test_durable_subscribe_no_id(self):
+ self.do_test_durable_subscribe_no_id_and_header('durable')
+
+ def test_durable_subscribe_no_id_and_legacy_header(self):
+ self.do_test_durable_subscribe_no_id_and_header('persistent')
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/errors.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/errors.py
new file mode 100644
index 0000000000..884ada50e8
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/errors.py
@@ -0,0 +1,101 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import stomp
+import base
+import time
+
+class TestErrorsAndCloseConnection(base.BaseTest):
+ def __test_duplicate_consumer_tag_with_headers(self, destination, headers):
+ self.subscribe_dest(self.conn, destination, None,
+ headers = headers)
+
+ self.subscribe_dest(self.conn, destination, None,
+ headers = headers)
+
+ self.assertTrue(self.listener.wait())
+
+ self.assertEquals(1, len(self.listener.errors))
+ errorReceived = self.listener.errors[0]
+ self.assertEquals("Duplicated subscription identifier", errorReceived['headers']['message'])
+ self.assertEquals("A subscription identified by 'T_1' already exists.", errorReceived['message'])
+ time.sleep(2)
+ self.assertFalse(self.conn.is_connected())
+
+
+ def test_duplicate_consumer_tag_with_transient_destination(self):
+ destination = "/exchange/amq.direct/duplicate-consumer-tag-test1"
+ self.__test_duplicate_consumer_tag_with_headers(destination, {'id': 1})
+
+ def test_duplicate_consumer_tag_with_durable_destination(self):
+ destination = "/queue/duplicate-consumer-tag-test2"
+ self.__test_duplicate_consumer_tag_with_headers(destination, {'id': 1,
+ 'persistent': True})
+
+
+class TestErrors(base.BaseTest):
+
+ def test_invalid_queue_destination(self):
+ self.__test_invalid_destination("queue", "/bah/baz")
+
+ def test_invalid_empty_queue_destination(self):
+ self.__test_invalid_destination("queue", "")
+
+ def test_invalid_topic_destination(self):
+ self.__test_invalid_destination("topic", "/bah/baz")
+
+ def test_invalid_empty_topic_destination(self):
+ self.__test_invalid_destination("topic", "")
+
+ def test_invalid_exchange_destination(self):
+ self.__test_invalid_destination("exchange", "/bah/baz/boo")
+
+ def test_invalid_empty_exchange_destination(self):
+ self.__test_invalid_destination("exchange", "")
+
+ def test_invalid_default_exchange_destination(self):
+ self.__test_invalid_destination("exchange", "//foo")
+
+ def test_unknown_destination(self):
+ self.listener.reset()
+ self.conn.send("/something/interesting", 'test_unknown_destination')
+
+ self.assertTrue(self.listener.wait())
+ self.assertEquals(1, len(self.listener.errors))
+
+ err = self.listener.errors[0]
+ self.assertEquals("Unknown destination", err['headers']['message'])
+
+ def test_send_missing_destination(self):
+ self.__test_missing_destination("SEND")
+
+    def test_subscribe_missing_destination(self):
+ self.__test_missing_destination("SUBSCRIBE")
+
+ def __test_missing_destination(self, command):
+ self.listener.reset()
+ self.conn.send_frame(command)
+
+ self.assertTrue(self.listener.wait())
+ self.assertEquals(1, len(self.listener.errors))
+
+ err = self.listener.errors[0]
+ self.assertEquals("Missing destination", err['headers']['message'])
+
+ def __test_invalid_destination(self, dtype, content):
+ self.listener.reset()
+ self.conn.send("/" + dtype + content, '__test_invalid_destination:' + dtype + content)
+
+ self.assertTrue(self.listener.wait())
+ self.assertEquals(1, len(self.listener.errors))
+
+ err = self.listener.errors[0]
+ self.assertEquals("Invalid destination", err['headers']['message'])
+ self.assertEquals("'" + content + "' is not a valid " +
+ dtype + " destination\n",
+ err['message'])
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/lifecycle.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/lifecycle.py
new file mode 100644
index 0000000000..d7b558e7b5
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/lifecycle.py
@@ -0,0 +1,187 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import stomp
+import base
+import time
+
+class TestLifecycle(base.BaseTest):
+
+ def test_unsubscribe_exchange_destination(self):
+ ''' Test UNSUBSCRIBE command with exchange'''
+ d = "/exchange/amq.fanout"
+ self.unsub_test(d, self.sub_and_send(d))
+
+ def test_unsubscribe_exchange_destination_with_receipt(self):
+ ''' Test receipted UNSUBSCRIBE command with exchange'''
+ d = "/exchange/amq.fanout"
+ self.unsub_test(d, self.sub_and_send(d, receipt="unsub.rct"), numRcts=1)
+
+ def test_unsubscribe_queue_destination(self):
+ ''' Test UNSUBSCRIBE command with queue'''
+ d = "/queue/unsub01"
+ self.unsub_test(d, self.sub_and_send(d))
+
+ def test_unsubscribe_queue_destination_with_receipt(self):
+ ''' Test receipted UNSUBSCRIBE command with queue'''
+ d = "/queue/unsub02"
+ self.unsub_test(d, self.sub_and_send(d, receipt="unsub.rct"), numRcts=1)
+
+ def test_unsubscribe_exchange_id(self):
+ ''' Test UNSUBSCRIBE command with exchange by id'''
+ d = "/exchange/amq.fanout"
+ self.unsub_test(d, self.sub_and_send(d, subid="exchid"))
+
+ def test_unsubscribe_exchange_id_with_receipt(self):
+ ''' Test receipted UNSUBSCRIBE command with exchange by id'''
+ d = "/exchange/amq.fanout"
+ self.unsub_test(d, self.sub_and_send(d, subid="exchid", receipt="unsub.rct"), numRcts=1)
+
+ def test_unsubscribe_queue_id(self):
+ ''' Test UNSUBSCRIBE command with queue by id'''
+ d = "/queue/unsub03"
+ self.unsub_test(d, self.sub_and_send(d, subid="queid"))
+
+ def test_unsubscribe_queue_id_with_receipt(self):
+ ''' Test receipted UNSUBSCRIBE command with queue by id'''
+ d = "/queue/unsub04"
+ self.unsub_test(d, self.sub_and_send(d, subid="queid", receipt="unsub.rct"), numRcts=1)
+
+ def test_connect_version_1_0(self):
+ ''' Test CONNECT with version 1.0'''
+ self.conn.disconnect()
+ new_conn = self.create_connection(version="1.0")
+ try:
+ self.assertTrue(new_conn.is_connected())
+ finally:
+ new_conn.disconnect()
+ self.assertFalse(new_conn.is_connected())
+
+ def test_connect_version_1_1(self):
+ ''' Test CONNECT with version 1.1'''
+ self.conn.disconnect()
+ new_conn = self.create_connection(version="1.1")
+ try:
+ self.assertTrue(new_conn.is_connected())
+ finally:
+ new_conn.disconnect()
+ self.assertFalse(new_conn.is_connected())
+
+ def test_connect_version_1_2(self):
+ ''' Test CONNECT with version 1.2'''
+ self.conn.disconnect()
+ new_conn = self.create_connection(version="1.2")
+ try:
+ self.assertTrue(new_conn.is_connected())
+ finally:
+ new_conn.disconnect()
+ self.assertFalse(new_conn.is_connected())
+
+ def test_heartbeat_disconnects_client(self):
+ ''' Test heart-beat disconnection'''
+ self.conn.disconnect()
+ new_conn = self.create_connection(version='1.1', heartbeats=(1500, 0))
+ try:
+ self.assertTrue(new_conn.is_connected())
+ time.sleep(1)
+ self.assertTrue(new_conn.is_connected())
+ time.sleep(3)
+ self.assertFalse(new_conn.is_connected())
+ finally:
+ if new_conn.is_connected():
+ new_conn.disconnect()
+
+ def test_unsupported_version(self):
+ ''' Test unsupported version on CONNECT command'''
+ self.bad_connect("Supported versions are 1.0,1.1,1.2\n", version='100.1')
+
+ def test_bad_username(self):
+ ''' Test bad username'''
+ self.bad_connect("Access refused for user 'gust'\n", user='gust')
+
+ def test_bad_password(self):
+ ''' Test bad password'''
+ self.bad_connect("Access refused for user 'guest'\n", passcode='gust')
+
+ def test_bad_vhost(self):
+ ''' Test bad virtual host'''
+ self.bad_connect("Virtual host '//' access denied", version='1.1', vhost='//')
+
+ def bad_connect(self, expected, user='guest', passcode='guest', **kwargs):
+ self.conn.disconnect()
+ new_conn = self.create_connection_obj(**kwargs)
+ listener = base.WaitableListener()
+ new_conn.set_listener('', listener)
+ try:
+ new_conn.start()
+ new_conn.connect(user, passcode)
+ self.assertTrue(listener.wait())
+ self.assertEquals(expected, listener.errors[0]['message'])
+ finally:
+ if new_conn.is_connected():
+ new_conn.disconnect()
+
+ def test_bad_header_on_send(self):
+ ''' Test disallowed header on SEND '''
+ self.listener.reset(1)
+ self.conn.send_frame("SEND", {"destination":"a", "message-id":"1"})
+ self.assertTrue(self.listener.wait())
+ self.assertEquals(1, len(self.listener.errors))
+ errorReceived = self.listener.errors[0]
+ self.assertEquals("Invalid header", errorReceived['headers']['message'])
+ self.assertEquals("'message-id' is not allowed on 'SEND'.\n", errorReceived['message'])
+
+ def test_send_recv_header(self):
+ ''' Test sending a custom header and receiving it back '''
+ dest = '/queue/custom-header'
+ hdrs = {'x-custom-header-1': 'value1',
+ 'x-custom-header-2': 'value2',
+ 'custom-header-3': 'value3'}
+ self.listener.reset(1)
+ recv_hdrs = self.simple_test_send_rec(dest, headers=hdrs)
+ self.assertEquals('value1', recv_hdrs['x-custom-header-1'])
+ self.assertEquals('value2', recv_hdrs['x-custom-header-2'])
+ self.assertEquals('value3', recv_hdrs['custom-header-3'])
+
+ def test_disconnect(self):
+ ''' Test DISCONNECT command'''
+ self.conn.disconnect()
+ self.assertFalse(self.conn.is_connected())
+
+ def test_disconnect_with_receipt(self):
+ ''' Test the DISCONNECT command with receipts '''
+ time.sleep(3)
+ self.listener.reset(1)
+ self.conn.send_frame("DISCONNECT", {"receipt": "test"})
+ self.assertTrue(self.listener.wait())
+ self.assertEquals(1, len(self.listener.receipts))
+ receiptReceived = self.listener.receipts[0]['headers']['receipt-id']
+        self.assertEquals("test", receiptReceived,
+                          "Wrong receipt received: '" + receiptReceived + "'")
+
+ def unsub_test(self, dest, verbs, numRcts=0):
+ def afterfun():
+ self.conn.send(dest, "after-test")
+ subverb, unsubverb = verbs
+ self.assertListenerAfter(subverb, numMsgs=1,
+ errMsg="FAILED to subscribe and send")
+ self.assertListenerAfter(unsubverb, numRcts=numRcts,
+ errMsg="Incorrect responses from UNSUBSCRIBE")
+ self.assertListenerAfter(afterfun,
+ errMsg="Still receiving messages")
+
+ def sub_and_send(self, dest, subid=None, receipt=None):
+ def subfun():
+ self.subscribe_dest(self.conn, dest, subid)
+ self.conn.send(dest, "test")
+ def unsubfun():
+ headers = {}
+ if receipt != None:
+ headers['receipt'] = receipt
+ self.unsubscribe_dest(self.conn, dest, subid, **headers)
+ return subfun, unsubfun
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/parsing.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/parsing.py
new file mode 100644
index 0000000000..40f908c5d9
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/parsing.py
@@ -0,0 +1,331 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import re
+import socket
+import functools
+import time
+import sys
+import os
+
+def connect(cnames):
+    ''' Decorator that opens a raw STOMP socket for each name in cnames,
+    issues CONNECT, and binds each socket to the test instance under that name '''
+ cmd=('CONNECT\n'
+ 'login:guest\n'
+ 'passcode:guest\n'
+ '\n'
+ '\n\0')
+ resp = ('CONNECTED\n'
+ 'server:RabbitMQ/(.*)\n'
+ 'session:(.*)\n'
+ 'heart-beat:0,0\n'
+ 'version:1.0\n'
+ '\n\x00')
+ def w(m):
+ @functools.wraps(m)
+ def wrapper(self, *args, **kwargs):
+ for cname in cnames:
+ sd = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sd.settimeout(30000)
+ sd.connect((self.host, self.port))
+ sd.sendall(cmd.encode('utf-8'))
+ self.match(resp, sd.recv(4096).decode('utf-8'))
+ setattr(self, cname, sd)
+ try:
+ r = m(self, *args, **kwargs)
+ finally:
+ for cname in cnames:
+ try:
+ getattr(self, cname).close()
+ except IOError:
+ pass
+ return r
+ return wrapper
+ return w
+
+
+class TestParsing(unittest.TestCase):
+ host='127.0.0.1'
+    # The default port is 61613, but it sits in the middle of the ephemeral
+    # ports range on many operating systems and may already be in use, so
+    # the test suite supplies a dedicated port (close to the AMQP default)
+    # via the STOMP_PORT environment variable.
+    port=int(os.environ["STOMP_PORT"])
+
+
+ def match(self, pattern, data):
+ ''' helper: try to match a regexp with a string.
+ Fail test if they do not match.
+ '''
+ matched = re.match(pattern, data)
+ if matched:
+ return matched.groups()
+ self.assertTrue(False, 'No match:\n{}\n\n{}'.format(pattern, data))
+
+ def recv_atleast(self, bufsize):
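+        # Keep reading from the socket until at least `bufsize` bytes have
+        # been received, or the peer closes the connection early.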
+ recvhead = []
+ rl = bufsize
+ while rl > 0:
+ buf = self.cd.recv(rl).decode('utf-8')
+ bl = len(buf)
+ if bl==0: break
+ recvhead.append( buf )
+ rl -= bl
+ return ''.join(recvhead)
+
+
+ @connect(['cd'])
+ def test_newline_after_nul(self):
+ cmd = ('\n'
+ 'SUBSCRIBE\n'
+ 'destination:/exchange/amq.fanout\n'
+ '\n\x00\n'
+ 'SEND\n'
+ 'content-type:text/plain\n'
+ 'destination:/exchange/amq.fanout\n\n'
+ 'hello\n\x00\n')
+ self.cd.sendall(cmd.encode('utf-8'))
+ resp = ('MESSAGE\n'
+ 'destination:/exchange/amq.fanout\n'
+ 'message-id:Q_/exchange/amq.fanout@@session-(.*)\n'
+ 'redelivered:false\n'
+ 'content-type:text/plain\n'
+ 'content-length:6\n'
+ '\n'
+ 'hello\n\0')
+ self.match(resp, self.cd.recv(4096).decode('utf-8'))
+
+ @connect(['cd'])
+ def test_send_without_content_type(self):
+ cmd = ('\n'
+ 'SUBSCRIBE\n'
+ 'destination:/exchange/amq.fanout\n'
+ '\n\x00\n'
+ 'SEND\n'
+ 'destination:/exchange/amq.fanout\n\n'
+ 'hello\n\x00')
+ self.cd.sendall(cmd.encode('utf-8'))
+ resp = ('MESSAGE\n'
+ 'destination:/exchange/amq.fanout\n'
+ 'message-id:Q_/exchange/amq.fanout@@session-(.*)\n'
+ 'redelivered:false\n'
+ 'content-length:6\n'
+ '\n'
+ 'hello\n\0')
+ self.match(resp, self.cd.recv(4096).decode('utf-8'))
+
+ @connect(['cd'])
+ def test_send_without_content_type_binary(self):
+ msg = 'hello'
+ cmd = ('\n'
+ 'SUBSCRIBE\n'
+ 'destination:/exchange/amq.fanout\n'
+ '\n\x00\n'
+ 'SEND\n'
+ 'destination:/exchange/amq.fanout\n' +
+ 'content-length:{}\n\n'.format(len(msg)) +
+ '{}\x00'.format(msg))
+ self.cd.sendall(cmd.encode('utf-8'))
+ resp = ('MESSAGE\n'
+ 'destination:/exchange/amq.fanout\n'
+ 'message-id:Q_/exchange/amq.fanout@@session-(.*)\n'
+ 'redelivered:false\n' +
+ 'content-length:{}\n'.format(len(msg)) +
+ '\n{}\0'.format(msg))
+ self.match(resp, self.cd.recv(4096).decode('utf-8'))
+
+ @connect(['cd'])
+ def test_newline_after_nul_and_leading_nul(self):
+ cmd = ('\n'
+ '\x00SUBSCRIBE\n'
+ 'destination:/exchange/amq.fanout\n'
+ '\n\x00\n'
+ '\x00SEND\n'
+ 'destination:/exchange/amq.fanout\n'
+ 'content-type:text/plain\n'
+ '\nhello\n\x00\n')
+ self.cd.sendall(cmd.encode('utf-8'))
+ resp = ('MESSAGE\n'
+ 'destination:/exchange/amq.fanout\n'
+ 'message-id:Q_/exchange/amq.fanout@@session-(.*)\n'
+ 'redelivered:false\n'
+ 'content-type:text/plain\n'
+ 'content-length:6\n'
+ '\n'
+ 'hello\n\0')
+ self.match(resp, self.cd.recv(4096).decode('utf-8'))
+
+ @connect(['cd'])
+ def test_bad_command(self):
+ ''' Trigger an error message. '''
+ cmd = ('WRONGCOMMAND\n'
+ 'destination:a\n'
+ 'exchange:amq.fanout\n'
+ '\n\0')
+ self.cd.sendall(cmd.encode('utf-8'))
+ resp = ('ERROR\n'
+ 'message:Bad command\n'
+ 'content-type:text/plain\n'
+ 'version:1.0,1.1,1.2\n'
+ 'content-length:43\n'
+ '\n'
+ 'Could not interpret command "WRONGCOMMAND"\n'
+ '\0')
+ self.match(resp, self.cd.recv(4096).decode('utf-8'))
+
+ @connect(['sd', 'cd1', 'cd2'])
+ def test_broadcast(self):
+ ''' Single message should be delivered to two consumers:
+ amq.topic --routing_key--> first_queue --> first_connection
+ \--routing_key--> second_queue--> second_connection
+ '''
+ subscribe=( 'SUBSCRIBE\n'
+ 'id: XsKNhAf\n'
+ 'destination:/exchange/amq.topic/da9d4779\n'
+ '\n\0')
+ for cd in [self.cd1, self.cd2]:
+ cd.sendall(subscribe.encode('utf-8'))
+
+ time.sleep(0.1)
+
+ cmd = ('SEND\n'
+ 'content-type:text/plain\n'
+ 'destination:/exchange/amq.topic/da9d4779\n'
+ '\n'
+ 'message'
+ '\n\0')
+ self.sd.sendall(cmd.encode('utf-8'))
+
+ resp=('MESSAGE\n'
+ 'subscription:(.*)\n'
+ 'destination:/topic/da9d4779\n'
+ 'message-id:(.*)\n'
+ 'redelivered:false\n'
+ 'content-type:text/plain\n'
+ 'content-length:8\n'
+ '\n'
+ 'message'
+ '\n\x00')
+ for cd in [self.cd1, self.cd2]:
+ self.match(resp, cd.recv(4096).decode('utf-8'))
+
+ @connect(['cd'])
+ def test_message_with_embedded_nulls(self):
+ ''' Test sending/receiving message with embedded nulls. '''
+ dest='destination:/exchange/amq.topic/test_embed_nulls_message\n'
+ resp_dest='destination:/topic/test_embed_nulls_message\n'
+ subscribe=( 'SUBSCRIBE\n'
+ 'id:xxx\n'
+ +dest+
+ '\n\0')
+ self.cd.sendall(subscribe.encode('utf-8'))
+
+ boilerplate = '0123456789'*1024 # large enough boilerplate
+ message = '01'
+ oldi = 2
+ for i in [5, 90, 256-1, 384-1, 512, 1024, 1024+256+64+32]:
+ message = message + '\0' + boilerplate[oldi+1:i]
+ oldi = i
+ msg_len = len(message)
+
+ cmd = ('SEND\n'
+ +dest+
+ 'content-type:text/plain\n'
+ 'content-length:%i\n'
+ '\n'
+ '%s'
+ '\0' % (len(message), message))
+ self.cd.sendall(cmd.encode('utf-8'))
+
+ headresp=('MESSAGE\n' # 8
+ 'subscription:(.*)\n' # 14 + subscription
+ +resp_dest+ # 44
+ 'message-id:(.*)\n' # 12 + message-id
+ 'redelivered:false\n' # 18
+ 'content-type:text/plain\n' # 24
+ 'content-length:%i\n' # 16 + 4==len('1024')
+ '\n' # 1
+ '(.*)$' # prefix of body+null (potentially)
+ % len(message) )
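+        # The bare numbers below are the fixed header widths noted above; the
+        # parenthesised terms are rough allowances for the variable-length
+        # parts (subscription id, message-id, length digits, start of body).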
+ headlen = 8 + 24 + 14 + (3) + 44 + 12 + 18 + (48) + 16 + (4) + 1 + (1)
+
+ headbuf = self.recv_atleast(headlen)
+ self.assertFalse(len(headbuf) == 0)
+
+ (sub, msg_id, bodyprefix) = self.match(headresp, headbuf)
+ bodyresp=( '%s\0' % message )
+        bodylen = len(bodyresp)
+
+ bodybuf = ''.join([bodyprefix,
+ self.recv_atleast(bodylen - len(bodyprefix))])
+
+ self.assertEqual(len(bodybuf), msg_len+1,
+ "body received not the same length as message sent")
+ self.assertEqual(bodybuf, bodyresp,
+ " body (...'%s')\nincorrectly returned as (...'%s')"
+ % (bodyresp[-10:], bodybuf[-10:]))
+
+ @connect(['cd'])
+ def test_message_in_packets(self):
+ ''' Test sending/receiving message in packets. '''
+ base_dest='topic/test_embed_nulls_message\n'
+ dest='destination:/exchange/amq.' + base_dest
+ resp_dest='destination:/'+ base_dest
+ subscribe=( 'SUBSCRIBE\n'
+ 'id:xxx\n'
+ +dest+
+ '\n\0')
+ self.cd.sendall(subscribe.encode('utf-8'))
+
+ boilerplate = '0123456789'*1024 # large enough boilerplate
+
+ message = boilerplate[:1024 + 512 + 256 + 32]
+ msg_len = len(message)
+
+ msg_to_send = ('SEND\n'
+ +dest+
+ 'content-type:text/plain\n'
+ '\n'
+ '%s'
+ '\0' % (message) )
+ packet_size = 191
+ part_index = 0
+ msg_to_send_len = len(msg_to_send)
+ while part_index < msg_to_send_len:
+ part = msg_to_send[part_index:part_index+packet_size]
+ time.sleep(0.1)
+ self.cd.sendall(part.encode('utf-8'))
+ part_index += packet_size
+
+ headresp=('MESSAGE\n' # 8
+ 'subscription:(.*)\n' # 14 + subscription
+ +resp_dest+ # 44
+ 'message-id:(.*)\n' # 12 + message-id
+ 'redelivered:false\n' # 18
+ 'content-type:text/plain\n' # 24
+ 'content-length:%i\n' # 16 + 4==len('1024')
+ '\n' # 1
+ '(.*)$' # prefix of body+null (potentially)
+ % len(message) )
+ headlen = 8 + 24 + 14 + (3) + 44 + 12 + 18 + (48) + 16 + (4) + 1 + (1)
+
+ headbuf = self.recv_atleast(headlen)
+ self.assertFalse(len(headbuf) == 0)
+
+ (sub, msg_id, bodyprefix) = self.match(headresp, headbuf)
+ bodyresp=( '%s\0' % message )
+        bodylen = len(bodyresp)
+
+ bodybuf = ''.join([bodyprefix,
+ self.recv_atleast(bodylen - len(bodyprefix))])
+
+ self.assertEqual(len(bodybuf), msg_len+1,
+ "body received not the same length as message sent")
+ self.assertEqual(bodybuf, bodyresp,
+ " body ('%s')\nincorrectly returned as ('%s')"
+ % (bodyresp, bodybuf))
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/queue_properties.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/queue_properties.py
new file mode 100644
index 0000000000..3761c92360
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/queue_properties.py
@@ -0,0 +1,87 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import stomp
+import pika
+import base
+import time
+import os
+
+class TestQueueProperties(base.BaseTest):
+
+ def test_subscribe(self):
+ destination = "/queue/queue-properties-subscribe-test"
+
+ # subscribe
+ self.subscribe_dest(self.conn, destination, None,
+ headers={
+ 'x-message-ttl': 60000,
+ 'x-expires': 70000,
+ 'x-max-length': 10,
+ 'x-max-length-bytes': 20000,
+ 'x-dead-letter-exchange': 'dead-letter-exchange',
+ 'x-dead-letter-routing-key': 'dead-letter-routing-key',
+ 'x-max-priority': 6,
+ })
+
+ # now try to declare the queue using pika
+ # if the properties are the same we should
+ # not get any error
+ connection = pika.BlockingConnection(pika.ConnectionParameters(
+ host='127.0.0.1', port=int(os.environ["AMQP_PORT"])))
+ channel = connection.channel()
+ channel.queue_declare(queue='queue-properties-subscribe-test',
+ durable=True,
+ arguments={
+ 'x-message-ttl': 60000,
+ 'x-expires': 70000,
+ 'x-max-length': 10,
+ 'x-max-length-bytes': 20000,
+ 'x-dead-letter-exchange': 'dead-letter-exchange',
+ 'x-dead-letter-routing-key': 'dead-letter-routing-key',
+ 'x-max-priority': 6,
+ })
+
+ self.conn.disconnect()
+ connection.close()
+
+ def test_send(self):
+ destination = "/queue/queue-properties-send-test"
+
+ # send
+ self.conn.send(destination, "test1",
+ headers={
+ 'x-message-ttl': 60000,
+ 'x-expires': 70000,
+ 'x-max-length': 10,
+ 'x-max-length-bytes': 20000,
+ 'x-dead-letter-exchange': 'dead-letter-exchange',
+ 'x-dead-letter-routing-key': 'dead-letter-routing-key',
+ 'x-max-priority': 6,
+ })
+
+ # now try to declare the queue using pika
+ # if the properties are the same we should
+ # not get any error
+ connection = pika.BlockingConnection(pika.ConnectionParameters(
+ host='127.0.0.1', port=int(os.environ["AMQP_PORT"])))
+ channel = connection.channel()
+ channel.queue_declare(queue='queue-properties-send-test',
+ durable=True,
+ arguments={
+ 'x-message-ttl': 60000,
+ 'x-expires': 70000,
+ 'x-max-length': 10,
+ 'x-max-length-bytes': 20000,
+ 'x-dead-letter-exchange': 'dead-letter-exchange',
+ 'x-dead-letter-routing-key': 'dead-letter-routing-key',
+ 'x-max-priority': 6,
+ })
+
+ self.conn.disconnect()
+ connection.close()
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/redelivered.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/redelivered.py
new file mode 100644
index 0000000000..3dfdd72cc9
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/redelivered.py
@@ -0,0 +1,40 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import stomp
+import base
+import time
+
+class TestRedelivered(base.BaseTest):
+
+ def test_redelivered(self):
+ destination = "/queue/redelivered-test"
+
+ # subscribe and send message
+ self.subscribe_dest(self.conn, destination, None, ack='client')
+ self.conn.send(destination, "test1")
+ message_receive_timeout = 30
+ self.assertTrue(self.listener.wait(message_receive_timeout), "Test message not received within {0} seconds".format(message_receive_timeout))
+ self.assertEquals(1, len(self.listener.messages))
+ self.assertEquals('false', self.listener.messages[0]['headers']['redelivered'])
+
+ # disconnect with no ack
+ self.conn.disconnect()
+
+ # now reconnect
+ conn2 = self.create_connection()
+ try:
+ listener2 = base.WaitableListener()
+ listener2.reset(1)
+ conn2.set_listener('', listener2)
+ self.subscribe_dest(conn2, destination, None, ack='client')
+ self.assertTrue(listener2.wait(), "message not received again")
+ self.assertEquals(1, len(listener2.messages))
+ self.assertEquals('true', listener2.messages[0]['headers']['redelivered'])
+ finally:
+ conn2.disconnect()
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/reliability.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/reliability.py
new file mode 100644
index 0000000000..6fbcb3d492
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/reliability.py
@@ -0,0 +1,41 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import base
+import stomp
+import unittest
+import time
+
+class TestReliability(base.BaseTest):
+
+ def test_send_and_disconnect(self):
+ ''' Test close socket after send does not lose messages '''
+ destination = "/queue/reliability"
+ pub_conn = self.create_connection()
+ try:
+ msg = "0" * (128)
+
+ count = 10000
+
+ listener = base.WaitableListener()
+ listener.reset(count)
+ self.conn.set_listener('', listener)
+ self.subscribe_dest(self.conn, destination, None)
+
+ for x in range(0, count):
+ pub_conn.send(destination, msg + str(x))
+ time.sleep(2.0)
+ pub_conn.disconnect()
+
+ if listener.wait(30):
+ self.assertEquals(count, len(listener.messages))
+ else:
+ listener.print_state("Final state of listener:")
+ self.fail("Did not receive %s messages in time" % count)
+ finally:
+ if pub_conn.is_connected():
+ pub_conn.disconnect()
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/ssl_lifecycle.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/ssl_lifecycle.py
new file mode 100644
index 0000000000..570ad9f5a3
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/ssl_lifecycle.py
@@ -0,0 +1,81 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import os
+import os.path
+import sys
+
+import stomp
+import base
+import ssl
+
+base_path = os.path.dirname(sys.argv[0])
+
+ssl_key_file = os.path.join(os.getenv('SSL_CERTS_PATH'), 'client', 'key.pem')
+ssl_cert_file = os.path.join(os.getenv('SSL_CERTS_PATH'), 'client', 'cert.pem')
+ssl_ca_certs = os.path.join(os.getenv('SSL_CERTS_PATH'), 'testca', 'cacert.pem')
+
+class TestSslClient(unittest.TestCase):
+
+ def __ssl_connect(self):
+ conn = stomp.Connection(host_and_ports = [ ('localhost', int(os.environ["STOMP_PORT_TLS"])) ],
+ use_ssl = True, ssl_key_file = ssl_key_file,
+ ssl_cert_file = ssl_cert_file,
+ ssl_ca_certs = ssl_ca_certs)
+        print("FILE: {}".format(ssl_cert_file))
+ conn.start()
+ conn.connect("guest", "guest")
+ return conn
+
+ def __ssl_auth_connect(self):
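+        # No explicit credentials: authentication is expected to come from
+        # the client TLS certificate (ssl_cert_login).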
+ conn = stomp.Connection(host_and_ports = [ ('localhost', int(os.environ["STOMP_PORT_TLS"])) ],
+ use_ssl = True, ssl_key_file = ssl_key_file,
+ ssl_cert_file = ssl_cert_file,
+ ssl_ca_certs = ssl_ca_certs)
+ conn.start()
+ conn.connect()
+ return conn
+
+ def test_ssl_connect(self):
+ conn = self.__ssl_connect()
+ conn.disconnect()
+
+ def test_ssl_auth_connect(self):
+ conn = self.__ssl_auth_connect()
+ conn.disconnect()
+
+ def test_ssl_send_receive(self):
+ conn = self.__ssl_connect()
+ self.__test_conn(conn)
+
+ def test_ssl_auth_send_receive(self):
+ conn = self.__ssl_auth_connect()
+ self.__test_conn(conn)
+
+ def __test_conn(self, conn):
+ try:
+ listener = base.WaitableListener()
+
+ conn.set_listener('', listener)
+
+ d = "/topic/ssl.test"
+ conn.subscribe(destination=d, ack="auto", id="ctag", receipt="sub")
+
+ self.assertTrue(listener.wait(1))
+
+ self.assertEquals("sub",
+ listener.receipts[0]['headers']['receipt-id'])
+
+ listener.reset(1)
+ conn.send(body="Hello SSL!", destination=d)
+
+ self.assertTrue(listener.wait())
+
+ self.assertEquals("Hello SSL!", listener.messages[0]['message'])
+ finally:
+ conn.disconnect()
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test.py
new file mode 100755
index 0000000000..01967465a2
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+import test_runner
+
+if __name__ == '__main__':
+ modules = [
+ 'parsing',
+ 'errors',
+ 'lifecycle',
+ 'ack',
+ 'amqp_headers',
+ 'queue_properties',
+ 'reliability',
+ 'transactions',
+ 'x_queue_name',
+ 'destinations',
+ 'redelivered',
+ 'topic_permissions',
+ 'x_queue_type_quorum'
+ ]
+ test_runner.run_unittests(modules)
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_connect_options.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_connect_options.py
new file mode 100755
index 0000000000..10efa4fbb4
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_connect_options.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import test_runner
+
+if __name__ == '__main__':
+ modules = ['connect_options']
+ test_runner.run_unittests(modules)
+
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_runner.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_runner.py
new file mode 100644
index 0000000000..9aa5855b02
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_runner.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import sys
+import os
+
+def run_unittests(modules):
+ suite = unittest.TestSuite()
+ for m in modules:
+ mod = __import__(m)
+ for name in dir(mod):
+ obj = getattr(mod, name)
+ if name.startswith("Test") and issubclass(obj, unittest.TestCase):
+ suite.addTest(unittest.TestLoader().loadTestsFromTestCase(obj))
+
+ ts = unittest.TextTestRunner().run(unittest.TestSuite(suite))
+ if ts.errors or ts.failures:
+ sys.exit(1)
+
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_ssl.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_ssl.py
new file mode 100755
index 0000000000..95d2d2baa7
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_ssl.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import test_runner
+import test_util
+
+if __name__ == '__main__':
+ modules = ['ssl_lifecycle']
+ test_util.ensure_ssl_auth_user()
+ test_runner.run_unittests(modules)
+
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_util.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_util.py
new file mode 100644
index 0000000000..911100c54f
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/test_util.py
@@ -0,0 +1,52 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import subprocess
+import socket
+import sys
+import os
+import os.path
+
+def ensure_ssl_auth_user():
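+    # Create a password-less user whose name is intended to match the client
+    # certificate's distinguished name, so certificate-based (ssl_cert_login)
+    # authentication maps the presented certificate onto this user.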
+ user = 'O=client,CN=%s' % socket.gethostname()
+ rabbitmqctl(['stop_app'])
+ rabbitmqctl(['reset'])
+ rabbitmqctl(['start_app'])
+ rabbitmqctl(['add_user', user, 'foo'])
+ rabbitmqctl(['clear_password', user])
+ rabbitmqctl(['set_permissions', user, '.*', '.*', '.*'])
+
+def enable_implicit_connect():
+ switch_config(implicit_connect='true', default_user='[{login, "guest"}, {passcode, "guest"}]')
+
+def disable_implicit_connect():
+ switch_config(implicit_connect='false', default_user='[]')
+
+def enable_default_user():
+ switch_config(default_user='[{login, "guest"}, {passcode, "guest"}]')
+
+def disable_default_user():
+ switch_config(default_user='[]')
+
+def switch_config(implicit_connect='', default_user=''):
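+    # Reconfigure the plugin at runtime: stop rabbitmq_stomp, apply the
+    # requested application environment values, then start it again. The
+    # ranch:info() dumps are printed only to help debug listener state.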
+ cmd = ''
+ cmd += 'ok = io:format("~n===== Ranch listeners (before stop) =====~n~n~p~n", [ranch:info()]),'
+ cmd += 'ok = application:stop(rabbitmq_stomp),'
+ cmd += 'io:format("~n===== Ranch listeners (after stop) =====~n~n~p~n", [ranch:info()]),'
+ if implicit_connect:
+ cmd += 'ok = application:set_env(rabbitmq_stomp,implicit_connect,{}),'.format(implicit_connect)
+ if default_user:
+ cmd += 'ok = application:set_env(rabbitmq_stomp,default_user,{}),'.format(default_user)
+ cmd += 'ok = application:start(rabbitmq_stomp),'
+ cmd += 'io:format("~n===== Ranch listeners (after start) =====~n~n~p~n", [ranch:info()]).'
+ rabbitmqctl(['eval', cmd])
+
+def rabbitmqctl(args):
+ ctl = os.getenv('RABBITMQCTL')
+ cmdline = [ctl, '-n', os.getenv('RABBITMQ_NODENAME')]
+ cmdline.extend(args)
+ subprocess.check_call(cmdline)
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/topic_permissions.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/topic_permissions.py
new file mode 100644
index 0000000000..6272f6d8b5
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/topic_permissions.py
@@ -0,0 +1,52 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import base
+import test_util
+import sys
+
+
+class TestTopicPermissions(base.BaseTest):
+ @classmethod
+ def setUpClass(cls):
+ test_util.rabbitmqctl(['set_topic_permissions', 'guest', 'amq.topic', '^{username}.Authorised', '^{username}.Authorised'])
+ cls.authorised_topic = '/topic/guest.AuthorisedTopic'
+ cls.restricted_topic = '/topic/guest.RestrictedTopic'
+
+ @classmethod
+ def tearDownClass(cls):
+ test_util.rabbitmqctl(['clear_topic_permissions', 'guest'])
+
+ def test_publish_authorisation(self):
+ ''' Test topic permissions via publish '''
+ self.listener.reset()
+
+ # send on authorised topic
+ self.subscribe_dest(self.conn, self.authorised_topic, None)
+ self.conn.send(self.authorised_topic, "authorised hello")
+
+ self.assertTrue(self.listener.wait(), "Timeout, no message received")
+
+ # assert no errors
+ if len(self.listener.errors) > 0:
+ self.fail(self.listener.errors[0]['message'])
+
+ # check msg content
+ msg = self.listener.messages[0]
+ self.assertEqual("authorised hello", msg['message'])
+ self.assertEqual(self.authorised_topic, msg['headers']['destination'])
+
+ self.listener.reset()
+
+ # send on restricted topic
+ self.conn.send(self.restricted_topic, "hello")
+
+ self.assertTrue(self.listener.wait(), "Timeout, no message received")
+
+ # assert errors
+ self.assertGreater(len(self.listener.errors), 0)
+ self.assertIn("ACCESS_REFUSED", self.listener.errors[0]['message'])
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/transactions.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/transactions.py
new file mode 100644
index 0000000000..379806bfb8
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/transactions.py
@@ -0,0 +1,61 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import stomp
+import base
+import time
+
+class TestTransactions(base.BaseTest):
+
+ def test_tx_commit(self):
+ ''' Test TX with a COMMIT and ensure messages are delivered '''
+ destination = "/exchange/amq.fanout"
+ tx = "test.tx"
+
+ self.listener.reset()
+ self.subscribe_dest(self.conn, destination, None)
+ self.conn.begin(transaction=tx)
+ self.conn.send(destination, "hello!", transaction=tx)
+ self.conn.send(destination, "again!")
+
+ ## should see the second message
+ self.assertTrue(self.listener.wait(3))
+ self.assertEquals(1, len(self.listener.messages))
+ self.assertEquals("again!", self.listener.messages[0]['message'])
+
+ ## now look for the first message
+ self.listener.reset()
+ self.conn.commit(transaction=tx)
+ self.assertTrue(self.listener.wait(3))
+ self.assertEquals(1, len(self.listener.messages),
+ "Missing committed message")
+ self.assertEquals("hello!", self.listener.messages[0]['message'])
+
+ def test_tx_abort(self):
+ ''' Test TX with an ABORT and ensure messages are discarded '''
+ destination = "/exchange/amq.fanout"
+ tx = "test.tx"
+
+ self.listener.reset()
+ self.subscribe_dest(self.conn, destination, None)
+ self.conn.begin(transaction=tx)
+ self.conn.send(destination, "hello!", transaction=tx)
+ self.conn.send(destination, "again!")
+
+ ## should see the second message
+ self.assertTrue(self.listener.wait(3))
+ self.assertEquals(1, len(self.listener.messages))
+ self.assertEquals("again!", self.listener.messages[0]['message'])
+
+ ## now look for the first message to be discarded
+ self.listener.reset()
+ self.conn.abort(transaction=tx)
+ self.assertFalse(self.listener.wait(3))
+ self.assertEquals(0, len(self.listener.messages),
+                          "Unexpected message received after abort")
+
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_name.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_name.py
new file mode 100644
index 0000000000..f2c90486eb
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_name.py
@@ -0,0 +1,71 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import unittest
+import stomp
+import pika
+import base
+import time
+import os
+
+class TestUserGeneratedQueueName(base.BaseTest):
+
+ def test_exchange_dest(self):
+ queueName='my-user-generated-queue-name-exchange'
+
+ # subscribe
+ self.subscribe_dest(
+ self.conn,
+ '/exchange/amq.direct/test',
+ None,
+ headers={ 'x-queue-name': queueName }
+ )
+
+ connection = pika.BlockingConnection(
+ pika.ConnectionParameters( host='127.0.0.1', port=int(os.environ["AMQP_PORT"])))
+ channel = connection.channel()
+
+ # publish a message to the named queue
+ channel.basic_publish(
+ exchange='',
+ routing_key=queueName,
+ body='Hello World!')
+
+ # check if we receive the message from the STOMP subscription
+ self.assertTrue(self.listener.wait(2), "initial message not received")
+ self.assertEquals(1, len(self.listener.messages))
+
+ self.conn.disconnect()
+ connection.close()
+
+ def test_topic_dest(self):
+ queueName='my-user-generated-queue-name-topic'
+
+ # subscribe
+ self.subscribe_dest(
+ self.conn,
+ '/topic/test',
+ None,
+ headers={ 'x-queue-name': queueName }
+ )
+
+ connection = pika.BlockingConnection(
+ pika.ConnectionParameters( host='127.0.0.1', port=int(os.environ["AMQP_PORT"])))
+ channel = connection.channel()
+
+ # publish a message to the named queue
+ channel.basic_publish(
+ exchange='',
+ routing_key=queueName,
+ body='Hello World!')
+
+ # check if we receive the message from the STOMP subscription
+ self.assertTrue(self.listener.wait(2), "initial message not received")
+ self.assertEquals(1, len(self.listener.messages))
+
+ self.conn.disconnect()
+ connection.close()
diff --git a/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_type_quorum.py b/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_type_quorum.py
new file mode 100644
index 0000000000..1018abd0d4
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/python_SUITE_data/src/x_queue_type_quorum.py
@@ -0,0 +1,62 @@
+## This Source Code Form is subject to the terms of the Mozilla Public
+## License, v. 2.0. If a copy of the MPL was not distributed with this
+## file, You can obtain one at https://mozilla.org/MPL/2.0/.
+##
+## Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+##
+
+import pika
+import base
+import time
+import os
+import re
+
+
+class TestUserGeneratedQueueName(base.BaseTest):
+
+ def test_quorum_queue(self):
+ queueName = 'my-quorum-queue'
+
+ # subscribe
+ self.subscribe_dest(
+ self.conn,
+ '/topic/quorum-queue-test',
+ None,
+ headers={
+ 'x-queue-name': queueName,
+ 'x-queue-type': 'quorum',
+ 'durable': True,
+ 'auto-delete': False,
+ 'id': 1234
+ }
+ )
+
+        # give the quorum queue some time to start
+ time.sleep(5)
+
+ connection = pika.BlockingConnection(
+ pika.ConnectionParameters(host='127.0.0.1', port=int(os.environ["AMQP_PORT"])))
+ channel = connection.channel()
+
+ # publish a message to the named queue
+ channel.basic_publish(
+ exchange='',
+ routing_key=queueName,
+ body='Hello World!')
+
+ # could we declare a quorum queue?
+ quorum_queue_supported = True
+ if len(self.listener.errors) > 0:
+ pattern = re.compile(r"feature flag is disabled", re.MULTILINE)
+ for error in self.listener.errors:
+                if pattern.search(error['message']) is not None:
+ quorum_queue_supported = False
+ break
+
+ if quorum_queue_supported:
+ # check if we receive the message from the STOMP subscription
+ self.assertTrue(self.listener.wait(5), "initial message not received")
+ self.assertEquals(1, len(self.listener.messages))
+ self.conn.disconnect()
+
+ connection.close()
diff --git a/deps/rabbitmq_stomp/test/src/rabbit_stomp_client.erl b/deps/rabbitmq_stomp/test/src/rabbit_stomp_client.erl
new file mode 100644
index 0000000000..739512e3b3
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/src/rabbit_stomp_client.erl
@@ -0,0 +1,75 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+%% The stupidest client imaginable, just for testing.
+
+-module(rabbit_stomp_client).
+
+-export([connect/1, connect/2, connect/4, connect/5, disconnect/1, send/2, send/3, send/4, recv/1]).
+
+-include("rabbit_stomp_frame.hrl").
+
+-define(TIMEOUT, 1000). % milliseconds
+
+connect(Port) -> connect0([], "guest", "guest", Port, []).
+connect(V, Port) -> connect0([{"accept-version", V}], "guest", "guest", Port, []).
+connect(V, Login, Pass, Port) -> connect0([{"accept-version", V}], Login, Pass, Port, []).
+connect(V, Login, Pass, Port, Headers) -> connect0([{"accept-version", V}], Login, Pass, Port, Headers).
+
+connect0(Version, Login, Pass, Port, Headers) ->
+    %% The default port is 61613, but it sits in the middle of the ephemeral
+    %% ports range on many operating systems and may already be in use, so
+    %% callers pass in a dedicated test port (close to the AMQP default).
+ {ok, Sock} = gen_tcp:connect(localhost, Port, [{active, false}, binary]),
+ Client0 = recv_state(Sock),
+ send(Client0, "CONNECT", [{"login", Login},
+ {"passcode", Pass} | Version] ++ Headers),
+ {#stomp_frame{command = "CONNECTED"}, Client1} = recv(Client0),
+ {ok, Client1}.
+
+disconnect(Client = {Sock, _}) ->
+ send(Client, "DISCONNECT"),
+ gen_tcp:close(Sock).
+
+send(Client, Command) ->
+ send(Client, Command, []).
+
+send(Client, Command, Headers) ->
+ send(Client, Command, Headers, []).
+
+send({Sock, _}, Command, Headers, Body) ->
+ Frame = rabbit_stomp_frame:serialize(
+ #stomp_frame{command = list_to_binary(Command),
+ headers = Headers,
+ body_iolist = Body}),
+ gen_tcp:send(Sock, Frame).
+
+recv_state(Sock) ->
+ {Sock, []}.
+
+recv({_Sock, []} = Client) ->
+ recv(Client, rabbit_stomp_frame:initial_state(), 0);
+recv({Sock, [Frame | Frames]}) ->
+ {Frame, {Sock, Frames}}.
+
+recv(Client = {Sock, _}, FrameState, Length) ->
+ {ok, Payload} = gen_tcp:recv(Sock, Length, ?TIMEOUT),
+ parse(Payload, Client, FrameState, Length).
+
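+%% Split a TCP payload into frames, accumulating completed frames in reverse
+%% order. A lone trailing newline after a frame's NUL terminator (permitted
+%% between STOMP frames) is treated the same as end of input.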
+parse(Payload, Client = {Sock, FramesRev}, FrameState, Length) ->
+ case rabbit_stomp_frame:parse(Payload, FrameState) of
+ {ok, Frame, <<>>} ->
+ recv({Sock, lists:reverse([Frame | FramesRev])});
+ {ok, Frame, <<"\n">>} ->
+ recv({Sock, lists:reverse([Frame | FramesRev])});
+ {ok, Frame, Rest} ->
+ parse(Rest, {Sock, [Frame | FramesRev]},
+ rabbit_stomp_frame:initial_state(), Length);
+ {more, NewState} ->
+ recv(Client, NewState, 0)
+ end.
diff --git a/deps/rabbitmq_stomp/test/src/rabbit_stomp_publish_test.erl b/deps/rabbitmq_stomp/test/src/rabbit_stomp_publish_test.erl
new file mode 100644
index 0000000000..6b5b9298fa
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/src/rabbit_stomp_publish_test.erl
@@ -0,0 +1,80 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(rabbit_stomp_publish_test).
+
+-export([run/0]).
+
+-include("rabbit_stomp_frame.hrl").
+
+-define(DESTINATION, "/queue/test").
+
+-define(MICROS_PER_UPDATE, 5000000).
+-define(MICROS_PER_UPDATE_MSG, 100000).
+-define(MICROS_PER_SECOND, 1000000).
+
+%% A very simple publish-and-consume-as-fast-as-you-can test.
+
+run() ->
+ [put(K, 0) || K <- [sent, recd, last_sent, last_recd]],
+ put(last_ts, erlang:monotonic_time()),
+ {ok, Pub} = rabbit_stomp_client:connect(),
+ {ok, Recv} = rabbit_stomp_client:connect(),
+ Self = self(),
+ spawn(fun() -> publish(Self, Pub, 0, erlang:monotonic_time()) end),
+ rabbit_stomp_client:send(
+ Recv, "SUBSCRIBE", [{"destination", ?DESTINATION}]),
+ spawn(fun() -> recv(Self, Recv, 0, erlang:monotonic_time()) end),
+ report().
+
+report() ->
+ receive
+ {sent, C} -> put(sent, C);
+ {recd, C} -> put(recd, C)
+ end,
+ Diff = erlang:convert_time_unit(
+ erlang:monotonic_time() - get(last_ts), native, microseconds),
+ case Diff > ?MICROS_PER_UPDATE of
+ true -> S = get(sent) - get(last_sent),
+ R = get(recd) - get(last_recd),
+ put(last_sent, get(sent)),
+ put(last_recd, get(recd)),
+ put(last_ts, erlang:monotonic_time()),
+ io:format("Send ~p msg/s | Recv ~p msg/s~n",
+ [trunc(S * ?MICROS_PER_SECOND / Diff),
+ trunc(R * ?MICROS_PER_SECOND / Diff)]);
+ false -> ok
+ end,
+ report().
+
+publish(Owner, Client, Count, TS) ->
+ rabbit_stomp_client:send(
+ Client, "SEND", [{"destination", ?DESTINATION}],
+ [integer_to_list(Count)]),
+ Diff = erlang:convert_time_unit(
+ erlang:monotonic_time() - TS, native, microseconds),
+ case Diff > ?MICROS_PER_UPDATE_MSG of
+ true -> Owner ! {sent, Count + 1},
+ publish(Owner, Client, Count + 1,
+ erlang:monotonic_time());
+ false -> publish(Owner, Client, Count + 1, TS)
+ end.
+
+recv(Owner, Client0, Count, TS) ->
+ {#stomp_frame{body_iolist = Body}, Client1} =
+ rabbit_stomp_client:recv(Client0),
+ BodyInt = list_to_integer(binary_to_list(iolist_to_binary(Body))),
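+    %% Messages must arrive in publish order: each body carries the sender's counter.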
+ Count = BodyInt,
+ Diff = erlang:convert_time_unit(
+ erlang:monotonic_time() - TS, native, microseconds),
+ case Diff > ?MICROS_PER_UPDATE_MSG of
+ true -> Owner ! {recd, Count + 1},
+ recv(Owner, Client1, Count + 1,
+ erlang:monotonic_time());
+ false -> recv(Owner, Client1, Count + 1, TS)
+ end.
+
diff --git a/deps/rabbitmq_stomp/test/src/test.config b/deps/rabbitmq_stomp/test/src/test.config
new file mode 100644
index 0000000000..5968824996
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/src/test.config
@@ -0,0 +1,13 @@
+[{rabbitmq_stomp, [{default_user, []},
+ {ssl_cert_login, true},
+ {tcp_listeners, [5673]},
+ {ssl_listeners, [5674]}
+ ]},
+ {rabbit, [{ssl_options, [{cacertfile,"%%CERTS_DIR%%/testca/cacert.pem"},
+ {certfile,"%%CERTS_DIR%%/server/cert.pem"},
+ {keyfile,"%%CERTS_DIR%%/server/key.pem"},
+ {verify,verify_peer},
+ {fail_if_no_peer_cert,true}
+ ]}
+ ]}
+].
diff --git a/deps/rabbitmq_stomp/test/topic_SUITE.erl b/deps/rabbitmq_stomp/test/topic_SUITE.erl
new file mode 100644
index 0000000000..4a6421a326
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/topic_SUITE.erl
@@ -0,0 +1,170 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(topic_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+-include("rabbit_stomp.hrl").
+-include("rabbit_stomp_frame.hrl").
+-include("rabbit_stomp_headers.hrl").
+
+all() ->
+ [{group, list_to_atom("version_" ++ V)} || V <- ?SUPPORTED_VERSIONS].
+
+groups() ->
+ Tests = [
+ publish_topic_authorisation,
+ subscribe_topic_authorisation,
+ change_default_topic_exchange
+ ],
+
+ [{list_to_atom("version_" ++ V), [sequence], Tests}
+ || V <- ?SUPPORTED_VERSIONS].
+
+init_per_suite(Config) ->
+ Config1 = rabbit_ct_helpers:set_config(Config,
+ [{rmq_nodename_suffix, ?MODULE}]),
+ rabbit_ct_helpers:log_environment(),
+ rabbit_ct_helpers:run_setup_steps(Config1,
+ rabbit_ct_broker_helpers:setup_steps()).
+
+end_per_suite(Config) ->
+ rabbit_ct_helpers:run_teardown_steps(Config,
+ rabbit_ct_broker_helpers:teardown_steps()).
+
+init_per_group(Group, Config) ->
+ Version = string:sub_string(atom_to_list(Group), 9),
+ rabbit_ct_helpers:set_config(Config, [{version, Version}]).
+
+end_per_group(_Group, Config) -> Config.
+
+init_per_testcase(_TestCase, Config) ->
+ Version = ?config(version, Config),
+ StompPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp),
+ {ok, Connection} = amqp_connection:start(#amqp_params_direct{
+ node = rabbit_ct_broker_helpers:get_node_config(Config, 0, nodename)
+ }),
+ {ok, Channel} = amqp_connection:open_channel(Connection),
+ {ok, Client} = rabbit_stomp_client:connect(Version, StompPort),
+ Config1 = rabbit_ct_helpers:set_config(Config, [
+ {amqp_connection, Connection},
+ {amqp_channel, Channel},
+ {stomp_client, Client}
+ ]),
+ init_per_testcase0(Config1).
+
+end_per_testcase(_TestCase, Config) ->
+ Connection = ?config(amqp_connection, Config),
+ Channel = ?config(amqp_channel, Config),
+ Client = ?config(stomp_client, Config),
+ rabbit_stomp_client:disconnect(Client),
+ amqp_channel:close(Channel),
+ amqp_connection:close(Connection),
+ end_per_testcase0(Config).
+
+init_per_testcase0(Config) ->
+ rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, add_user,
+ [<<"user">>, <<"pass">>, <<"acting-user">>]),
+ rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, set_permissions, [
+ <<"user">>, <<"/">>, <<".*">>, <<".*">>, <<".*">>, <<"acting-user">>]),
+ rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, set_topic_permissions, [
+ <<"user">>, <<"/">>, <<"amq.topic">>, <<"^{username}.Authorised">>, <<"^{username}.Authorised">>, <<"acting-user">>]),
+ Version = ?config(version, Config),
+ StompPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_stomp),
+ {ok, ClientFoo} = rabbit_stomp_client:connect(Version, "user", "pass", StompPort),
+ rabbit_ct_helpers:set_config(Config, [{client_foo, ClientFoo}]).
+
+end_per_testcase0(Config) ->
+ ClientFoo = ?config(client_foo, Config),
+ rabbit_stomp_client:disconnect(ClientFoo),
+ rabbit_ct_broker_helpers:rpc(Config, 0, rabbit_auth_backend_internal, delete_user,
+ [<<"user">>, <<"acting-user">>]),
+ Config.
+
+publish_topic_authorisation(Config) ->
+ ClientFoo = ?config(client_foo, Config),
+
+ AuthorisedTopic = "/topic/user.AuthorisedTopic",
+ RestrictedTopic = "/topic/user.RestrictedTopic",
+
+ %% subscribe so the message can be observed, then send on the authorised topic
+ rabbit_stomp_client:send(
+ ClientFoo, "SUBSCRIBE", [{"destination", AuthorisedTopic}]),
+
+ rabbit_stomp_client:send(
+ ClientFoo, "SEND", [{"destination", AuthorisedTopic}], ["authorised hello"]),
+
+ {ok, _Client1, _, Body} = stomp_receive(ClientFoo, "MESSAGE"),
+ [<<"authorised hello">>] = Body,
+
+ %% send on restricted topic
+ rabbit_stomp_client:send(
+ ClientFoo, "SEND", [{"destination", RestrictedTopic}], ["hello"]),
+ {ok, _Client2, Hdrs2, _} = stomp_receive(ClientFoo, "ERROR"),
+ "access_refused" = proplists:get_value("message", Hdrs2),
+ ok.
+
+subscribe_topic_authorisation(Config) ->
+ ClientFoo = ?config(client_foo, Config),
+
+ AuthorisedTopic = "/topic/user.AuthorisedTopic",
+ RestrictedTopic = "/topic/user.RestrictedTopic",
+
+ %% subscribe to authorised topic
+ rabbit_stomp_client:send(
+ ClientFoo, "SUBSCRIBE", [{"destination", AuthorisedTopic}]),
+
+ rabbit_stomp_client:send(
+ ClientFoo, "SEND", [{"destination", AuthorisedTopic}], ["authorised hello"]),
+
+ {ok, _Client1, _, Body} = stomp_receive(ClientFoo, "MESSAGE"),
+ [<<"authorised hello">>] = Body,
+
+ %% subscribe to restricted topic
+ rabbit_stomp_client:send(
+ ClientFoo, "SUBSCRIBE", [{"destination", RestrictedTopic}]),
+ {ok, _Client2, Hdrs2, _} = stomp_receive(ClientFoo, "ERROR"),
+ "access_refused" = proplists:get_value("message", Hdrs2),
+ ok.
+
+change_default_topic_exchange(Config) ->
+ Channel = ?config(amqp_channel, Config),
+ ClientFoo = ?config(client_foo, Config),
+ Ex = <<"my-topic-exchange">>,
+ AuthorisedTopic = "/topic/user.AuthorisedTopic",
+
+ Declare = #'exchange.declare'{exchange = Ex, type = <<"topic">>},
+ #'exchange.declare_ok'{} = amqp_channel:call(Channel, Declare),
+
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, application, set_env, [rabbitmq_stomp, default_topic_exchange, Ex]),
+
+ rabbit_stomp_client:send(
+ ClientFoo, "SUBSCRIBE", [{"destination", AuthorisedTopic}]),
+
+ rabbit_stomp_client:send(
+ ClientFoo, "SEND", [{"destination", AuthorisedTopic}], ["ohai there"]),
+
+ {ok, _Client1, _, Body} = stomp_receive(ClientFoo, "MESSAGE"),
+ [<<"ohai there">>] = Body,
+
+ Delete = #'exchange.delete'{exchange = Ex},
+ #'exchange.delete_ok'{} = amqp_channel:call(Channel, Delete),
+ ok = rabbit_ct_broker_helpers:rpc(Config, 0, application, unset_env, [rabbitmq_stomp, default_topic_exchange]),
+ ok.
+
+
+stomp_receive(Client, Command) ->
+ {#stomp_frame{command = Command,
+ headers = Hdrs,
+ body_iolist = Body}, Client1} =
+ rabbit_stomp_client:recv(Client),
+ {ok, Client1, Hdrs, Body}.
+
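
The two authorisation tests above depend on the broker expanding the {username} variable in a topic permission pattern before matching it against the routing key derived from the /topic/ destination (so for user "user" the effective pattern is "^user.Authorised"). The sketch below is an illustrative re-implementation of that check, not the broker's actual code; topic_allowed/3 is a hypothetical helper that only makes the expansion step concrete.

    %% Illustrative sketch: expand {username}, then regex-match the routing key.
    topic_allowed(Pattern0, Username, RoutingKey) ->
        Pattern = binary:replace(Pattern0, <<"{username}">>, Username, [global]),
        case re:run(RoutingKey, Pattern, [{capture, none}]) of
            match   -> true;
            nomatch -> false
        end.

    %% topic_allowed(<<"^{username}.Authorised">>, <<"user">>,
    %%               <<"user.AuthorisedTopic">>)  %% => true
    %% topic_allowed(<<"^{username}.Authorised">>, <<"user">>,
    %%               <<"user.RestrictedTopic">>)  %% => false
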
diff --git a/deps/rabbitmq_stomp/test/util_SUITE.erl b/deps/rabbitmq_stomp/test/util_SUITE.erl
new file mode 100644
index 0000000000..89d9d9e37e
--- /dev/null
+++ b/deps/rabbitmq_stomp/test/util_SUITE.erl
@@ -0,0 +1,242 @@
+%% This Source Code Form is subject to the terms of the Mozilla Public
+%% License, v. 2.0. If a copy of the MPL was not distributed with this
+%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
+%%
+%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
+%%
+
+-module(util_SUITE).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("amqp_client/include/amqp_client.hrl").
+-include_lib("amqp_client/include/rabbit_routing_prefixes.hrl").
+-include("rabbit_stomp_frame.hrl").
+-compile(export_all).
+
+all() -> [
+ longstr_field,
+ message_properties,
+ message_headers,
+ minimal_message_headers_with_no_custom,
+ headers_post_process,
+ headers_post_process_noop_replyto,
+ headers_post_process_noop2,
+ negotiate_version_both_empty,
+ negotiate_version_no_common,
+ negotiate_version_simple_common,
+ negotiate_version_two_choice_common,
+ negotiate_version_two_choice_common_out_of_order,
+ negotiate_version_two_choice_big_common,
+ negotiate_version_choice_mismatched_length,
+ negotiate_version_choice_duplicates,
+ trim_headers,
+ ack_mode_auto,
+ ack_mode_auto_default,
+ ack_mode_client,
+ ack_mode_client_individual,
+ consumer_tag_id,
+ consumer_tag_destination,
+ consumer_tag_invalid,
+ parse_valid_message_id,
+ parse_invalid_message_id
+ ].
+
+
+%%--------------------------------------------------------------------
+%% Header Processing Tests
+%%--------------------------------------------------------------------
+
+longstr_field(_) ->
+ {<<"ABC">>, longstr, <<"DEF">>} =
+ rabbit_stomp_util:longstr_field("ABC", "DEF").
+
+message_properties(_) ->
+ Headers = [
+ {"content-type", "text/plain"},
+ {"content-encoding", "UTF-8"},
+ {"persistent", "true"},
+ {"priority", "1"},
+ {"correlation-id", "123"},
+ {"reply-to", "something"},
+ {"expiration", "my-expiration"},
+ {"amqp-message-id", "M123"},
+ {"timestamp", "123456"},
+ {"type", "freshly-squeezed"},
+ {"user-id", "joe"},
+ {"app-id", "joe's app"},
+ {"str", "foo"},
+ {"int", "123"}
+ ],
+
+ #'P_basic'{
+ content_type = <<"text/plain">>,
+ content_encoding = <<"UTF-8">>,
+ delivery_mode = 2,
+ priority = 1,
+ correlation_id = <<"123">>,
+ reply_to = <<"something">>,
+ expiration = <<"my-expiration">>,
+ message_id = <<"M123">>,
+ timestamp = 123456,
+ type = <<"freshly-squeezed">>,
+ user_id = <<"joe">>,
+ app_id = <<"joe's app">>,
+ headers = [{<<"str">>, longstr, <<"foo">>},
+ {<<"int">>, longstr, <<"123">>}]
+ } =
+ rabbit_stomp_util:message_properties(#stomp_frame{headers = Headers}).
+
+message_headers(_) ->
+ Properties = #'P_basic'{
+ headers = [{<<"str">>, longstr, <<"foo">>},
+ {<<"int">>, signedint, 123}],
+ content_type = <<"text/plain">>,
+ content_encoding = <<"UTF-8">>,
+ delivery_mode = 2,
+ priority = 1,
+ correlation_id = 123,
+ reply_to = <<"something">>,
+ message_id = <<"M123">>,
+ timestamp = 123456,
+ type = <<"freshly-squeezed">>,
+ user_id = <<"joe">>,
+ app_id = <<"joe's app">>},
+
+ Headers = rabbit_stomp_util:message_headers(Properties),
+
+ Expected = [
+ {"content-type", "text/plain"},
+ {"content-encoding", "UTF-8"},
+ {"persistent", "true"},
+ {"priority", "1"},
+ {"correlation-id", "123"},
+ {"reply-to", "something"},
+ {"expiration", "my-expiration"},
+ {"amqp-message-id", "M123"},
+ {"timestamp", "123456"},
+ {"type", "freshly-squeezed"},
+ {"user-id", "joe"},
+ {"app-id", "joe's app"},
+ {"str", "foo"},
+ {"int", "123"}
+ ],
+
+ [] = lists:subtract(Headers, Expected). %% every generated header must appear in Expected
+
+minimal_message_headers_with_no_custom(_) ->
+ Properties = #'P_basic'{},
+
+ Headers = rabbit_stomp_util:message_headers(Properties),
+ Expected = [
+ {"content-type", "text/plain"},
+ {"content-encoding", "UTF-8"},
+ {"amqp-message-id", "M123"}
+ ],
+
+ [] = lists:subtract(Headers, Expected).
+
+headers_post_process(_) ->
+ Headers = [{"header1", "1"},
+ {"header2", "12"},
+ {"reply-to", "something"}],
+ Expected = [{"header1", "1"},
+ {"header2", "12"},
+ {"reply-to", "/reply-queue/something"}],
+ [] = lists:subtract(
+ rabbit_stomp_util:headers_post_process(Headers), Expected).
+
+headers_post_process_noop_replyto(_) ->
+ [begin
+ Headers = [{"reply-to", Prefix ++ "/something"}],
+ Headers = rabbit_stomp_util:headers_post_process(Headers)
+ end || Prefix <- rabbit_routing_util:dest_prefixes()].
+
+headers_post_process_noop2(_) ->
+ Headers = [{"header1", "1"},
+ {"header2", "12"}],
+ Expected = [{"header1", "1"},
+ {"header2", "12"}],
+ [] = lists:subtract(
+ rabbit_stomp_util:headers_post_process(Headers), Expected).
+
+negotiate_version_both_empty(_) ->
+ {error, no_common_version} = rabbit_stomp_util:negotiate_version([],[]).
+
+negotiate_version_no_common(_) ->
+ {error, no_common_version} =
+ rabbit_stomp_util:negotiate_version(["1.2"],["1.3"]).
+
+negotiate_version_simple_common(_) ->
+ {ok, "1.2"} =
+ rabbit_stomp_util:negotiate_version(["1.2"],["1.2"]).
+
+negotiate_version_two_choice_common(_) ->
+ {ok, "1.3"} =
+ rabbit_stomp_util:negotiate_version(["1.2", "1.3"],["1.2", "1.3"]).
+
+negotiate_version_two_choice_common_out_of_order(_) ->
+ {ok, "1.3"} =
+ rabbit_stomp_util:negotiate_version(["1.3", "1.2"],["1.2", "1.3"]).
+
+negotiate_version_two_choice_big_common(_) ->
+ {ok, "1.20.23"} =
+ rabbit_stomp_util:negotiate_version(["1.20.23", "1.30.456"],
+ ["1.20.23", "1.30.457"]).
+
+negotiate_version_choice_mismatched_length(_) ->
+ {ok, "1.2.3"} =
+ rabbit_stomp_util:negotiate_version(["1.2", "1.2.3"],
+ ["1.2.3", "1.2"]).
+
+negotiate_version_choice_duplicates(_) ->
+ {ok, "1.2"} =
+ rabbit_stomp_util:negotiate_version(["1.2", "1.2"],
+ ["1.2", "1.2"]).
+
+trim_headers(_) ->
+ #stomp_frame{headers = [{"one", "foo"}, {"two", "baz "}]} =
+ rabbit_stomp_util:trim_headers(
+ #stomp_frame{headers = [{"one", " foo"}, {"two", " baz "}]}).
+
+%%--------------------------------------------------------------------
+%% Frame Parsing Tests
+%%--------------------------------------------------------------------
+
+ack_mode_auto(_) ->
+ Frame = #stomp_frame{headers = [{"ack", "auto"}]},
+ {auto, _} = rabbit_stomp_util:ack_mode(Frame).
+
+ack_mode_auto_default(_) ->
+ Frame = #stomp_frame{headers = []},
+ {auto, _} = rabbit_stomp_util:ack_mode(Frame).
+
+ack_mode_client(_) ->
+ Frame = #stomp_frame{headers = [{"ack", "client"}]},
+ {client, true} = rabbit_stomp_util:ack_mode(Frame).
+
+ack_mode_client_individual(_) ->
+ Frame = #stomp_frame{headers = [{"ack", "client-individual"}]},
+ {client, false} = rabbit_stomp_util:ack_mode(Frame).
+
+consumer_tag_id(_) ->
+ Frame = #stomp_frame{headers = [{"id", "foo"}]},
+ {ok, <<"T_foo">>, _} = rabbit_stomp_util:consumer_tag(Frame).
+
+consumer_tag_destination(_) ->
+ Frame = #stomp_frame{headers = [{"destination", "foo"}]},
+ {ok, <<"Q_foo">>, _} = rabbit_stomp_util:consumer_tag(Frame).
+
+consumer_tag_invalid(_) ->
+ Frame = #stomp_frame{headers = []},
+ {error, missing_destination_header} = rabbit_stomp_util:consumer_tag(Frame).
+
+%%--------------------------------------------------------------------
+%% Message ID Parsing Tests
+%%--------------------------------------------------------------------
+
+parse_valid_message_id(_) ->
+ {ok, {<<"bar">>, "abc", 123}} =
+ rabbit_stomp_util:parse_message_id("bar@@abc@@123").
+
+parse_invalid_message_id(_) ->
+ {error, invalid_message_id} =
+ rabbit_stomp_util:parse_message_id("blah").
+
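
The last two tests pin down the message-id layout used by this plugin: three fields joined with "@@", which parse_message_id/1 splits back into a tuple (per the expected value above, a binary tag, a session id string and an integer delivery tag; the exact field names are an assumption here). Below is a minimal sketch of such a parser, shown only to make the format explicit, the real implementation being rabbit_stomp_util:parse_message_id/1.

    %% Illustrative sketch of parsing "Tag@@SessionId@@DeliveryTag".
    parse_message_id(Id) ->
        case string:split(Id, "@@", all) of
            [Tag, SessionId, DeliveryTag] ->
                {ok, {list_to_binary(Tag), SessionId,
                      list_to_integer(DeliveryTag)}};
            _ ->
                {error, invalid_message_id}
        end.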