summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.travis.yml4
-rw-r--r--CHANGELOG.md58
-rw-r--r--Makefile10
-rw-r--r--README.md10
-rwxr-xr-xcontrib/inventory/cobbler.py36
-rwxr-xr-xcontrib/inventory/digital_ocean.py11
-rw-r--r--contrib/inventory/ec2.ini5
-rwxr-xr-xcontrib/inventory/ec2.py43
-rwxr-xr-xcontrib/inventory/openstack.py10
-rwxr-xr-x[-rw-r--r--]contrib/inventory/openvz.py0
-rwxr-xr-xcontrib/inventory/proxmox.py178
-rwxr-xr-xcontrib/inventory/rax.py36
-rw-r--r--contrib/inventory/spacewalk.ini16
-rwxr-xr-xcontrib/inventory/spacewalk.py96
-rwxr-xr-xcontrib/inventory/ssh_config.py30
-rw-r--r--contrib/inventory/windows_azure.ini3
-rwxr-xr-xcontrib/inventory/windows_azure.py104
-rw-r--r--docs/man/man1/ansible-playbook.12
-rw-r--r--docs/man/man1/ansible-playbook.1.asciidoc.in2
-rw-r--r--docs/man/man1/ansible.12
-rw-r--r--docs/man/man1/ansible.1.asciidoc.in2
-rw-r--r--docsite/_themes/srtd/layout.html20
-rw-r--r--docsite/_themes/srtd/static/css/theme.css39
-rw-r--r--docsite/_themes/srtd/static/images/banner_ad_1.pngbin0 -> 4510 bytes
-rw-r--r--docsite/_themes/srtd/static/images/banner_ad_2.pngbin0 -> 4951 bytes
-rw-r--r--docsite/conf.py4
-rw-r--r--docsite/rst/become.rst4
-rw-r--r--docsite/rst/developing_modules.rst28
-rw-r--r--docsite/rst/faq.rst6
-rw-r--r--docsite/rst/guide_aws.rst2
-rw-r--r--docsite/rst/intro_configuration.rst11
-rw-r--r--docsite/rst/intro_installation.rst2
-rw-r--r--docsite/rst/intro_inventory.rst4
-rw-r--r--docsite/rst/intro_patterns.rst4
-rw-r--r--docsite/rst/intro_windows.rst40
-rw-r--r--docsite/rst/playbooks_blocks.rst12
-rw-r--r--docsite/rst/playbooks_conditionals.rst5
-rw-r--r--docsite/rst/playbooks_delegation.rst42
-rw-r--r--docsite/rst/playbooks_error_handling.rst14
-rw-r--r--docsite/rst/playbooks_filters.rst20
-rw-r--r--docsite/rst/playbooks_lookups.rst95
-rw-r--r--docsite/rst/playbooks_loops.rst48
-rw-r--r--docsite/rst/playbooks_roles.rst21
-rw-r--r--docsite/rst/playbooks_tags.rst4
-rw-r--r--docsite/rst/playbooks_variables.rst6
-rw-r--r--docsite/rst/playbooks_vault.rst2
-rw-r--r--examples/ansible.cfg5
-rw-r--r--hacking/env-setup4
-rwxr-xr-xhacking/module_formatter.py23
-rwxr-xr-xhacking/test-module13
-rw-r--r--lib/ansible/cli/__init__.py44
-rw-r--r--lib/ansible/cli/adhoc.py35
-rw-r--r--lib/ansible/cli/doc.py4
-rw-r--r--lib/ansible/cli/galaxy.py106
-rw-r--r--lib/ansible/cli/playbook.py66
-rw-r--r--lib/ansible/cli/pull.py16
-rw-r--r--lib/ansible/cli/vault.py3
-rw-r--r--lib/ansible/constants.py22
-rw-r--r--lib/ansible/executor/module_common.py4
-rw-r--r--lib/ansible/executor/play_iterator.py6
-rw-r--r--lib/ansible/executor/playbook_executor.py88
-rw-r--r--lib/ansible/executor/process/result.py4
-rw-r--r--lib/ansible/executor/process/worker.py64
-rw-r--r--lib/ansible/executor/task_executor.py72
-rw-r--r--lib/ansible/galaxy/role.py58
-rw-r--r--lib/ansible/inventory/__init__.py222
-rw-r--r--lib/ansible/inventory/dir.py65
-rw-r--r--lib/ansible/inventory/expand_hosts.py11
-rw-r--r--lib/ansible/inventory/host.py3
-rw-r--r--lib/ansible/inventory/ini.py483
-rw-r--r--lib/ansible/module_utils/basic.py70
-rw-r--r--lib/ansible/module_utils/cloudstack.py40
-rw-r--r--lib/ansible/module_utils/facts.py89
-rw-r--r--lib/ansible/module_utils/powershell.ps142
-rw-r--r--lib/ansible/module_utils/vmware.py (renamed from v1/ansible/module_utils/vmware.py)4
m---------lib/ansible/modules/core15
m---------lib/ansible/modules/extras14
-rw-r--r--lib/ansible/parsing/__init__.py23
-rw-r--r--lib/ansible/parsing/mod_args.py15
-rw-r--r--lib/ansible/playbook/__init__.py21
-rw-r--r--lib/ansible/playbook/attribute.py6
-rw-r--r--lib/ansible/playbook/base.py53
-rw-r--r--lib/ansible/playbook/block.py27
-rw-r--r--lib/ansible/playbook/conditional.py4
-rw-r--r--lib/ansible/playbook/helpers.py12
-rw-r--r--lib/ansible/playbook/included_file.py36
-rw-r--r--lib/ansible/playbook/play.py42
-rw-r--r--lib/ansible/playbook/play_context.py90
-rw-r--r--lib/ansible/playbook/playbook_include.py18
-rw-r--r--lib/ansible/playbook/role/__init__.py31
-rw-r--r--lib/ansible/playbook/role/definition.py17
-rw-r--r--lib/ansible/playbook/role/include.py8
-rw-r--r--lib/ansible/playbook/role/metadata.py2
-rw-r--r--lib/ansible/playbook/task.py60
-rw-r--r--lib/ansible/plugins/__init__.py12
-rw-r--r--lib/ansible/plugins/action/__init__.py56
-rw-r--r--lib/ansible/plugins/action/assemble.py23
-rw-r--r--lib/ansible/plugins/action/copy.py24
-rw-r--r--lib/ansible/plugins/action/fetch.py3
-rw-r--r--lib/ansible/plugins/action/include_vars.py2
-rw-r--r--lib/ansible/plugins/action/normal.py2
-rw-r--r--lib/ansible/plugins/action/package.py6
-rw-r--r--lib/ansible/plugins/action/patch.py2
-rw-r--r--lib/ansible/plugins/action/pause.py113
-rw-r--r--lib/ansible/plugins/action/script.py6
-rw-r--r--lib/ansible/plugins/action/service.py60
-rw-r--r--lib/ansible/plugins/action/set_fact.py39
-rw-r--r--lib/ansible/plugins/action/synchronize.py208
-rw-r--r--lib/ansible/plugins/action/template.py15
-rw-r--r--lib/ansible/plugins/action/unarchive.py7
-rw-r--r--lib/ansible/plugins/action/win_copy.py28
-rw-r--r--lib/ansible/plugins/action/win_template.py28
-rw-r--r--lib/ansible/plugins/callback/__init__.py91
-rw-r--r--lib/ansible/plugins/callback/default.py59
-rw-r--r--lib/ansible/plugins/callback/minimal.py8
-rw-r--r--lib/ansible/plugins/callback/skippy.py60
-rw-r--r--lib/ansible/plugins/callback/tree.py68
-rw-r--r--lib/ansible/plugins/connections/chroot.py58
-rw-r--r--lib/ansible/plugins/connections/paramiko_ssh.py23
-rw-r--r--lib/ansible/plugins/connections/ssh.py62
-rw-r--r--lib/ansible/plugins/connections/winrm.py72
-rw-r--r--lib/ansible/plugins/filter/core.py100
-rw-r--r--lib/ansible/plugins/inventory/README.md1
-rw-r--r--lib/ansible/plugins/inventory/directory.py5
-rw-r--r--lib/ansible/plugins/inventory/ini.py13
-rw-r--r--lib/ansible/plugins/inventory/script.py31
-rw-r--r--lib/ansible/plugins/lookup/__init__.py3
-rw-r--r--lib/ansible/plugins/lookup/cartesian.py12
-rwxr-xr-xlib/ansible/plugins/lookup/consul_kv.py133
-rw-r--r--lib/ansible/plugins/lookup/credstash.py48
-rw-r--r--lib/ansible/plugins/lookup/csvfile.py3
-rw-r--r--lib/ansible/plugins/lookup/dig.py211
-rw-r--r--lib/ansible/plugins/lookup/dnstxt.py3
-rw-r--r--lib/ansible/plugins/lookup/env.py3
-rw-r--r--lib/ansible/plugins/lookup/etcd.py3
-rw-r--r--lib/ansible/plugins/lookup/file.py3
-rw-r--r--lib/ansible/plugins/lookup/first_found.py4
-rw-r--r--lib/ansible/plugins/lookup/flattened.py3
-rw-r--r--lib/ansible/plugins/lookup/hashi_vault.py90
-rw-r--r--lib/ansible/plugins/lookup/indexed_items.py1
-rw-r--r--lib/ansible/plugins/lookup/ini.py93
-rw-r--r--lib/ansible/plugins/lookup/inventory_hostnames.py1
-rw-r--r--lib/ansible/plugins/lookup/items.py3
-rw-r--r--lib/ansible/plugins/lookup/list.py25
-rw-r--r--lib/ansible/plugins/lookup/nested.py15
-rw-r--r--lib/ansible/plugins/lookup/password.py3
-rw-r--r--lib/ansible/plugins/lookup/pipe.py3
-rw-r--r--lib/ansible/plugins/lookup/redis_kv.py3
-rw-r--r--lib/ansible/plugins/lookup/sequence.py18
-rw-r--r--lib/ansible/plugins/lookup/shelvefile.py83
-rw-r--r--lib/ansible/plugins/lookup/subelements.py4
-rw-r--r--lib/ansible/plugins/lookup/template.py15
-rw-r--r--lib/ansible/plugins/lookup/together.py6
-rw-r--r--lib/ansible/plugins/lookup/url.py3
-rw-r--r--lib/ansible/plugins/shell/powershell.py66
-rw-r--r--lib/ansible/plugins/shell/sh.py9
-rw-r--r--lib/ansible/plugins/strategies/__init__.py32
-rw-r--r--lib/ansible/plugins/strategies/free.py34
-rw-r--r--lib/ansible/plugins/strategies/linear.py22
-rw-r--r--lib/ansible/plugins/test/__init__.py0
-rw-r--r--lib/ansible/plugins/test/core.py101
-rw-r--r--lib/ansible/plugins/test/files.py37
-rw-r--r--lib/ansible/template/__init__.py47
-rw-r--r--lib/ansible/template/safe_eval.py8
-rw-r--r--lib/ansible/template/vars.py3
-rw-r--r--lib/ansible/utils/display.py23
-rw-r--r--lib/ansible/utils/listify.py15
-rw-r--r--lib/ansible/vars/__init__.py15
-rw-r--r--lib/ansible/vars/hostvars.py24
-rw-r--r--test/integration/Makefile10
-rw-r--r--test/integration/cloudstack.yml2
-rw-r--r--test/integration/integration_config.yml2
-rw-r--r--test/integration/lookup.ini24
-rw-r--r--test/integration/lookup.properties5
-rw-r--r--test/integration/lookup_paths/play.yml64
-rw-r--r--test/integration/lookup_paths/roles/showfile/tasks/main.yml2
-rw-r--r--test/integration/lookup_paths/testplay.yml19
-rw-r--r--test/integration/roles/test_cs_affinitygroup/tasks/main.yml2
-rw-r--r--test/integration/roles/test_cs_instancegroup/tasks/main.yml2
-rw-r--r--test/integration/roles/test_cs_securitygroup/tasks/main.yml2
-rw-r--r--test/integration/roles/test_cs_sshkeypair/tasks/main.yml2
-rw-r--r--test/integration/roles/test_failed_when/tasks/main.yml2
-rw-r--r--test/integration/roles/test_file/tasks/main.yml2
-rw-r--r--test/integration/roles/test_filters/files/9851.txt3
-rw-r--r--test/integration/roles/test_filters/tasks/main.yml19
-rw-r--r--test/integration/roles/test_iterators/tasks/main.yml8
-rw-r--r--test/integration/roles/test_template/files/foo.txt1
-rw-r--r--test/integration/roles/test_template/vars/main.yml2
-rw-r--r--test/integration/roles/test_win_feature/tasks/main.yml18
-rw-r--r--test/integration/roles/test_win_fetch/tasks/main.yml20
-rw-r--r--test/integration/roles/test_win_file/tasks/main.yml20
-rw-r--r--test/integration/roles/test_win_msi/tasks/main.yml2
-rw-r--r--test/integration/roles/test_win_ping/tasks/main.yml57
-rw-r--r--test/integration/roles/test_win_raw/tasks/main.yml11
-rw-r--r--test/integration/roles/test_win_script/defaults/main.yml1
-rw-r--r--test/integration/roles/test_win_script/files/test_script.cmd2
-rw-r--r--test/integration/roles/test_win_script/files/test_script_creates_file.ps13
-rw-r--r--test/integration/roles/test_win_script/files/test_script_removes_file.ps13
-rw-r--r--test/integration/roles/test_win_script/tasks/main.yml78
-rw-r--r--test/integration/roles/test_win_setup/tasks/main.yml4
-rw-r--r--test/integration/roles/test_win_stat/tasks/main.yml20
-rw-r--r--test/integration/roles/test_win_template/tasks/main.yml15
-rw-r--r--test/integration/roles/test_win_user/tasks/main.yml5
-rw-r--r--test/integration/test_environment.yml52
-rw-r--r--test/integration/test_lookup_properties.yml40
-rw-r--r--test/units/executor/test_task_executor.py3
-rw-r--r--test/units/mock/loader.py6
-rw-r--r--test/units/parsing/test_mod_args.py6
-rw-r--r--test/units/playbook/test_play_context.py17
-rw-r--r--test/units/playbook/test_role.py16
-rw-r--r--test/units/template/test_templar.py6
-rw-r--r--tox.ini5
-rw-r--r--v1/README.md3
-rw-r--r--v1/ansible/utils/template.py3
214 files changed, 4764 insertions, 1592 deletions
diff --git a/.travis.yml b/.travis.yml
index 975bc3e35d..335a8e58e3 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,6 +1,7 @@
sudo: false
language: python
env:
+ - TOKENV=py24
- TOXENV=py26
- TOXENV=py27
addons:
@@ -12,7 +13,8 @@ addons:
install:
- pip install tox PyYAML Jinja2 sphinx
script:
- - tox
+- if test x"$TOKENV" != x'py24' ; then tox ; fi
+- if test x"$TOKENV" = x'py24' ; then python2.4 -V && python2.4 -m compileall -fq -x 'module_utils/(a10|rax|openstack|ec2|gce).py' lib/ansible/module_utils ; fi
#- make -C docsite all
after_success:
- coveralls
diff --git a/CHANGELOG.md b/CHANGELOG.md
index dab562ef92..7c7a911dc8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,16 +8,23 @@ Major Changes:
* New strategy plugins, allow to control the flow of execution of tasks per play, the default will be the same as before
* Improved error handling, now you get much more detailed parser messages. General exception handling and display has been revamped.
* Task includes now get evaluated during execution, end behaviour will be the same but it now allows for more dynamic includes and options.
- * First feature of the more dynamic includes is that with_ loops are now usable with them.
+ * First feature of the more dynamic includes is that "with\_<lookup>" loops are now usable with them.
* callback, connection and lookup plugin APIs have changed, some will require modification to work with new version
* callbacks are now shipped in the active directory and don't need to be copied, just whitelisted in ansible.cfg
* Many API changes, this will break those currently using it directly, but the new API is much easier to use and test
* Settings are now more inheritable, what you set at play, block or role will be automatically inhertited by the contained.
This allows for new features to automatically be settable at all levels, previously we had to manually code this
- * Many more tests, new API makes things more testable and we took advantage of it
- * big_ip modules now support turning off ssl certificate validation (use only for self signed)
* template code now retains types for bools and numbers instead of turning them into strings.
If you need the old behaviour, quote the value and it will get passed around as a string
+ * added meta: refresh_inventory to force rereading the inventory in a play
+ * vars are now settable at play, block, role and task level
+ * template code now retains types for bools, and Numbers instead of turning them into strings
+ If you need the old behaviour, quote the value and it will get passed around as a string. In the
+ case of nulls, the output used to be an empty string.
+ * Empty variables and variables set to null in yaml will no longer be converted to empty strings.
+ They will retain the value of `None`. To go back to the old behaviour, you can override
+ the `null_representation` setting to an empty string in your config file or by setting the
+ `ANSIBLE_NULL_REPRESENTATION` environment variable.
Deprecated Modules (new ones in parens):
* ec2_ami_search (ec2_ami_find)
@@ -31,7 +38,10 @@ New Modules:
* amazon: ec2_ami_find
* amazon: ec2_eni
* amazon: ec2_eni_facts
+ * amazon: ec2_remote_facts
* amazon: ec2_vpc_net
+ * amazon: ec2_vpc_route_table_facts
+ * amazon: ec2_vpc_subnet
* amazon: ec2_win_password
* amazon: elasticache_subnet_group
* amazon: iam
@@ -39,8 +49,12 @@ New Modules:
* amazon: route53_zone
* amazon: sts_assume_role
* amazon: s3_logging
+ * apk
* bundler
+ * centurylink: clc_loadbalancer
+ * centurylink: clc_modify_server
* centurylink: clc_publicip
+ * centurylink: clc_server
* circonus_annotation
* consul
* consul_acl
@@ -49,11 +63,13 @@ New Modules:
* cloudtrail
* cloudstack: cs_account
* cloudstack: cs_affinitygroup
+ * cloudstack: cs_domain
* cloudstack: cs_facts
* cloudstack: cs_firewall
* cloudstack: cs_iso
* cloudstack: cs_instance
* cloudstack: cs_instancegroup
+ * cloudstack: cs_ip_address
* cloudstack: cs_network
* cloudstack: cs_portforward
* cloudstack: cs_project
@@ -89,8 +105,12 @@ New Modules:
* openstack: os_subnet
* openstack: os_volume
* osx_defaults
+ * pam_limits
* pear
* profitbricks: profitbricks
+ * profitbricks: profitbricks_datacenter
+ * profitbricks: profitbricks_nic
+ * profitbricks: profitbricks_volume
* proxmox
* proxmox_template
* puppet
@@ -104,15 +124,21 @@ New Modules:
* rabbitmq_binding
* rabbitmq_exchange
* rabbitmq_queue
+ * selinux_permissive
* sensu_check
* sensu_subscription
+ * seport
+ * slackpkg
+ * solaris_zone
* vertica_configuration
* vertica_facts
* vertica_role
* vertica_schema
* vertica_user
- * vmware_datacenter
- * vsphere_copy
+ * vmware: vmware_datacenter
+ * vmware: vca_fw
+ * vmware: vca_nat
+ * vmware: vsphere_copy
* webfaction_app
* webfaction_db
* webfaction_domain
@@ -127,17 +153,37 @@ New Modules:
* win_iis_website
* win_regedit
* win_unzip
+ * xenserver_facts
* zabbix_host
* zabbix_hostmacro
* zabbix_screen
+ * znode
New Inventory scripts:
* cloudstack
* fleetctl
* openvz
+ * proxmox
* serf
-Other Notable Changes:
+New Lookups:
+ * credstash
+ * hashi_vault
+ * ini
+ * shelvefile
+
+Minor changes:
+
+ * Many more tests, new API makes things more testable and we took advantage of it
+ * big_ip modules now support turning off ssl certificate validation (use only for self signed)
+ * The undocumented semicolon-separated "pattern1;pattern2" syntax to match hosts is no longer supported.
+ * Now when you delegate a action that returns ansible_facts, these facts will now be applied to the delegated host,
+ unlike before which they were applied to the current host.
+ * Consolidated code from modules using urllib2 to normalize features, TLS and SNI support
+ * synchronize module's dest_port parameter now takes precedence over the ansible_ssh_port inventory setting
+ * play output is now dynamically sized to terminal with a minimal of 80 coluumns (old default)
+ * vars_prompt and pause are now skipped with a warning if the play is called non interactively (i.e. pull from cron)
+ * Support for OpenBSD's 'doas' privilege escalation method.
## 1.9.2 "Dancing In the Street" - Jun 26, 2015
diff --git a/Makefile b/Makefile
index 7533e648c5..508be405e9 100644
--- a/Makefile
+++ b/Makefile
@@ -40,6 +40,11 @@ RELEASE := $(shell cat VERSION | cut -f2 -d' ')
# Get the branch information from git
ifneq ($(shell which git),)
GIT_DATE := $(shell git log -n 1 --format="%ai")
+GIT_HASH := $(shell git log -n 1 --format="%h")
+GIT_BRANCH := $(shell git rev-parse --abbrev-ref HEAD | sed 's/[-_.]//g')
+GITINFO = .$(GIT_HASH).$(GIT_BRANCH)
+else
+GITINFO = ''
endif
ifeq ($(shell echo $(OS) | egrep -c 'Darwin|FreeBSD|OpenBSD'),1)
@@ -62,7 +67,7 @@ ifeq ($(OFFICIAL),yes)
DEBUILD_OPTS += -k$(DEBSIGN_KEYID)
endif
else
- DEB_RELEASE = 0.git$(DATE)
+ DEB_RELEASE = 0.git$(DATE)$(GITINFO)
# Do not sign unofficial builds
DEBUILD_OPTS += -uc -us
DPUT_OPTS += -u
@@ -78,7 +83,7 @@ RPMSPEC = $(RPMSPECDIR)/ansible.spec
RPMDIST = $(shell rpm --eval '%{?dist}')
RPMRELEASE = $(RELEASE)
ifneq ($(OFFICIAL),yes)
- RPMRELEASE = 0.git$(DATE)
+ RPMRELEASE = 0.git$(DATE)$(GITINFO)
endif
RPMNVR = "$(NAME)-$(VERSION)-$(RPMRELEASE)$(RPMDIST)"
@@ -133,6 +138,7 @@ clean:
@echo "Cleaning up distutils stuff"
rm -rf build
rm -rf dist
+ rm -rf lib/ansible.egg-info/
@echo "Cleaning up byte compiled python stuff"
find . -type f -regex ".*\.py[co]$$" -delete
@echo "Cleaning up editor backup files"
diff --git a/README.md b/README.md
index 8bfc18c7ca..f306bd577d 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
-[![PyPI version](https://badge.fury.io/py/ansible.png)](http://badge.fury.io/py/ansible)
-[![PyPI downloads](https://pypip.in/d/ansible/badge.png)](https://pypi.python.org/pypi/ansible)
+[![PyPI version](https://img.shields.io/pypi/v/ansible.svg)](https://pypi.python.org/pypi/ansible)
+[![PyPI downloads](https://img.shields.io/pypi/dm/ansible.svg)](https://pypi.python.org/pypi/ansible)
[![Build Status](https://travis-ci.org/ansible/ansible.svg?branch=devel)](https://travis-ci.org/ansible/ansible)
@@ -12,9 +12,9 @@ Read the documentation and more at http://ansible.com/
Many users run straight from the development branch (it's generally fine to do so), but you might also wish to consume a release.
-You can find instructions [here](http://docs.ansible.com/intro_getting_started.html) for a variety of platforms. If you decide to go with the development branch, be sure to run "git submodule update --init --recursive" after doing a checkout.
+You can find instructions [here](http://docs.ansible.com/intro_getting_started.html) for a variety of platforms. If you decide to go with the development branch, be sure to run `git submodule update --init --recursive` after doing a checkout.
-If you want to download a tarball of a release, go to [releases.ansible.com](http://releases.ansible.com/ansible), though most users use yum (using the EPEL instructions linked above), apt (using the PPA instructions linked above), or "pip install ansible".
+If you want to download a tarball of a release, go to [releases.ansible.com](http://releases.ansible.com/ansible), though most users use `yum` (using the EPEL instructions linked above), `apt` (using the PPA instructions linked above), or `pip install ansible`.
Design Principles
=================
@@ -33,7 +33,7 @@ Get Involved
============
* Read [Community Information](http://docs.ansible.com/community.html) for all kinds of ways to contribute to and interact with the project, including mailing list information and how to submit bug reports and code to Ansible.
- * All code submissions are done through pull requests. Take care to make sure no merge commits are in the submission, and use "git rebase" vs "git merge" for this reason. If submitting a large code change (other than modules), it's probably a good idea to join ansible-devel and talk about what you would like to do or add first and to avoid duplicate efforts. This not only helps everyone know what's going on, it also helps save time and effort if we decide some changes are needed.
+ * All code submissions are done through pull requests. Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason. If submitting a large code change (other than modules), it's probably a good idea to join ansible-devel and talk about what you would like to do or add first and to avoid duplicate efforts. This not only helps everyone know what's going on, it also helps save time and effort if we decide some changes are needed.
* Users list: [ansible-project](http://groups.google.com/group/ansible-project)
* Development list: [ansible-devel](http://groups.google.com/group/ansible-devel)
* Announcement list: [ansible-announce](http://groups.google.com/group/ansible-announce) - read only
diff --git a/contrib/inventory/cobbler.py b/contrib/inventory/cobbler.py
index f352c8cf9d..469fac21ad 100755
--- a/contrib/inventory/cobbler.py
+++ b/contrib/inventory/cobbler.py
@@ -30,9 +30,15 @@ See http://ansible.github.com/api.html for more info
Tested with Cobbler 2.0.11.
Changelog:
+ - 2015-06-21 dmccue: Modified to support run-once _meta retrieval, results in
+ higher performance at ansible startup. Groups are determined by owner rather than
+ default mgmt_classes. DNS name determined from hostname. cobbler values are written
+ to a 'cobbler' fact namespace
+
- 2013-09-01 pgehres: Refactored implementation to make use of caching and to
limit the number of connections to external cobbler server for performance.
Added use of cobbler.ini file to configure settings. Tested with Cobbler 2.4.0
+
"""
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
@@ -54,7 +60,6 @@ Changelog:
######################################################################
-
import argparse
import ConfigParser
import os
@@ -71,10 +76,13 @@ except ImportError:
# server, so it does not attempt to login with a username and password.
# this will be addressed in a future version of this script.
+orderby_keyname = 'owners' # alternatively 'mgmt_classes'
+
class CobblerInventory(object):
def __init__(self):
+
""" Main execution path """
self.conn = None
@@ -98,14 +106,12 @@ class CobblerInventory(object):
# Data to print
if self.args.host:
- data_to_print = self.get_host_info()
-
- elif self.args.list:
- # Display list of instances for inventory
- data_to_print = self.json_format_dict(self.inventory, True)
-
- else: # default action with no options
- data_to_print = self.json_format_dict(self.inventory, True)
+ data_to_print += self.get_host_info()
+ else:
+ self.inventory['_meta'] = { 'hostvars': {} }
+ for hostname in self.cache:
+ self.inventory['_meta']['hostvars'][hostname] = {'cobbler': self.cache[hostname] }
+ data_to_print += self.json_format_dict(self.inventory, True)
print data_to_print
@@ -160,21 +166,16 @@ class CobblerInventory(object):
for host in data:
# Get the FQDN for the host and add it to the right groups
- dns_name = None
+ dns_name = host['hostname'] #None
ksmeta = None
interfaces = host['interfaces']
- for (iname, ivalue) in interfaces.iteritems():
- if ivalue['management']:
- this_dns_name = ivalue.get('dns_name', None)
- if this_dns_name is not None and this_dns_name is not "":
- dns_name = this_dns_name
if dns_name is None:
continue
status = host['status']
profile = host['profile']
- classes = host['mgmt_classes']
+ classes = host[orderby_keyname]
if status not in self.inventory:
self.inventory[status] = []
@@ -193,7 +194,7 @@ class CobblerInventory(object):
# The old way was ksmeta only -- provide backwards compatibility
- self.cache[dns_name] = dict()
+ self.cache[dns_name] = host
if "ks_meta" in host:
for key, value in host["ks_meta"].iteritems():
self.cache[dns_name][key] = value
@@ -242,7 +243,6 @@ class CobblerInventory(object):
def write_to_cache(self, data, filename):
""" Writes data in JSON format to a file """
-
json_data = self.json_format_dict(data, True)
cache = open(filename, 'w')
cache.write(json_data)
diff --git a/contrib/inventory/digital_ocean.py b/contrib/inventory/digital_ocean.py
index 4f312e7c24..1927f09fdf 100755
--- a/contrib/inventory/digital_ocean.py
+++ b/contrib/inventory/digital_ocean.py
@@ -55,6 +55,7 @@ When run against a specific host, this script returns the following variables:
- do_id
- do_image - object
- do_ip_address
+ - do_private_ip_address
- do_kernel - object
- do_locked
- de_memory
@@ -344,7 +345,15 @@ or environment variables (DO_API_TOKEN)'''
# add all droplets by id and name
for droplet in self.data['droplets']:
- dest = droplet['ip_address']
+ #when using private_networking, the API reports the private one in "ip_address", which is useless. We need the public one for Ansible to work
+ if 'private_networking' in droplet['features']:
+ for net in droplet['networks']['v4']:
+ if net['type']=='public':
+ dest=net['ip_address']
+ else:
+ continue
+ else:
+ dest = droplet['ip_address']
self.inventory[droplet['id']] = [dest]
self.push(self.inventory, droplet['name'], dest)
diff --git a/contrib/inventory/ec2.ini b/contrib/inventory/ec2.ini
index a1d9b1d805..50430ce0ed 100644
--- a/contrib/inventory/ec2.ini
+++ b/contrib/inventory/ec2.ini
@@ -58,6 +58,11 @@ route53 = False
# 'all_instances' to True to return all instances regardless of state.
all_instances = False
+# By default, only EC2 instances in the 'running' state are returned. Specify
+# EC2 instance states to return as a comma-separated list. This
+# option is overriden when 'all_instances' is True.
+# instance_states = pending, running, shutting-down, terminated, stopping, stopped
+
# By default, only RDS instances in the 'available' state are returned. Set
# 'all_rds_instances' to True return all RDS instances regardless of state.
all_rds_instances = False
diff --git a/contrib/inventory/ec2.py b/contrib/inventory/ec2.py
index a8e042e3f4..e4b0b072d4 100755
--- a/contrib/inventory/ec2.py
+++ b/contrib/inventory/ec2.py
@@ -244,6 +244,28 @@ class Ec2Inventory(object):
else:
self.all_instances = False
+ # Instance states to be gathered in inventory. Default is 'running'.
+ # Setting 'all_instances' to 'yes' overrides this option.
+ ec2_valid_instance_states = [
+ 'pending',
+ 'running',
+ 'shutting-down',
+ 'terminated',
+ 'stopping',
+ 'stopped'
+ ]
+ self.ec2_instance_states = []
+ if self.all_instances:
+ self.ec2_instance_states = ec2_valid_instance_states
+ elif config.has_option('ec2', 'instance_states'):
+ for instance_state in config.get('ec2', 'instance_states').split(','):
+ instance_state = instance_state.strip()
+ if instance_state not in ec2_valid_instance_states:
+ continue
+ self.ec2_instance_states.append(instance_state)
+ else:
+ self.ec2_instance_states = ['running']
+
# Return all RDS instances? (if RDS is enabled)
if config.has_option('ec2', 'all_rds_instances') and self.rds_enabled:
self.all_rds_instances = config.getboolean('ec2', 'all_rds_instances')
@@ -406,7 +428,7 @@ class Ec2Inventory(object):
else:
backend = 'Eucalyptus' if self.eucalyptus else 'AWS'
error = "Error connecting to %s backend.\n%s" % (backend, e.message)
- self.fail_with_error(error)
+ self.fail_with_error(error, 'getting EC2 instances')
def get_rds_instances_by_region(self, region):
''' Makes an AWS API call to the list of RDS instances in a particular
@@ -425,7 +447,7 @@ class Ec2Inventory(object):
error = self.get_auth_error_message()
if not e.reason == "Forbidden":
error = "Looks like AWS RDS is down:\n%s" % e.message
- self.fail_with_error(error)
+ self.fail_with_error(error, 'getting RDS instances')
def get_elasticache_clusters_by_region(self, region):
''' Makes an AWS API call to the list of ElastiCache clusters (with
@@ -448,7 +470,7 @@ class Ec2Inventory(object):
error = self.get_auth_error_message()
if not e.reason == "Forbidden":
error = "Looks like AWS ElastiCache is down:\n%s" % e.message
- self.fail_with_error(error)
+ self.fail_with_error(error, 'getting ElastiCache clusters')
try:
# Boto also doesn't provide wrapper classes to CacheClusters or
@@ -458,7 +480,7 @@ class Ec2Inventory(object):
except KeyError as e:
error = "ElastiCache query to AWS failed (unexpected format)."
- self.fail_with_error(error)
+ self.fail_with_error(error, 'getting ElastiCache clusters')
for cluster in clusters:
self.add_elasticache_cluster(cluster, region)
@@ -482,7 +504,7 @@ class Ec2Inventory(object):
error = self.get_auth_error_message()
if not e.reason == "Forbidden":
error = "Looks like AWS ElastiCache [Replication Groups] is down:\n%s" % e.message
- self.fail_with_error(error)
+ self.fail_with_error(error, 'getting ElastiCache clusters')
try:
# Boto also doesn't provide wrapper classes to ReplicationGroups
@@ -492,7 +514,7 @@ class Ec2Inventory(object):
except KeyError as e:
error = "ElastiCache [Replication Groups] query to AWS failed (unexpected format)."
- self.fail_with_error(error)
+ self.fail_with_error(error, 'getting ElastiCache clusters')
for replication_group in replication_groups:
self.add_elasticache_replication_group(replication_group, region)
@@ -514,8 +536,11 @@ class Ec2Inventory(object):
return '\n'.join(errors)
- def fail_with_error(self, err_msg):
+ def fail_with_error(self, err_msg, err_operation=None):
'''log an error to std err for ansible-playbook to consume and exit'''
+ if err_operation:
+ err_msg = 'ERROR: "{err_msg}", while: {err_operation}'.format(
+ err_msg=err_msg, err_operation=err_operation)
sys.stderr.write(err_msg)
sys.exit(1)
@@ -531,8 +556,8 @@ class Ec2Inventory(object):
''' Adds an instance to the inventory and index, as long as it is
addressable '''
- # Only want running instances unless all_instances is True
- if not self.all_instances and instance.state != 'running':
+ # Only return instances with desired instance states
+ if instance.state not in self.ec2_instance_states:
return
# Select the best destination address
diff --git a/contrib/inventory/openstack.py b/contrib/inventory/openstack.py
index 819380c796..103be1bee0 100755
--- a/contrib/inventory/openstack.py
+++ b/contrib/inventory/openstack.py
@@ -51,11 +51,12 @@ import shade
class OpenStackInventory(object):
def __init__(self, private=False, refresh=False):
+ config_files = os_client_config.config.CONFIG_FILES
+ config_files.append('/etc/ansible/openstack.yml')
self.openstack_config = os_client_config.config.OpenStackConfig(
- os_client_config.config.CONFIG_FILES.append(
- '/etc/ansible/openstack.yml'),
- private)
+ config_files)
self.clouds = shade.openstack_clouds(self.openstack_config)
+ self.private = private
self.refresh = refresh
self.cache_max_age = self.openstack_config.get_cache_max_age()
@@ -92,6 +93,7 @@ class OpenStackInventory(object):
hostvars = collections.defaultdict(dict)
for cloud in self.clouds:
+ cloud.private = cloud.private or self.private
# Cycle on servers
for server in cloud.list_servers():
@@ -152,7 +154,7 @@ def main():
elif args.host:
inventory.get_host(args.host)
except shade.OpenStackCloudException as e:
- print(e.message)
+ sys.stderr.write('%s\n' % e.message)
sys.exit(1)
sys.exit(0)
diff --git a/contrib/inventory/openvz.py b/contrib/inventory/openvz.py
index fd0bd9ff79..fd0bd9ff79 100644..100755
--- a/contrib/inventory/openvz.py
+++ b/contrib/inventory/openvz.py
diff --git a/contrib/inventory/proxmox.py b/contrib/inventory/proxmox.py
new file mode 100755
index 0000000000..80f6628d97
--- /dev/null
+++ b/contrib/inventory/proxmox.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2014 Mathieu GAUTHIER-LAFAYE <gauthierl@lapth.cnrs.fr>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import urllib
+import urllib2
+try:
+ import json
+except ImportError:
+ import simplejson as json
+import os
+import sys
+from optparse import OptionParser
+
+class ProxmoxNodeList(list):
+ def get_names(self):
+ return [node['node'] for node in self]
+
+class ProxmoxQemu(dict):
+ def get_variables(self):
+ variables = {}
+ for key, value in self.iteritems():
+ variables['proxmox_' + key] = value
+ return variables
+
+class ProxmoxQemuList(list):
+ def __init__(self, data=[]):
+ for item in data:
+ self.append(ProxmoxQemu(item))
+
+ def get_names(self):
+ return [qemu['name'] for qemu in self if qemu['template'] != 1]
+
+ def get_by_name(self, name):
+ results = [qemu for qemu in self if qemu['name'] == name]
+ return results[0] if len(results) > 0 else None
+
+ def get_variables(self):
+ variables = {}
+ for qemu in self:
+ variables[qemu['name']] = qemu.get_variables()
+
+ return variables
+
+class ProxmoxPoolList(list):
+ def get_names(self):
+ return [pool['poolid'] for pool in self]
+
+class ProxmoxPool(dict):
+ def get_members_name(self):
+ return [member['name'] for member in self['members'] if member['template'] != 1]
+
+class ProxmoxAPI(object):
+ def __init__(self, options):
+ self.options = options
+ self.credentials = None
+
+ if not options.url:
+ raise Exception('Missing mandatory parameter --url (or PROXMOX_URL).')
+ elif not options.username:
+ raise Exception('Missing mandatory parameter --username (or PROXMOX_USERNAME).')
+ elif not options.password:
+ raise Exception('Missing mandatory parameter --password (or PROXMOX_PASSWORD).')
+
+ def auth(self):
+ request_path = '{}api2/json/access/ticket'.format(self.options.url)
+
+ request_params = urllib.urlencode({
+ 'username': self.options.username,
+ 'password': self.options.password,
+ })
+
+ data = json.load(urllib2.urlopen(request_path, request_params))
+
+ self.credentials = {
+ 'ticket': data['data']['ticket'],
+ 'CSRFPreventionToken': data['data']['CSRFPreventionToken'],
+ }
+
+ def get(self, url, data=None):
+ opener = urllib2.build_opener()
+ opener.addheaders.append(('Cookie', 'PVEAuthCookie={}'.format(self.credentials['ticket'])))
+
+ request_path = '{}{}'.format(self.options.url, url)
+ request = opener.open(request_path, data)
+
+ response = json.load(request)
+ return response['data']
+
+ def nodes(self):
+ return ProxmoxNodeList(self.get('api2/json/nodes'))
+
+ def node_qemu(self, node):
+ return ProxmoxQemuList(self.get('api2/json/nodes/{}/qemu'.format(node)))
+
+ def pools(self):
+ return ProxmoxPoolList(self.get('api2/json/pools'))
+
+ def pool(self, poolid):
+ return ProxmoxPool(self.get('api2/json/pools/{}'.format(poolid)))
+
+def main_list(options):
+ results = {
+ 'all': {
+ 'hosts': [],
+ },
+ '_meta': {
+ 'hostvars': {},
+ }
+ }
+
+ proxmox_api = ProxmoxAPI(options)
+ proxmox_api.auth()
+
+ for node in proxmox_api.nodes().get_names():
+ qemu_list = proxmox_api.node_qemu(node)
+ results['all']['hosts'] += qemu_list.get_names()
+ results['_meta']['hostvars'].update(qemu_list.get_variables())
+
+ # pools
+ for pool in proxmox_api.pools().get_names():
+ results[pool] = {
+ 'hosts': proxmox_api.pool(pool).get_members_name(),
+ }
+
+ return results
+
+def main_host(options):
+ proxmox_api = ProxmoxAPI(options)
+ proxmox_api.auth()
+
+ for node in proxmox_api.nodes().get_names():
+ qemu_list = proxmox_api.node_qemu(node)
+ qemu = qemu_list.get_by_name(options.host)
+ if qemu:
+ return qemu.get_variables()
+
+ return {}
+
+def main():
+ parser = OptionParser(usage='%prog [options] --list | --host HOSTNAME')
+ parser.add_option('--list', action="store_true", default=False, dest="list")
+ parser.add_option('--host', dest="host")
+ parser.add_option('--url', default=os.environ.get('PROXMOX_URL'), dest='url')
+ parser.add_option('--username', default=os.environ.get('PROXMOX_USERNAME'), dest='username')
+ parser.add_option('--password', default=os.environ.get('PROXMOX_PASSWORD'), dest='password')
+ parser.add_option('--pretty', action="store_true", default=False, dest='pretty')
+ (options, args) = parser.parse_args()
+
+ if options.list:
+ data = main_list(options)
+ elif options.host:
+ data = main_host(options)
+ else:
+ parser.print_help()
+ sys.exit(1)
+
+ indent = None
+ if options.pretty:
+ indent = 2
+
+ print json.dumps(data, indent=indent)
+
+if __name__ == '__main__':
+ main()
diff --git a/contrib/inventory/rax.py b/contrib/inventory/rax.py
index 10b72d322b..a42bbfcfef 100755
--- a/contrib/inventory/rax.py
+++ b/contrib/inventory/rax.py
@@ -167,6 +167,9 @@ except ImportError:
print('pyrax is required for this module')
sys.exit(1)
+from time import time
+
+
NON_CALLABLES = (basestring, bool, dict, int, list, type(None))
@@ -214,7 +217,7 @@ def host(regions, hostname):
print(json.dumps(hostvars, sort_keys=True, indent=4))
-def _list(regions):
+def _list_into_cache(regions):
groups = collections.defaultdict(list)
hostvars = collections.defaultdict(dict)
images = {}
@@ -334,7 +337,31 @@ def _list(regions):
if hostvars:
groups['_meta'] = {'hostvars': hostvars}
- print(json.dumps(groups, sort_keys=True, indent=4))
+
+ with open(get_cache_file_path(regions), 'w') as cache_file:
+ json.dump(groups, cache_file)
+
+
+def get_cache_file_path(regions):
+ regions_str = '.'.join([reg.strip().lower() for reg in regions])
+ ansible_tmp_path = os.path.join(os.path.expanduser("~"), '.ansible', 'tmp')
+ if not os.path.exists(ansible_tmp_path):
+ os.makedirs(ansible_tmp_path)
+ return os.path.join(ansible_tmp_path,
+ 'ansible-rax-%s-%s.cache' % (
+ pyrax.identity.username, regions_str))
+
+
+def _list(regions, refresh_cache=True):
+ if (not os.path.exists(get_cache_file_path(regions)) or
+ refresh_cache or
+ (time() - os.stat(get_cache_file_path(regions))[-1]) > 600):
+ # Cache file doesn't exist or older than 10m or refresh cache requested
+ _list_into_cache(regions)
+
+ with open(get_cache_file_path(regions), 'r') as cache_file:
+ groups = json.load(cache_file)
+ print(json.dumps(groups, sort_keys=True, indent=4))
def parse_args():
@@ -344,6 +371,9 @@ def parse_args():
group.add_argument('--list', action='store_true',
help='List active servers')
group.add_argument('--host', help='List details about the specific host')
+ parser.add_argument('--refresh-cache', action='store_true', default=False,
+                        help=('Force refresh of cache, making API requests to '
+                              'RackSpace (default: False - use cache files)'))
return parser.parse_args()
@@ -410,7 +440,7 @@ def main():
args = parse_args()
regions = setup()
if args.list:
- _list(regions)
+ _list(regions, refresh_cache=args.refresh_cache)
elif args.host:
host(regions, args.host)
sys.exit(0)
diff --git a/contrib/inventory/spacewalk.ini b/contrib/inventory/spacewalk.ini
new file mode 100644
index 0000000000..5433c4221b
--- /dev/null
+++ b/contrib/inventory/spacewalk.ini
@@ -0,0 +1,16 @@
+# Put this ini-file in the same directory as spacewalk.py
+# Command line options have precedence over options defined in here.
+
+[spacewalk]
+# To limit the script on one organization in spacewalk, uncomment org_number
+# and fill in the organization ID:
+# org_number=2
+
+# To prefix the group names with the organization ID set prefix_org_name=true.
+# This is convenient when org_number is not set and you have the same group names
+# in multiple organizations within spacewalk
+# The prefix is "org_number-"
+prefix_org_name=false
+
+# Default cache_age for files created with spacewalk-report is 300sec.
+cache_age=300
diff --git a/contrib/inventory/spacewalk.py b/contrib/inventory/spacewalk.py
index 89e045056c..b853ca18ba 100755
--- a/contrib/inventory/spacewalk.py
+++ b/contrib/inventory/spacewalk.py
@@ -16,12 +16,16 @@ This script is dependent upon the spacewalk-reports package being installed
on the same machine. It is basically a CSV-to-JSON converter from the
output of "spacewalk-report system-groups-systems|inventory".
-Tested with Ansible 1.1
+Tested with Ansible 1.9.2 and spacewalk 2.3
"""
#
# Author:: Jon Miller <jonEbird@gmail.com>
# Copyright:: Copyright (c) 2013, Jon Miller
#
+# Extended for support of multiple organizations and
+# adding the "_meta" dictionary to --list output by
+# Bernhard Lichtinger <bernhard.lichtinger@lrz.de> 2015
+#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or (at
@@ -41,6 +45,7 @@ import os
import time
from optparse import OptionParser
import subprocess
+import ConfigParser
try:
import json
@@ -51,8 +56,9 @@ base_dir = os.path.dirname(os.path.realpath(__file__))
SW_REPORT = '/usr/bin/spacewalk-report'
CACHE_DIR = os.path.join(base_dir, ".spacewalk_reports")
CACHE_AGE = 300 # 5min
+INI_FILE = os.path.join(base_dir, "spacewalk.ini")
-# Sanity check
+ # Sanity check
if not os.path.exists(SW_REPORT):
print >> sys.stderr, 'Error: %s is required for operation.' % (SW_REPORT)
sys.exit(1)
@@ -80,6 +86,8 @@ def spacewalk_report(name):
lines = open(cache_filename, 'r').readlines()
keys = lines[0].strip().split(',')
+ # add 'spacewalk_' prefix to the keys
+ keys = [ 'spacewalk_' + key for key in keys ]
for line in lines[1:]:
values = line.strip().split(',')
if len(keys) == len(values):
@@ -97,20 +105,85 @@ parser.add_option('--host', default=None, dest="host",
parser.add_option('-H', '--human', dest="human",
default=False, action="store_true",
help="Produce a friendlier version of either server list or host detail")
+parser.add_option('-o', '--org', default=None, dest="org_number",
+ help="Limit to spacewalk organization number")
+parser.add_option('-p', default=False, dest="prefix_org_name", action="store_true",
+ help="Prefix the group name with the organization number")
(options, args) = parser.parse_args()
+# read spacewalk.ini if present
+#------------------------------
+if os.path.exists(INI_FILE):
+ config = ConfigParser.SafeConfigParser()
+ config.read(INI_FILE)
+ if config.has_option('spacewalk' , 'cache_age'):
+ CACHE_AGE = config.get('spacewalk' , 'cache_age')
+ if not options.org_number and config.has_option('spacewalk' , 'org_number'):
+ options.org_number = config.get('spacewalk' , 'org_number')
+ if not options.prefix_org_name and config.has_option('spacewalk' , 'prefix_org_name'):
+ options.prefix_org_name = config.getboolean('spacewalk' , 'prefix_org_name')
+
+
+# Generate dictionary for mapping group_id to org_id
+#------------------------------
+org_groups = {}
+try:
+ for group in spacewalk_report('system-groups'):
+ org_groups[group['spacewalk_group_id']] = group['spacewalk_org_id']
+
+except (OSError), e:
+ print >> sys.stderr, 'Problem executing the command "%s system-groups": %s' % \
+ (SW_REPORT, str(e))
+ sys.exit(2)
+
+
# List out the known server from Spacewalk
#------------------------------
if options.list:
+ # to build the "_meta"-Group with hostvars first create dictionary for later use
+ host_vars = {}
+ try:
+ for item in spacewalk_report('inventory'):
+ host_vars[ item['spacewalk_profile_name'] ] = dict( ( key, ( value.split(';') if ';' in value else value) ) for key, value in item.items() )
+
+ except (OSError), e:
+ print >> sys.stderr, 'Problem executing the command "%s inventory": %s' % \
+ (SW_REPORT, str(e))
+ sys.exit(2)
+
groups = {}
+ meta = { "hostvars" : {} }
try:
for system in spacewalk_report('system-groups-systems'):
- if system['group_name'] not in groups:
- groups[system['group_name']] = set()
-
- groups[system['group_name']].add(system['server_name'])
+ # first get org_id of system
+ org_id = org_groups[ system['spacewalk_group_id'] ]
+
+ # shall we add the org_id as prefix to the group name:
+ if options.prefix_org_name:
+ prefix = org_id + "-"
+ group_name = prefix + system['spacewalk_group_name']
+ else:
+ group_name = system['spacewalk_group_name']
+
+ # if we are limited to one organization:
+ if options.org_number:
+ if org_id == options.org_number:
+ if group_name not in groups:
+ groups[group_name] = set()
+
+ groups[group_name].add(system['spacewalk_server_name'])
+ if system['spacewalk_server_name'] in host_vars and not system['spacewalk_server_name'] in meta[ "hostvars" ]:
+ meta[ "hostvars" ][ system['spacewalk_server_name'] ] = host_vars[ system['spacewalk_server_name'] ]
+ # or we list all groups and systems:
+ else:
+ if group_name not in groups:
+ groups[group_name] = set()
+
+ groups[group_name].add(system['spacewalk_server_name'])
+ if system['spacewalk_server_name'] in host_vars and not system['spacewalk_server_name'] in meta[ "hostvars" ]:
+ meta[ "hostvars" ][ system['spacewalk_server_name'] ] = host_vars[ system['spacewalk_server_name'] ]
except (OSError), e:
print >> sys.stderr, 'Problem executing the command "%s system-groups-systems": %s' % \
@@ -121,8 +194,10 @@ if options.list:
for group, systems in groups.iteritems():
print '[%s]\n%s\n' % (group, '\n'.join(systems))
else:
- print json.dumps(dict([ (k, list(s)) for k, s in groups.iteritems() ]))
-
+ final = dict( [ (k, list(s)) for k, s in groups.iteritems() ] )
+ final["_meta"] = meta
+ print json.dumps( final )
+ #print json.dumps(groups)
sys.exit(0)
@@ -133,7 +208,7 @@ elif options.host:
host_details = {}
try:
for system in spacewalk_report('inventory'):
- if system['hostname'] == options.host:
+ if system['spacewalk_hostname'] == options.host:
host_details = system
break
@@ -147,8 +222,7 @@ elif options.host:
for k, v in host_details.iteritems():
print ' %s: %s' % (k, '\n '.join(v.split(';')))
else:
- print json.dumps(host_details)
-
+ print json.dumps( dict( ( key, ( value.split(';') if ';' in value else value) ) for key, value in host_details.items() ) )
sys.exit(0)
else:
diff --git a/contrib/inventory/ssh_config.py b/contrib/inventory/ssh_config.py
index 7c04c8cc6d..55401a664d 100755
--- a/contrib/inventory/ssh_config.py
+++ b/contrib/inventory/ssh_config.py
@@ -19,6 +19,10 @@
# Dynamic inventory script which lets you use aliases from ~/.ssh/config.
#
+# There were some issues with various Paramiko versions. I took a deeper look
+# and tested heavily. Now, ansible parses this alright with Paramiko versions
+# 1.7.2 to 1.15.2.
+#
# It prints inventory based on parsed ~/.ssh/config. You can refer to hosts
# with their alias, rather than with the IP or hostname. It takes advantage
# of the ansible_ssh_{host,port,user,private_key_file}.
@@ -39,7 +43,6 @@
import argparse
import os.path
import sys
-
import paramiko
try:
@@ -47,6 +50,8 @@ try:
except ImportError:
import simplejson as json
+SSH_CONF = '~/.ssh/config'
+
_key = 'ssh_config'
_ssh_to_ansible = [('user', 'ansible_ssh_user'),
@@ -56,15 +61,25 @@ _ssh_to_ansible = [('user', 'ansible_ssh_user'),
def get_config():
- with open(os.path.expanduser('~/.ssh/config')) as f:
+ if not os.path.isfile(os.path.expanduser(SSH_CONF)):
+ return {}
+ with open(os.path.expanduser(SSH_CONF)) as f:
cfg = paramiko.SSHConfig()
cfg.parse(f)
ret_dict = {}
for d in cfg._config:
+ if type(d['host']) is list:
+ alias = d['host'][0]
+ else:
+ alias = d['host']
+ if ('?' in alias) or ('*' in alias):
+ continue
_copy = dict(d)
del _copy['host']
- for host in d['host']:
- ret_dict[host] = _copy['config']
+ if 'config' in _copy:
+ ret_dict[alias] = _copy['config']
+ else:
+ ret_dict[alias] = _copy
return ret_dict
@@ -75,7 +90,12 @@ def print_list():
tmp_dict = {}
for ssh_opt, ans_opt in _ssh_to_ansible:
if ssh_opt in attributes:
- tmp_dict[ans_opt] = attributes[ssh_opt]
+ # If the attribute is a list, just take the first element.
+ # Private key is returned in a list for some reason.
+ attr = attributes[ssh_opt]
+ if type(attr) is list:
+ attr = attr[0]
+ tmp_dict[ans_opt] = attr
if tmp_dict:
meta['hostvars'][alias] = tmp_dict
diff --git a/contrib/inventory/windows_azure.ini b/contrib/inventory/windows_azure.ini
index 133a5e5ff6..c37f79c629 100644
--- a/contrib/inventory/windows_azure.ini
+++ b/contrib/inventory/windows_azure.ini
@@ -13,8 +13,9 @@
# API calls to Windows Azure may be slow. For this reason, we cache the results
# of an API call. Set this to the path you want cache files to be written to.
-# One file will be written to this directory:
+# Two files will be written to this directory:
# - ansible-azure.cache
+# - ansible-azure.index
#
cache_path = /tmp
diff --git a/contrib/inventory/windows_azure.py b/contrib/inventory/windows_azure.py
index 1302967907..9b5197ffc8 100755
--- a/contrib/inventory/windows_azure.py
+++ b/contrib/inventory/windows_azure.py
@@ -65,6 +65,14 @@ class AzureInventory(object):
self.inventory = {}
# Index of deployment name -> host
self.index = {}
+ self.host_metadata = {}
+
+ # Cache setting defaults.
+ # These can be overridden in settings (see `read_settings`).
+ cache_dir = os.path.expanduser('~')
+ self.cache_path_cache = os.path.join(cache_dir, '.ansible-azure.cache')
+ self.cache_path_index = os.path.join(cache_dir, '.ansible-azure.index')
+ self.cache_max_age = 0
# Read settings and parse CLI arguments
self.read_settings()
@@ -82,15 +90,37 @@ class AzureInventory(object):
if self.args.list_images:
data_to_print = self.json_format_dict(self.get_images(), True)
- elif self.args.list:
+ elif self.args.list or self.args.host:
# Display list of nodes for inventory
if len(self.inventory) == 0:
- data_to_print = self.get_inventory_from_cache()
+ data = json.loads(self.get_inventory_from_cache())
else:
- data_to_print = self.json_format_dict(self.inventory, True)
+ data = self.inventory
+ if self.args.host:
+ data_to_print = self.get_host(self.args.host)
+ else:
+ # Add the `['_meta']['hostvars']` information.
+ hostvars = {}
+ if len(data) > 0:
+ for host in set([h for hosts in data.values() for h in hosts if h]):
+ hostvars[host] = self.get_host(host, jsonify=False)
+ data['_meta'] = {'hostvars': hostvars}
+
+ # JSONify the data.
+ data_to_print = self.json_format_dict(data, pretty=True)
print data_to_print
+ def get_host(self, hostname, jsonify=True):
+ """Return information about the given hostname, based on what
+ the Windows Azure API provides.
+ """
+ if hostname not in self.host_metadata:
+ return "No host found: %s" % json.dumps(self.host_metadata)
+ if jsonify:
+ return json.dumps(self.host_metadata[hostname])
+ return self.host_metadata[hostname]
+
def get_images(self):
images = []
for image in self.sms.list_os_images():
@@ -121,28 +151,36 @@ class AzureInventory(object):
# Cache related
if config.has_option('azure', 'cache_path'):
- cache_path = config.get('azure', 'cache_path')
- self.cache_path_cache = cache_path + "/ansible-azure.cache"
- self.cache_path_index = cache_path + "/ansible-azure.index"
+ cache_path = os.path.expandvars(os.path.expanduser(config.get('azure', 'cache_path')))
+ self.cache_path_cache = os.path.join(cache_path, 'ansible-azure.cache')
+ self.cache_path_index = os.path.join(cache_path, 'ansible-azure.index')
if config.has_option('azure', 'cache_max_age'):
self.cache_max_age = config.getint('azure', 'cache_max_age')
def read_environment(self):
''' Reads the settings from environment variables '''
# Credentials
- if os.getenv("AZURE_SUBSCRIPTION_ID"): self.subscription_id = os.getenv("AZURE_SUBSCRIPTION_ID")
- if os.getenv("AZURE_CERT_PATH"): self.cert_path = os.getenv("AZURE_CERT_PATH")
-
+ if os.getenv("AZURE_SUBSCRIPTION_ID"):
+ self.subscription_id = os.getenv("AZURE_SUBSCRIPTION_ID")
+ if os.getenv("AZURE_CERT_PATH"):
+ self.cert_path = os.getenv("AZURE_CERT_PATH")
def parse_cli_args(self):
"""Command line argument processing"""
- parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on Azure')
+ parser = argparse.ArgumentParser(
+ description='Produce an Ansible Inventory file based on Azure',
+ )
parser.add_argument('--list', action='store_true', default=True,
- help='List nodes (default: True)')
+ help='List nodes (default: True)')
parser.add_argument('--list-images', action='store',
- help='Get all available images.')
- parser.add_argument('--refresh-cache', action='store_true', default=False,
- help='Force refresh of cache by making API requests to Azure (default: False - use cache files)')
+ help='Get all available images.')
+ parser.add_argument('--refresh-cache',
+ action='store_true', default=False,
+                        help='Force refresh of the cache by making API requests to Azure '
+ '(default: False - use cache files)',
+ )
+ parser.add_argument('--host', action='store',
+ help='Get all information about an instance.')
self.args = parser.parse_args()
def do_api_calls_update_cache(self):
@@ -163,11 +201,12 @@ class AzureInventory(object):
sys.exit(1)
def add_deployments(self, cloud_service):
- """Makes an Azure API call to get the list of virtual machines associated with a cloud service"""
+ """Makes an Azure API call to get the list of virtual machines
+ associated with a cloud service.
+ """
try:
for deployment in self.sms.get_hosted_service_properties(cloud_service.service_name,embed_detail=True).deployments.deployments:
- if deployment.deployment_slot == "Production":
- self.add_deployment(cloud_service, deployment)
+ self.add_deployment(cloud_service, deployment)
except WindowsAzureError as e:
print "Looks like Azure's API is down:"
print
@@ -176,20 +215,43 @@ class AzureInventory(object):
def add_deployment(self, cloud_service, deployment):
"""Adds a deployment to the inventory and index"""
+ for role in deployment.role_instance_list.role_instances:
+ try:
+ # Default port 22 unless port found with name 'SSH'
+ port = '22'
+ for ie in role.instance_endpoints.instance_endpoints:
+ if ie.name == 'SSH':
+ port = ie.public_port
+ break
+ except AttributeError as e:
+ pass
+ finally:
+ self.add_instance(role.instance_name, deployment, port, cloud_service, role.instance_status)
+
+ def add_instance(self, hostname, deployment, ssh_port, cloud_service, status):
+ """Adds an instance to the inventory and index"""
dest = urlparse(deployment.url).hostname
# Add to index
- self.index[dest] = deployment.name
+ self.index[hostname] = deployment.name
+
+ self.host_metadata[hostname] = dict(ansible_ssh_host=dest,
+ ansible_ssh_port=int(ssh_port),
+ instance_status=status,
+ private_id=deployment.private_id)
# List of all azure deployments
- self.push(self.inventory, "azure", dest)
+ self.push(self.inventory, "azure", hostname)
# Inventory: Group by service name
- self.push(self.inventory, self.to_safe(cloud_service.service_name), dest)
+ self.push(self.inventory, self.to_safe(cloud_service.service_name), hostname)
+
+ if int(ssh_port) == 22:
+ self.push(self.inventory, "Cloud_services", hostname)
# Inventory: Group by region
- self.push(self.inventory, self.to_safe(cloud_service.hosted_service_properties.location), dest)
+ self.push(self.inventory, self.to_safe(cloud_service.hosted_service_properties.location), hostname)
def push(self, my_dict, key, element):
"""Pushed an element onto an array that may not have been defined in the dict."""
diff --git a/docs/man/man1/ansible-playbook.1 b/docs/man/man1/ansible-playbook.1
index 0c820b72e3..cfc33d1744 100644
--- a/docs/man/man1/ansible-playbook.1
+++ b/docs/man/man1/ansible-playbook.1
@@ -60,7 +60,7 @@ Run operations with become (nopasswd implied)
.PP
\fB\-\-become\-method=BECOME_METHOD\fR
.RS 4
-Privilege escalation method to use (default=sudo), valid choices: [ sudo | su | pbrun | pfexec | runas ]
+Privilege escalation method to use (default=sudo), valid choices: [ sudo | su | pbrun | pfexec | runas | doas ]
.RE
.PP
\fB\-\-become\-user=BECOME_USER\fR
diff --git a/docs/man/man1/ansible-playbook.1.asciidoc.in b/docs/man/man1/ansible-playbook.1.asciidoc.in
index 8b8ba9c468..00682567e8 100644
--- a/docs/man/man1/ansible-playbook.1.asciidoc.in
+++ b/docs/man/man1/ansible-playbook.1.asciidoc.in
@@ -51,7 +51,7 @@ Run operations with become (nopasswd implied)
*--become-method=BECOME_METHOD*::
Privilege escalation method to use (default=sudo),
-valid choices: [ sudo | su | pbrun | pfexec | runas ]
+valid choices: [ sudo | su | pbrun | pfexec | runas | doas ]
*--become-user=BECOME_USER*::
diff --git a/docs/man/man1/ansible.1 b/docs/man/man1/ansible.1
index 83bfc0500d..7c6e79da8d 100644
--- a/docs/man/man1/ansible.1
+++ b/docs/man/man1/ansible.1
@@ -84,7 +84,7 @@ seconds\&.
.PP
\fB\-\-become\-method=\fR\fIBECOME_METHOD\fR
.RS 4
-Privilege escalation method to use (default=sudo), valid choices: [ sudo | su | pbrun | pfexec | runas ]
+Privilege escalation method to use (default=sudo), valid choices: [ sudo | su | pbrun | pfexec | runas | doas ]
.RE
.PP
\fB\-\-become\-user=\fR\fIBECOME_USER\fR
diff --git a/docs/man/man1/ansible.1.asciidoc.in b/docs/man/man1/ansible.1.asciidoc.in
index 26bd0144d4..aaaac33c2a 100644
--- a/docs/man/man1/ansible.1.asciidoc.in
+++ b/docs/man/man1/ansible.1.asciidoc.in
@@ -65,7 +65,7 @@ Run commands in the background, killing the task after 'NUM' seconds.
*--become-method=*'BECOME_METHOD'::
Privilege escalation method to use (default=sudo),
-valid choices: [ sudo | su | pbrun | pfexec | runas ]
+valid choices: [ sudo | su | pbrun | pfexec | runas | doas ]
*--become-user=*'BECOME_USER'::
diff --git a/docsite/_themes/srtd/layout.html b/docsite/_themes/srtd/layout.html
index 74fb6008ba..f4d7a8a536 100644
--- a/docsite/_themes/srtd/layout.html
+++ b/docsite/_themes/srtd/layout.html
@@ -196,17 +196,15 @@
<div class="wy-nav-content">
<div class="rst-content">
-<!-- AnsibleFest and free eBook preview stuff -->
-<center>
-<a href="http://www.ansible.com/tower?utm_source=docs">
-<img src="http://www.ansible.com/hubfs/Docs_Ads/TowerDocs.png">
-</a>
-<a href="http://www.ansible.com/ansible-book?utm_source=docs">
-<img src="http://www.ansible.com/hubfs/Docs_Ads/Get_2_full_chapters_of_Ansible-_Up_.png">
-</a>
-<br/>&nbsp;<br/>
-<br/>&nbsp;<br/>
-</center>
+ <!-- Tower ads -->
+ <a class="DocSiteBanner" href="http://www.ansible.com/tower?utm_source=docs">
+ <div class="DocSiteBanner-imgWrapper">
+ <img src="{{ pathto('_static/', 1) }}images/banner_ad_1.png">
+ </div>
+ <div class="DocSiteBanner-imgWrapper">
+ <img src="{{ pathto('_static/', 1) }}images/banner_ad_2.png">
+ </div>
+ </a>
{% include "breadcrumbs.html" %}
<div id="page-content">
diff --git a/docsite/_themes/srtd/static/css/theme.css b/docsite/_themes/srtd/static/css/theme.css
index 29a1c6ba57..4f7cbc8caa 100644
--- a/docsite/_themes/srtd/static/css/theme.css
+++ b/docsite/_themes/srtd/static/css/theme.css
@@ -4714,3 +4714,42 @@ span[id*='MathJax-Span'] {
color: #fff;
font-size: 20px;
}
+
+.wy-menu-vertical a {
+ padding: 0;
+}
+
+.wy-menu-vertical a.reference.internal {
+ padding: 0.4045em 1.618em;
+}
+
+
+.DocSiteBanner {
+ width: 100%;
+ display: flex;
+ display: -webkit-flex;
+ flex-wrap: wrap;
+ -webkit-flex-wrap: wrap;
+ justify-content: space-between;
+ -webkit-justify-content: space-between;
+ background-color: #ff5850;
+ margin-bottom: 25px;
+}
+
+.DocSiteBanner-imgWrapper {
+ max-width: 100%;
+}
+
+@media screen and (max-width: 1403px) {
+ .DocSiteBanner {
+ width: 100%;
+ display: flex;
+ display: -webkit-flex;
+ flex-wrap: wrap;
+ -webkit-flex-wrap: wrap;
+ justify-content: center;
+ -webkit-justify-content: center;
+ background-color: #fff;
+ margin-bottom: 25px;
+ }
+}
diff --git a/docsite/_themes/srtd/static/images/banner_ad_1.png b/docsite/_themes/srtd/static/images/banner_ad_1.png
new file mode 100644
index 0000000000..a6555f2567
--- /dev/null
+++ b/docsite/_themes/srtd/static/images/banner_ad_1.png
Binary files differ
diff --git a/docsite/_themes/srtd/static/images/banner_ad_2.png b/docsite/_themes/srtd/static/images/banner_ad_2.png
new file mode 100644
index 0000000000..f9d6c6d42c
--- /dev/null
+++ b/docsite/_themes/srtd/static/images/banner_ad_2.png
Binary files differ
diff --git a/docsite/conf.py b/docsite/conf.py
index 61c6de5c11..95bc1fb832 100644
--- a/docsite/conf.py
+++ b/docsite/conf.py
@@ -55,7 +55,7 @@ master_doc = 'index'
# General substitutions.
project = 'Ansible Documentation'
-copyright = "2013 Ansible, Inc"
+copyright = "2013-2015 Ansible, Inc"
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
@@ -100,6 +100,8 @@ exclude_patterns = ['modules']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
+highlight_language = 'YAML'
+
# Options for HTML output
# -----------------------
diff --git a/docsite/rst/become.rst b/docsite/rst/become.rst
index 0d09e4116e..c8738ef08a 100644
--- a/docsite/rst/become.rst
+++ b/docsite/rst/become.rst
@@ -23,7 +23,7 @@ become_user
equivalent to adding 'sudo_user:' or 'su_user:' to a play or task, set to user with desired privileges
become_method
- at play or task level overrides the default method set in ansible.cfg, set to 'sudo'/'su'/'pbrun'/'pfexec'
+ at play or task level overrides the default method set in ansible.cfg, set to 'sudo'/'su'/'pbrun'/'pfexec'/'doas'
New ansible\_ variables
@@ -54,7 +54,7 @@ New command line options
--become-method=BECOME_METHOD
privilege escalation method to use (default=sudo),
- valid choices: [ sudo | su | pbrun | pfexec ]
+ valid choices: [ sudo | su | pbrun | pfexec | doas ]
--become-user=BECOME_USER
run operations as this user (default=root)
diff --git a/docsite/rst/developing_modules.rst b/docsite/rst/developing_modules.rst
index 1b6a6887fe..76fdbd9a44 100644
--- a/docsite/rst/developing_modules.rst
+++ b/docsite/rst/developing_modules.rst
@@ -343,7 +343,7 @@ and guidelines:
* If packaging modules in an RPM, they only need to be installed on the control machine and should be dropped into /usr/share/ansible. This is entirely optional and up to you.
-* Modules should output valid JSON only. All return types must be hashes (dictionaries) although they can be nested. Lists or simple scalar values are not supported, though they can be trivially contained inside a dictionary.
+* Modules must output valid JSON only. The toplevel return type must be a hash (dictionary) although they can be nested. Lists or simple scalar values are not supported, though they can be trivially contained inside a dictionary.
* In the event of failure, a key of 'failed' should be included, along with a string explanation in 'msg'. Modules that raise tracebacks (stacktraces) are generally considered 'poor' modules, though Ansible can deal with these returns and will automatically convert anything unparseable into a failed result. If you are using the AnsibleModule common Python code, the 'failed' element will be included for you automatically when you call 'fail_json'.
@@ -381,8 +381,8 @@ Include it in your module file like this::
# ... snip ...
'''
-The ``description``, and ``notes`` fields
-support formatting with some special macros.
+The ``description``, and ``notes`` fields
+support formatting with some special macros.
These formatting functions are ``U()``, ``M()``, ``I()``, and ``C()``
for URL, module, italic, and constant-width respectively. It is suggested
@@ -425,11 +425,12 @@ built and appear in the 'docsite/' directory.
Module Paths
````````````
-If you are having trouble getting your module "found" by ansible, be sure it is in the ANSIBLE_LIBRARY_PATH.
+If you are having trouble getting your module "found" by ansible, be
+sure it is in the ``ANSIBLE_LIBRARY`` environment variable.
If you have a fork of one of the ansible module projects, do something like this::
- ANSIBLE_LIBRARY=~/ansible-modules-core:~/ansible-modules-extras
+ ANSIBLE_LIBRARY=~/ansible-modules-core:~/ansible-modules-extras
And this will make the items in your fork be loaded ahead of what ships with Ansible. Just be sure
to make sure you're not reporting bugs on versions from your fork!
@@ -465,7 +466,7 @@ Module checklist
* Requirements should be documented, using the `requirements=[]` field
* Author should be set, name and github id at least
* Made use of U() for urls, C() for files and options, I() for params, M() for modules?
- * GPL License header
+ * GPL 3 License header
* Does module use check_mode? Could it be modified to use it? Document it
* Examples: make sure they are reproducible
* Return: document the return structure of the module
@@ -494,8 +495,15 @@ Module checklist
* Being pep8 compliant is nice, but not a requirement. Specifically, the 80 column limit now hinders readability more than it improves it
* Avoid '`action`/`command`', they are imperative and not declarative, there are other ways to express the same thing
* Sometimes you want to split the module, especially if you are adding a list/info state, you want a _facts version
-* If you are asking 'how can i have a module execute other modules' ... you want to write a role
-
+* If you are asking 'how can I have a module execute other modules' ... you want to write a role
+* Return values must be able to be serialized as json via the python stdlib
+ json library. Basic python types (strings, int, dicts, lists, etc) are
+ serializable. A common pitfall is to try returning an object via
+ exit_json(). Instead, convert the fields you need from the object into the
+ fields of a dictionary and return the dictionary.
+* Do not use urllib2 to handle urls. urllib2 does not natively verify TLS
+ certificates and so is insecure for https. Instead, use either fetch_url or
+ open_url from ansible.module_utils.urls.
Windows modules checklist
`````````````````````````
@@ -548,7 +556,7 @@ then::
Deprecating and making module aliases
``````````````````````````````````````
-Starting in 1.8 you can deprecate modules by renaming them with a preceding _, i.e. old_cloud.py to
+Starting in 1.8 you can deprecate modules by renaming them with a preceding _, i.e. old_cloud.py to
_old_cloud.py, This will keep the module available but hide it from the primary docs and listing.
You can also rename modules and keep an alias to the old name by using a symlink that starts with _.
@@ -577,5 +585,3 @@ This example allows the stat module to be called with fileinfo, making the follo
Development mailing list
`irc.freenode.net <http://irc.freenode.net>`_
#ansible IRC chat channel
-
-
diff --git a/docsite/rst/faq.rst b/docsite/rst/faq.rst
index faac872fad..4635bb57d9 100644
--- a/docsite/rst/faq.rst
+++ b/docsite/rst/faq.rst
@@ -8,7 +8,7 @@ Here are some commonly-asked questions and their answers.
How can I set the PATH or any other environment variable for a task or entire playbook?
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-Setting environment variables can be done with the `environment` keyword. It can be used at task or playbook level::
+Setting environment variables can be done with the `environment` keyword. It can be used at task or play level::
environment:
PATH: "{{ ansible_env.PATH }}:/thingy/bin"
@@ -235,7 +235,9 @@ Once the library is ready, SHA512 password values can then be generated as follo
Can I get training on Ansible or find commercial support?
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-Yes! See `our Guru offering <http://www.ansible.com/ansible-guru>`_ for online support, and support is also included with :doc:`tower`. You can also read our `service page <http://www.ansible.com/ansible-services>`_ and email `info@ansible.com <mailto:info@ansible.com>`_ for further details.
+Yes! See our `services page <http://www.ansible.com/services>`_ for information on our services and training offerings. Support is also included with :doc:`tower`. Email `info@ansible.com <mailto:info@ansible.com>`_ for further details.
+
+We also offer free web-based training classes on a regular basis. See our `webinar page <http://www.ansible.com/webinars-training>`_ for more info on upcoming webinars.
.. _web_interface:
diff --git a/docsite/rst/guide_aws.rst b/docsite/rst/guide_aws.rst
index e0d0c12630..cea9bb9999 100644
--- a/docsite/rst/guide_aws.rst
+++ b/docsite/rst/guide_aws.rst
@@ -166,7 +166,7 @@ Similar groups are available for regions and other classifications, and can be s
Autoscaling with Ansible Pull
`````````````````````````````
-Amazon Autoscaling features automatically increase or decrease capacity based on load. There are also Ansible ansibles shown in the cloud documentation that
+Amazon Autoscaling features automatically increase or decrease capacity based on load. There are also Ansible modules shown in the cloud documentation that
can configure autoscaling policy.
When nodes come online, it may not be sufficient to wait for the next cycle of an ansible command to come along and configure that node.
diff --git a/docsite/rst/intro_configuration.rst b/docsite/rst/intro_configuration.rst
index 466e37fcbf..2cf03a70d2 100644
--- a/docsite/rst/intro_configuration.rst
+++ b/docsite/rst/intro_configuration.rst
@@ -115,6 +115,15 @@ sudoing. The default behavior is also no::
Users on platforms where sudo passwords are enabled should consider changing this setting.
+.. _ask_vault_pass:
+
+ask_vault_pass
+==============
+
+This controls whether an Ansible playbook should prompt for the vault password by default. The default behavior is no::
+
+ ask_vault_pass=True
+
.. _bin_ansible_callbacks:
bin_ansible_callbacks
@@ -642,7 +651,7 @@ The equivalent of adding sudo: or su: to a play or task, set to true/yes to acti
become_method
=============
-Set the privilege escalation method. The default is ``sudo``, other options are ``su``, ``pbrun``, ``pfexec``::
+Set the privilege escalation method. The default is ``sudo``, other options are ``su``, ``pbrun``, ``pfexec``, ``doas``::
become_method=su
diff --git a/docsite/rst/intro_installation.rst b/docsite/rst/intro_installation.rst
index 472c158eac..50a2803aad 100644
--- a/docsite/rst/intro_installation.rst
+++ b/docsite/rst/intro_installation.rst
@@ -105,7 +105,7 @@ open source projects.
.. note::
- If you are intending to use Tower as the Control Machine, do not use a source install. Please use apt/yum/pip for a stable version
+ If you are intending to use Tower as the Control Machine, do not use a source install. Please use an OS package manager (e.g. apt/yum) or pip to install a stable version.
To install from source.
diff --git a/docsite/rst/intro_inventory.rst b/docsite/rst/intro_inventory.rst
index 3265821831..5afffb0fe5 100644
--- a/docsite/rst/intro_inventory.rst
+++ b/docsite/rst/intro_inventory.rst
@@ -199,7 +199,7 @@ Host connection::
ansible_connection
Connection type to the host. Candidates are local, smart, ssh or paramiko. The default is smart.
-Ssh connection::
+SSH connection::
ansible_ssh_host
The name of the host to connect to, if different from the alias you wish to give to it.
@@ -223,7 +223,7 @@ Privilege escalation (see :doc:`Ansible Privilege Escalation<become>` for furthe
ansible_become_pass
Equivalent to ansible_sudo_pass or ansible_su_pass, allows you to set the privilege escalation password
-Remote host environnement parameters::
+Remote host environment parameters::
ansible_shell_type
The shell type of the target system. Commands are formatted using 'sh'-style syntax by default. Setting this to 'csh' or 'fish' will cause commands executed on target systems to follow those shell's syntax instead.
diff --git a/docsite/rst/intro_patterns.rst b/docsite/rst/intro_patterns.rst
index 579276a3af..07160b2182 100644
--- a/docsite/rst/intro_patterns.rst
+++ b/docsite/rst/intro_patterns.rst
@@ -72,9 +72,9 @@ As an advanced usage, you can also select the numbered server in a group::
webservers[0]
-Or a portion of servers in a group::
+Or a range of servers in a group::
- webservers[0-25]
+ webservers[0:25]
Most people don't specify patterns as regular expressions, but you can. Just start the pattern with a '~'::
diff --git a/docsite/rst/intro_windows.rst b/docsite/rst/intro_windows.rst
index 645248fde5..9bda3e5489 100644
--- a/docsite/rst/intro_windows.rst
+++ b/docsite/rst/intro_windows.rst
@@ -28,10 +28,46 @@ On a Linux control machine::
pip install https://github.com/diyan/pywinrm/archive/master.zip#egg=pywinrm
-If you wish to connect to domain accounts published through Active Directory (as opposed to local accounts created on the remote host)::
+Active Directory Support
+++++++++++++++++++++++++
- pip install kerberos
+If you wish to connect to domain accounts published through Active Directory (as opposed to local accounts created on the remote host), you will need to install the "python-kerberos" module and the MIT krb5 libraries it depends on.
+
+Installing python-kerberos dependencies
+---------------------------------------
+
+.. code-block:: bash
+
+ # Via Yum
+ yum -y install python-devel krb5-devel krb5-libs krb5-workstation
+
+ # Via Apt (Ubuntu)
+ sudo apt-get install python-dev libkrb5-dev
+
+ # Via Portage (Gentoo)
+ emerge -av app-crypt/mit-krb5
+ emerge -av dev-python/setuptools
+
+ # Via pkg (FreeBSD)
+ sudo pkg install security/krb5
+
+ # Via OpenCSW (Solaris)
+ pkgadd -d http://get.opencsw.org/now
+ /opt/csw/bin/pkgutil -U
+ /opt/csw/bin/pkgutil -y -i libkrb5_3
+
+ # Via Pacman (Arch Linux)
+ pacman -S krb5
+Installing python-kerberos
+--------------------------
+
+Once you've installed the necessary dependencies, the python-kerberos wrapper can be installed via pip:
+
+.. code-block:: bash
+
+ pip install kerberos
+
Kerberos is installed and configured by default on OS X and many Linux distributions. If your control machine has not already done this for you, you will need to do so.
.. _windows_inventory:
diff --git a/docsite/rst/playbooks_blocks.rst b/docsite/rst/playbooks_blocks.rst
index 563d3300e6..4d391c7618 100644
--- a/docsite/rst/playbooks_blocks.rst
+++ b/docsite/rst/playbooks_blocks.rst
@@ -7,11 +7,12 @@ at the block level, which also makes it much easier to set data or directives co
to the tasks.
-Example::
+.. code-block:: YAML
+ :emphasize-lines: 2
+ :caption: Block example
tasks:
- block:
-
- yum: name={{ item }} state=installed
with_items:
- httpd
@@ -25,6 +26,7 @@ Example::
become: true
become_user: root
+
In the example above the 3 tasks will be executed only when the block's when condition is met and enables
privilege escalation for all the enclosed tasks.
@@ -35,7 +37,11 @@ Error Handling
``````````````
About Blocks
-Blocks also introduce the ability to handle errors in a way similar to exceptions in most programming languages.::
+Blocks also introduce the ability to handle errors in a way similar to exceptions in most programming languages.
+
+.. code-block:: YAML
+ :emphasize-lines: 2,6,10
+ :caption: Block error handling example
tasks:
- block:
diff --git a/docsite/rst/playbooks_conditionals.rst b/docsite/rst/playbooks_conditionals.rst
index f441ed659e..ed818ee286 100644
--- a/docsite/rst/playbooks_conditionals.rst
+++ b/docsite/rst/playbooks_conditionals.rst
@@ -119,12 +119,13 @@ Applying 'when' to roles and includes
`````````````````````````````````````
Note that if you have several tasks that all share the same conditional statement, you can affix the conditional
-to a task include statement as below. Note this does not work with playbook includes, just task includes. All the tasks
-get evaluated, but the conditional is applied to each and every task::
+to a task include statement as below. All the tasks get evaluated, but the conditional is applied to each and every task::
- include: tasks/sometasks.yml
when: "'reticulating splines' in output"
+.. note:: In versions prior to 2.0 this worked with task includes but not playbook includes. 2.0 allows it to work with both.
+
Or with a role::
- hosts: webservers
diff --git a/docsite/rst/playbooks_delegation.rst b/docsite/rst/playbooks_delegation.rst
index 20981503df..4411e4aa29 100644
--- a/docsite/rst/playbooks_delegation.rst
+++ b/docsite/rst/playbooks_delegation.rst
@@ -185,6 +185,48 @@ use the default remote connection type::
- hosts: 127.0.0.1
connection: local
+.. _interrupt_execution_on_any_error:
+
+Interrupt execution on any error
+````````````````````````````````
+
+With the option ``any_errors_fatal`` any failure on any host in a multi-host play will be treated as fatal and Ansible will exit immediately without waiting for the other hosts.
+
+Sometimes ``serial`` execution is unsuitable - the number of hosts is unpredictable (because of dynamic inventory), and speed is crucial (simultaneous execution is required). But all tasks must be 100% successful to continue playbook execution.
+
+For example, there is a service located in many datacenters, and there are some load balancers to pass traffic from users to the service. There is a deploy playbook to upgrade service deb-packages. Playbook stages:
+
+- disable traffic on load balancers (must be turned off simultaneously)
+- gracefully stop service
+- upgrade software (this step includes tests and starting service)
+- enable traffic on load balancers (should be turned on simultaneously)
+
+Service can't be stopped with "alive" load balancers, they must be disabled, all of them. So second stage can't be played if any server failed on "stage 1".
+
+For datacenter "A" playbook can be written this way::
+
+ ---
+ - hosts: load_balancers_dc_a
+ any_errors_fatal: True
+ tasks:
+ - name: 'shutting down datacenter [ A ]'
+ command: /usr/bin/disable-dc
+
+ - hosts: frontends_dc_a
+ tasks:
+ - name: 'stopping service'
+ command: /usr/bin/stop-software
+ - name: 'updating software'
+ command: /usr/bin/upgrade-software
+
+ - hosts: load_balancers_dc_a
+ tasks:
+ - name: 'Starting datacenter [ A ]'
+ command: /usr/bin/enable-dc
+
+
+In this example Ansible will start software upgrade on frontends only if all load balancers are successfully disabled.
+
.. seealso::
:doc:`playbooks`
diff --git a/docsite/rst/playbooks_error_handling.rst b/docsite/rst/playbooks_error_handling.rst
index ac573d86ba..fb12990b3f 100644
--- a/docsite/rst/playbooks_error_handling.rst
+++ b/docsite/rst/playbooks_error_handling.rst
@@ -103,6 +103,20 @@ does not cause handlers to fire::
- shell: wall 'beep'
changed_when: False
+Aborting the play
+`````````````````
+
+Sometimes it's desirable to abort the entire play on failure, not just skip remaining tasks for a host.
+
+The ``any_errors_fatal`` play option will mark all hosts as failed if any fails, causing an immediate abort.
+
+ - hosts: somehosts
+ any_errors_fatal: true
+ roles:
+ - myrole
+
+For finer-grained control, ``max_fail_percentage`` can be used to abort the run after a given percentage of hosts has failed.
+
.. seealso::
diff --git a/docsite/rst/playbooks_filters.rst b/docsite/rst/playbooks_filters.rst
index d95f617f19..857fc770ba 100644
--- a/docsite/rst/playbooks_filters.rst
+++ b/docsite/rst/playbooks_filters.rst
@@ -338,10 +338,30 @@ To get the last name of a file path, like 'foo.txt' out of '/etc/asdf/foo.txt'::
{{ path | basename }}
+To get the last name of a windows style file path (new in version 2.0)::
+
+ {{ path | win_basename }}
+
+To separate the windows drive letter from the rest of a file path (new in version 2.0)::
+
+ {{ path | win_splitdrive }}
+
+To get only the windows drive letter::
+
+ {{ path | win_splitdrive | first }}
+
+To get the rest of the path without the drive letter::
+
+ {{ path | win_splitdrive | last }}
+
To get the directory from a path::
{{ path | dirname }}
+To get the directory from a windows path (new in version 2.0)::
+
+ {{ path | win_dirname }}
+
To expand a path containing a tilde (`~`) character (new in version 1.5)::
{{ path | expanduser }}
diff --git a/docsite/rst/playbooks_lookups.rst b/docsite/rst/playbooks_lookups.rst
index a7d459c800..237a43e254 100644
--- a/docsite/rst/playbooks_lookups.rst
+++ b/docsite/rst/playbooks_lookups.rst
@@ -139,6 +139,101 @@ default empty string return value if the key is not in the csv file
.. note:: The default delimiter is TAB, *not* comma.
+.. _ini_lookup:
+
+The INI File Lookup
+```````````````````
+.. versionadded:: 2.0
+
+The ``ini`` lookup reads the contents of a file in INI format (key1=value1).
+This plugin retrieves the value on the right side after the equal sign ('=') of
+a given section ([section]). You can also read a property file which - in this
+case - does not contain sections.
+
+Here's a simple example of an INI file with user/password configuration::
+
+ [production]
+ # My production information
+ user=robert
+ pass=somerandompassword
+
+ [integration]
+ # My integration information
+ user=gertrude
+ pass=anotherpassword
+
+
+We can use the ``ini`` plugin to lookup user configuration::
+
+ - debug: msg="User in integration is {{ lookup('ini', 'user section=integration file=users.ini') }}"
+ - debug: msg="User in production is {{ lookup('ini', 'user section=production file=users.ini') }}"
+
+Another example use of this plugin is to look up a value in java properties.
+Here's a simple properties file we'll take as an example::
+
+ user.name=robert
+ user.pass=somerandompassword
+
+You can retrieve the ``user.name`` field with the following lookup::
+
+ - debug: msg="user.name is {{ lookup('ini', 'user.name type=property file=user.properties') }}"
+
+The ``ini`` lookup supports several arguments like the csv plugin. The format for passing
+arguments is::
+
+ lookup('ini', 'key [type=<properties|ini>] [section=section] [file=file.ini] [re=true] [default=<defaultvalue>]')
+
+The first value in the argument is the ``key``, which must be an entry that
+appears exactly once in the INI file. All other arguments are optional.
+
+
+========== ============ =========================================================================================
+Field Default Description
+---------- ------------ -----------------------------------------------------------------------------------------
+type ini Type of the file. Can be ini or properties (for java properties).
+file ansible.ini Name of the file to load
+section global Default section where to lookup for key.
+re False The key is a regexp.
+default empty string return value if the key is not in the ini file
+========== ============ =========================================================================================
+
+.. note:: In java properties files, there's no need to specify a section.
+
+.. _credstash_lookup:
+
+The Credstash Lookup
+````````````````````
+
+Credstash is a small utility for managing secrets using AWS's KMS and DynamoDB: https://github.com/LuminalOSS/credstash
+
+First, you need to store your secrets with credstash::
+
+
+ $ credstash put my-github-password secure123
+
+ my-github-password has been stored
+
+
+Example usage::
+
+
+ ---
+ - name: "Test credstash lookup plugin -- get my github password"
+ debug: msg="Credstash lookup! {{ lookup('credstash', 'my-github-password') }}"
+
+
+You can specify regions or tables to fetch secrets from::
+
+
+ ---
+ - name: "Test credstash lookup plugin -- get my other password from us-west-1"
+ debug: msg="Credstash lookup! {{ lookup('credstash', 'my-other-password', region='us-west-1') }}"
+
+
+ - name: "Test credstash lookup plugin -- get the company's github password"
+ debug: msg="Credstash lookup! {{ lookup('credstash', 'company-github-password', table='company-passwords') }}"
+
+
.. _more_lookups:
diff --git a/docsite/rst/playbooks_loops.rst b/docsite/rst/playbooks_loops.rst
index a76254a966..5acc6ae630 100644
--- a/docsite/rst/playbooks_loops.rst
+++ b/docsite/rst/playbooks_loops.rst
@@ -347,6 +347,54 @@ It's uncommonly used::
debug: msg="at array position {{ item.0 }} there is a value {{ item.1 }}"
with_indexed_items: "{{some_list}}"
+.. _using_ini_with_a_loop:
+
+Using ini file with a loop
+``````````````````````````
+.. versionadded: 2.0
+
+The ini plugin can use regexp to retrieve a set of keys. As a consequence, we can loop over this set. Here is the ini file we'll use::
+
+ [section1]
+ value1=section1/value1
+ value2=section1/value2
+
+ [section2]
+ value1=section2/value1
+ value2=section2/value2
+
+Here is an example of using ``with_ini``::
+
+ - debug: msg="{{item}}"
+ with_ini: value[1-2] section=section1 file=lookup.ini re=true
+
+And here is the returned value::
+
+ {
+ "changed": false,
+ "msg": "All items completed",
+ "results": [
+ {
+ "invocation": {
+ "module_args": "msg=\"section1/value1\"",
+ "module_name": "debug"
+ },
+ "item": "section1/value1",
+ "msg": "section1/value1",
+ "verbose_always": true
+ },
+ {
+ "invocation": {
+ "module_args": "msg=\"section1/value2\"",
+ "module_name": "debug"
+ },
+ "item": "section1/value2",
+ "msg": "section1/value2",
+ "verbose_always": true
+ }
+ ]
+ }
+
.. _flattening_a_list:
Flattening A List
diff --git a/docsite/rst/playbooks_roles.rst b/docsite/rst/playbooks_roles.rst
index 52e69a66df..2cc98117a4 100644
--- a/docsite/rst/playbooks_roles.rst
+++ b/docsite/rst/playbooks_roles.rst
@@ -65,19 +65,6 @@ contain all of my wordpress tasks in a single wordpress.yml file, and use it lik
- include: wordpress.yml wp_user=alice
- include: wordpress.yml wp_user=bob
-If you are running Ansible 1.4 and later, include syntax is streamlined to match roles, and also allows passing list and dictionary parameters::
-
- tasks:
- - { include: wordpress.yml, wp_user: timmy, ssh_keys: [ 'keys/one.txt', 'keys/two.txt' ] }
-
-Using either syntax, variables passed in can then be used in the included files. We'll cover them in :doc:`playbooks_variables`.
-You can reference them like this::
-
- {{ wp_user }}
-
-(In addition to the explicitly passed-in parameters, all variables from
-the vars section are also available for use here as well.)
-
Starting in 1.0, variables can also be passed to include files using an alternative syntax,
which also supports structured variables::
@@ -90,6 +77,14 @@ which also supports structured variables::
- keys/one.txt
- keys/two.txt
+Using either syntax, variables passed in can then be used in the included files. We'll cover them in :doc:`playbooks_variables`.
+You can reference them like this::
+
+ {{ wp_user }}
+
+(In addition to the explicitly passed-in parameters, all variables from
+the vars section are also available for use here as well.)
+
Playbooks can include other playbooks too, but that's mentioned in a later section.
.. note::
diff --git a/docsite/rst/playbooks_tags.rst b/docsite/rst/playbooks_tags.rst
index 76101717b7..7876fdc3d8 100644
--- a/docsite/rst/playbooks_tags.rst
+++ b/docsite/rst/playbooks_tags.rst
@@ -60,7 +60,9 @@ Example::
- tag1
There are another 3 special keywords for tags, 'tagged', 'untagged' and 'all', which run only tagged, only untagged
-and all tasks respectively. By default ansible runs as if '--tags all' had been specified.
+and all tasks respectively.
+
+By default ansible runs as if '--tags all' had been specified.
.. seealso::
diff --git a/docsite/rst/playbooks_variables.rst b/docsite/rst/playbooks_variables.rst
index 619a3cf07b..c47dfe0fc7 100644
--- a/docsite/rst/playbooks_variables.rst
+++ b/docsite/rst/playbooks_variables.rst
@@ -525,13 +525,13 @@ To configure fact caching using jsonfile, enable it in ansible.cfg as follows::
[defaults]
gathering = smart
fact_caching = jsonfile
- fact_caching_location = /path/to/cachedir
+ fact_caching_connection = /path/to/cachedir
fact_caching_timeout = 86400
# seconds
-`fact_caching_location` is a local filesystem path to a writeable
+`fact_caching_connection` is a local filesystem path to a writeable
directory (ansible will attempt to create the directory if one does not exist).
-
+
.. _registered_variables:
Registered Variables
diff --git a/docsite/rst/playbooks_vault.rst b/docsite/rst/playbooks_vault.rst
index cd294788b8..9dce71e2f8 100644
--- a/docsite/rst/playbooks_vault.rst
+++ b/docsite/rst/playbooks_vault.rst
@@ -5,7 +5,7 @@ Vault
New in Ansible 1.5, "Vault" is a feature of ansible that allows keeping sensitive data such as passwords or keys in encrypted files, rather than as plaintext in your playbooks or roles. These vault files can then be distributed or placed in source control.
-To enable this feature, a command line tool, `ansible-vault` is used to edit files, and a command line flag `--ask-vault-pass` or `--vault-password-file` is used. Alternately, you may specify the location of a password file in your ansible.cfg file. This option requires no command line flag usage.
+To enable this feature, a command line tool, `ansible-vault` is used to edit files, and a command line flag `--ask-vault-pass` or `--vault-password-file` is used. Alternately, you may specify the location of a password file or command Ansible to always prompt for the password in your ansible.cfg file. These options require no command line flag usage.
.. _what_can_be_encrypted_with_vault:
diff --git a/examples/ansible.cfg b/examples/ansible.cfg
index b6845c0703..4ab8cca172 100644
--- a/examples/ansible.cfg
+++ b/examples/ansible.cfg
@@ -46,8 +46,9 @@ gathering = implicit
# change this for alternative sudo implementations
sudo_exe = sudo
-# what flags to pass to sudo
-#sudo_flags = -H
+# What flags to pass to sudo
+# WARNING: leaving out the defaults might create unexpected behaviours
+#sudo_flags = -H -k
# SSH timeout
timeout = 10
diff --git a/hacking/env-setup b/hacking/env-setup
index 7b48db28db..8ba483279b 100644
--- a/hacking/env-setup
+++ b/hacking/env-setup
@@ -25,7 +25,7 @@ fi
# The below is an alternative to readlink -fn which doesn't exist on OS X
# Source: http://stackoverflow.com/a/1678636
FULL_PATH=$(python -c "import os; print(os.path.realpath('$HACKING_DIR'))")
-ANSIBLE_HOME=$(dirname "$FULL_PATH")
+export ANSIBLE_HOME=$(dirname "$FULL_PATH")
PREFIX_PYTHONPATH="$ANSIBLE_HOME/lib"
PREFIX_PATH="$ANSIBLE_HOME/bin"
@@ -43,7 +43,7 @@ expr "$MANPATH" : "${PREFIX_MANPATH}.*" > /dev/null || export MANPATH="$PREFIX_M
gen_egg_info()
{
if [ -e "$PREFIX_PYTHONPATH/ansible.egg-info" ] ; then
- rm -r "$PREFIX_PYTHONPATH/ansible.egg-info"
+ \rm -r "$PREFIX_PYTHONPATH/ansible.egg-info"
fi
python setup.py egg_info
}
diff --git a/hacking/module_formatter.py b/hacking/module_formatter.py
index 31fea52973..30b8d6a103 100755
--- a/hacking/module_formatter.py
+++ b/hacking/module_formatter.py
@@ -36,6 +36,7 @@ from jinja2 import Environment, FileSystemLoader
from ansible.utils import module_docs
from ansible.utils.vars import merge_hash
+from ansible.errors import AnsibleError
#####################################################################################
# constants and paths
@@ -67,11 +68,14 @@ NOTCORE = " (E)"
def rst_ify(text):
''' convert symbols like I(this is in italics) to valid restructured text '''
- t = _ITALIC.sub(r'*' + r"\1" + r"*", text)
- t = _BOLD.sub(r'**' + r"\1" + r"**", t)
- t = _MODULE.sub(r':ref:`' + r"\1 <\1>" + r"`", t)
- t = _URL.sub(r"\1", t)
- t = _CONST.sub(r'``' + r"\1" + r"``", t)
+ try:
+ t = _ITALIC.sub(r'*' + r"\1" + r"*", text)
+ t = _BOLD.sub(r'**' + r"\1" + r"**", t)
+ t = _MODULE.sub(r':ref:`' + r"\1 <\1>" + r"`", t)
+ t = _URL.sub(r"\1", t)
+ t = _CONST.sub(r'``' + r"\1" + r"``", t)
+ except Exception as e:
+ raise AnsibleError("Could not process (%s) : %s" % (str(text), str(e)))
return t
@@ -286,7 +290,7 @@ def process_module(module, options, env, template, outputname, module_map, alias
if too_old(added):
del doc['version_added']
- if 'options' in doc:
+ if 'options' in doc and doc['options']:
for (k,v) in doc['options'].iteritems():
# don't show version added information if it's too old to be called out
if 'version_added' in doc['options'][k] and too_old(doc['options'][k]['version_added']):
@@ -308,7 +312,10 @@ def process_module(module, options, env, template, outputname, module_map, alias
# here is where we build the table of contents...
- text = template.render(doc)
+ try:
+ text = template.render(doc)
+ except Exception as e:
+ raise AnsibleError("Failed to render doc for %s: %s" % (fname, str(e)))
write_data(text, options, outputname, module)
return doc['short_description']
@@ -326,7 +333,7 @@ def print_modules(module, category_file, deprecated, core, options, env, templat
result = process_module(modname, options, env, template, outputname, module_map, aliases)
if result != "SKIPPED":
- category_file.write(" %s - %s <%s_module>\n" % (modstring, result, module))
+ category_file.write(" %s - %s <%s_module>\n" % (modstring, rst_ify(result), module))
def process_category(category, categories, options, env, template, outputname):
diff --git a/hacking/test-module b/hacking/test-module
index 4428264c66..bdb91d0d5b 100755
--- a/hacking/test-module
+++ b/hacking/test-module
@@ -112,7 +112,11 @@ def boilerplate_module(modfile, args, interpreter, check, destfile):
complex_args = utils_vars.combine_vars(complex_args, loader.load(args))
args=''
- inject = {}
+ if args:
+ parsed_args = parse_kv(args)
+ complex_args = utils_vars.combine_vars(complex_args, parsed_args)
+
+ task_vars = {}
if interpreter:
if '=' not in interpreter:
print 'interpreter must by in the form of ansible_python_interpreter=/usr/bin/python'
@@ -122,16 +126,15 @@ def boilerplate_module(modfile, args, interpreter, check, destfile):
interpreter_type = 'ansible_%s' % interpreter_type
if not interpreter_type.endswith('_interpreter'):
interpreter_type = '%s_interpreter' % interpreter_type
- inject[interpreter_type] = interpreter_path
+ task_vars[interpreter_type] = interpreter_path
if check:
complex_args['CHECKMODE'] = True
(module_data, module_style, shebang) = module_common.modify_module(
- modfile,
+ modfile,
complex_args,
- args,
- inject
+ task_vars=task_vars
)
modfile2_path = os.path.expanduser(destfile)
diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py
index e66d14adf3..0308cbedd2 100644
--- a/lib/ansible/cli/__init__.py
+++ b/lib/ansible/cli/__init__.py
@@ -102,7 +102,10 @@ class CLI(object):
def run(self):
if self.options.verbosity > 0:
- self.display.display("Using %s as config file" % C.CONFIG_FILE)
+ if C.CONFIG_FILE:
+ self.display.display("Using %s as config file" % C.CONFIG_FILE)
+ else:
+ self.display.display("No config file found; using defaults")
@staticmethod
def ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=False, confirm_vault=False, confirm_new=False):
@@ -183,7 +186,7 @@ class CLI(object):
self.options.become_method = 'su'
- def validate_conflicts(self, vault_opts=False, runas_opts=False):
+ def validate_conflicts(self, vault_opts=False, runas_opts=False, fork_opts=False):
''' check for conflicting options '''
op = self.options
@@ -208,13 +211,17 @@ class CLI(object):
"and become arguments ('--become', '--become-user', and '--ask-become-pass')"
" are exclusive of each other")
+ if fork_opts:
+ if op.forks < 1:
+ self.parser.error("The number of processes (--forks) must be >= 1")
+
@staticmethod
def expand_tilde(option, opt, value, parser):
setattr(parser.values, option.dest, os.path.expanduser(value))
@staticmethod
def base_parser(usage="", output_opts=False, runas_opts=False, meta_opts=False, runtask_opts=False, vault_opts=False,
- async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, diff_opts=False, epilog=None, fork_opts=False):
+ async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, inventory_opts=False, epilog=None, fork_opts=False):
''' create an options parser for most ansible scripts '''
#FIXME: implemente epilog parsing
@@ -225,12 +232,16 @@ class CLI(object):
parser.add_option('-v','--verbose', dest='verbosity', default=0, action="count",
help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")
- if runtask_opts:
+ if inventory_opts:
parser.add_option('-i', '--inventory-file', dest='inventory',
help="specify inventory host file (default=%s)" % C.DEFAULT_HOST_LIST,
default=C.DEFAULT_HOST_LIST, action="callback", callback=CLI.expand_tilde, type=str)
parser.add_option('--list-hosts', dest='listhosts', action='store_true',
help='outputs a list of matching hosts; does not execute anything else')
+ parser.add_option('-l', '--limit', default=C.DEFAULT_SUBSET, dest='subset',
+ help='further limit selected hosts to an additional pattern')
+
+ if runtask_opts:
parser.add_option('-M', '--module-path', dest='module_path',
help="specify path(s) to module library (default=%s)" % C.DEFAULT_MODULE_PATH, default=None,
action="callback", callback=CLI.expand_tilde, type=str)
@@ -240,8 +251,6 @@ class CLI(object):
if fork_opts:
parser.add_option('-f','--forks', dest='forks', default=C.DEFAULT_FORKS, type='int',
help="specify number of parallel processes to use (default=%s)" % C.DEFAULT_FORKS)
- parser.add_option('-l', '--limit', default=C.DEFAULT_SUBSET, dest='subset',
- help='further limit selected hosts to an additional pattern')
if vault_opts:
parser.add_option('--ask-vault-pass', default=False, dest='ask_vault_pass', action='store_true',
@@ -301,8 +310,7 @@ class CLI(object):
help="override the connection timeout in seconds (default=%s)" % C.DEFAULT_TIMEOUT)
if async_opts:
- parser.add_option('-P', '--poll', default=C.DEFAULT_POLL_INTERVAL, type='int',
- dest='poll_interval',
+ parser.add_option('-P', '--poll', default=C.DEFAULT_POLL_INTERVAL, type='int', dest='poll_interval',
help="set the poll interval if using -B (default=%s)" % C.DEFAULT_POLL_INTERVAL)
parser.add_option('-B', '--background', dest='seconds', type='int', default=0,
help='run asynchronously, failing after X seconds (default=N/A)')
@@ -312,11 +320,8 @@ class CLI(object):
help="don't make any changes; instead, try to predict some of the changes that may occur")
parser.add_option('--syntax-check', dest='syntax', action='store_true',
help="perform a syntax check on the playbook, but do not execute it")
-
- if diff_opts:
parser.add_option("-D", "--diff", default=False, dest='diff', action='store_true',
- help="when changing (small) files and templates, show the differences in those files; works great with --check"
- )
+ help="when changing (small) files and templates, show the differences in those files; works great with --check")
if meta_opts:
parser.add_option('--force-handlers', default=C.DEFAULT_FORCE_HANDLERS, dest='force_handlers', action='store_true',
@@ -425,21 +430,20 @@ class CLI(object):
return result
- @staticmethod
- def pager(text):
+ def pager(self, text):
''' find reasonable way to display text '''
# this is a much simpler form of what is in pydoc.py
if not sys.stdout.isatty():
- print(text)
+ self.display.display(text)
elif 'PAGER' in os.environ:
if sys.platform == 'win32':
- print(text)
+ self.display.display(text)
else:
- CLI.pager_pipe(text, os.environ['PAGER'])
+ self.pager_pipe(text, os.environ['PAGER'])
elif subprocess.call('(less --version) 2> /dev/null', shell = True) == 0:
- CLI.pager_pipe(text, 'less')
+ self.pager_pipe(text, 'less')
else:
- print(text)
+ self.display.display(text)
@staticmethod
def pager_pipe(text, cmd):
@@ -448,7 +452,7 @@ class CLI(object):
os.environ['LESS'] = CLI.LESS_OPTS
try:
cmd = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=sys.stdout)
- cmd.communicate(input=text)
+ cmd.communicate(input=text.encode(sys.stdout.encoding))
except IOError:
pass
except KeyboardInterrupt:
diff --git a/lib/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py
index 9d1ab681da..58180e3ccd 100644
--- a/lib/ansible/cli/adhoc.py
+++ b/lib/ansible/cli/adhoc.py
@@ -38,6 +38,7 @@ class AdHocCLI(CLI):
self.parser = CLI.base_parser(
usage='%prog <host-pattern> [options]',
runas_opts=True,
+ inventory_opts=True,
async_opts=True,
output_opts=True,
connect_opts=True,
@@ -60,16 +61,16 @@ class AdHocCLI(CLI):
raise AnsibleOptionsError("Missing target hosts")
self.display.verbosity = self.options.verbosity
- self.validate_conflicts(runas_opts=True, vault_opts=True)
+ self.validate_conflicts(runas_opts=True, vault_opts=True, fork_opts=True)
return True
- def _play_ds(self, pattern):
+ def _play_ds(self, pattern, async, poll):
return dict(
name = "Ansible Ad-Hoc",
hosts = pattern,
gather_facts = 'no',
- tasks = [ dict(action=dict(module=self.options.module_name, args=parse_kv(self.options.module_args))), ]
+ tasks = [ dict(action=dict(module=self.options.module_name, args=parse_kv(self.options.module_args)), async=async, poll=poll) ]
)
def run(self):
@@ -111,6 +112,7 @@ class AdHocCLI(CLI):
self.display.warning("provided hosts list is empty, only localhost is available")
if self.options.listhosts:
+ self.display.display(' hosts (%d):' % len(hosts))
for host in hosts:
self.display.display(' %s' % host)
return 0
@@ -121,16 +123,7 @@ class AdHocCLI(CLI):
err = err + ' (did you mean to run ansible-playbook?)'
raise AnsibleOptionsError(err)
- #TODO: implement async support
- #if self.options.seconds:
- # callbacks.display("background launch...\n\n", color='cyan')
- # results, poller = runner.run_async(self.options.seconds)
- # results = self.poll_while_needed(poller)
- #else:
- # results = runner.run()
-
- # create a pseudo-play to execute the specified module via a single task
- play_ds = self._play_ds(pattern)
+ play_ds = self._play_ds(pattern, self.options.seconds, self.options.poll_interval)
play = Play().load(play_ds, variable_manager=variable_manager, loader=loader)
if self.options.one_line:
@@ -138,6 +131,10 @@ class AdHocCLI(CLI):
else:
cb = 'minimal'
+ if self.options.tree:
+ C.DEFAULT_CALLBACK_WHITELIST.append('tree')
+ C.TREE_DIR = self.options.tree
+
# now create a task queue manager to execute the play
self._tqm = None
try:
@@ -156,15 +153,3 @@ class AdHocCLI(CLI):
self._tqm.cleanup()
return result
-
- # ----------------------------------------------
-
- def poll_while_needed(self, poller):
- ''' summarize results from Runner '''
-
- # BACKGROUND POLL LOGIC when -B and -P are specified
- if self.options.seconds and self.options.poll_interval > 0:
- poller.wait(self.options.seconds, self.options.poll_interval)
-
- return poller.results
-
diff --git a/lib/ansible/cli/doc.py b/lib/ansible/cli/doc.py
index 910255cda7..dfdb8583ec 100644
--- a/lib/ansible/cli/doc.py
+++ b/lib/ansible/cli/doc.py
@@ -73,7 +73,7 @@ class DocCLI(CLI):
for path in paths:
self.find_modules(path)
- CLI.pager(self.get_module_list_text())
+ self.pager(self.get_module_list_text())
return 0
if len(self.args) == 0:
@@ -125,7 +125,7 @@ class DocCLI(CLI):
self.display.vvv(traceback.print_exc())
raise AnsibleError("module %s missing documentation (or could not parse documentation): %s\n" % (module, str(e)))
- CLI.pager(text)
+ self.pager(text)
return 0
def find_modules(self, path):
diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py
index 8347655568..ddc10794c0 100644
--- a/lib/ansible/cli/galaxy.py
+++ b/lib/ansible/cli/galaxy.py
@@ -154,8 +154,7 @@ class GalaxyCLI(CLI):
option --ignore-errors was specified
"""
if not self.get_opt("ignore_errors", False):
- self.display.error('- you can use --ignore-errors to skip failed roles and finish processing the list.')
- return rc
+ raise AnsibleError('- you can use --ignore-errors to skip failed roles and finish processing the list.')
def execute_init(self):
"""
@@ -321,14 +320,25 @@ class GalaxyCLI(CLI):
roles_done = []
roles_left = []
if role_file:
- f = open(role_file, 'r')
- if role_file.endswith('.yaml') or role_file.endswith('.yml'):
- roles_left = map(ansible.utils.role_yaml_parse, yaml.safe_load(f))
- else:
- # roles listed in a file, one per line
- for rname in f.readlines():
- roles_left.append(GalaxyRole(self.galaxy, rname.strip()))
- f.close()
+ self.display.debug('Getting roles from %s' % role_file)
+ try:
+ f = open(role_file, 'r')
+ if role_file.endswith('.yaml') or role_file.endswith('.yml'):
+ try:
+ rolesparsed = map(self.parse_requirements_files, yaml.safe_load(f))
+ except Exception as e:
+ raise AnsibleError("%s does not seem like a valid yaml file: %s" % (role_file, str(e)))
+ roles_left = [GalaxyRole(self.galaxy, **r) for r in rolesparsed]
+ else:
+ # roles listed in a file, one per line
+ self.display.deprecated("Non yaml files for role requirements")
+ for rname in f.readlines():
+ if rname.startswith("#") or rname.strip() == '':
+ continue
+ roles_left.append(GalaxyRole(self.galaxy, rname.strip()))
+ f.close()
+ except (IOError,OSError) as e:
+ raise AnsibleError("Unable to read requirements file (%s): %s" % (role_file, str(e)))
else:
# roles were specified directly, so we'll just go out grab them
# (and their dependencies, unless the user doesn't want us to).
@@ -341,6 +351,8 @@ class GalaxyCLI(CLI):
role = roles_left.pop(0)
role_path = role.path
+ self.display.debug('Installing role %s' % role_path)
+
if role_path:
self.options.roles_path = role_path
else:
@@ -391,27 +403,26 @@ class GalaxyCLI(CLI):
if tmp_file != role.src:
os.unlink(tmp_file)
# install dependencies, if we want them
-
- # this should use new roledepenencies code
- #if not no_deps and installed:
- # if not role_data:
- # role_data = gr.get_metadata(role.get("name"), options)
- # role_dependencies = role_data['dependencies']
- # else:
- # role_dependencies = role_data['summary_fields']['dependencies'] # api_fetch_role_related(api_server, 'dependencies', role_data['id'])
- # for dep in role_dependencies:
- # if isinstance(dep, basestring):
- # dep = ansible.utils.role_spec_parse(dep)
- # else:
- # dep = ansible.utils.role_yaml_parse(dep)
- # if not get_role_metadata(dep["name"], options):
- # if dep not in roles_left:
- # print '- adding dependency: %s' % dep["name"]
- # roles_left.append(dep)
- # else:
- # print '- dependency %s already pending installation.' % dep["name"]
- # else:
- # print '- dependency %s is already installed, skipping.' % dep["name"]
+ if not no_deps and installed:
+ if not role_data:
+ role_data = gr.get_metadata(role.get("name"), options)
+ role_dependencies = role_data['dependencies']
+ else:
+ role_dependencies = role_data['summary_fields']['dependencies'] # api_fetch_role_related(api_server, 'dependencies', role_data['id'])
+ for dep in role_dependencies:
+ self.display.debug('Installing dep %s' % dep)
+ if isinstance(dep, basestring):
+ dep = ansible.utils.role_spec_parse(dep)
+ else:
+ dep = ansible.utils.role_yaml_parse(dep)
+ if not get_role_metadata(dep["name"], options):
+ if dep not in roles_left:
+ self.display.display('- adding dependency: %s' % dep["name"])
+ roles_left.append(dep)
+ else:
+ self.display.display('- dependency %s already pending installation.' % dep["name"])
+ else:
+ self.display.display('- dependency %s is already installed, skipping.' % dep["name"])
if not tmp_file or not installed:
self.display.warning("- %s was NOT installed successfully." % role.name)
@@ -485,3 +496,36 @@ class GalaxyCLI(CLI):
version = "(unknown version)"
self.display.display("- %s, %s" % (path_file, version))
return 0
+
+ def parse_requirements_files(self, role):
+ if 'role' in role:
+ # Old style: {role: "galaxy.role,version,name", other_vars: "here" }
+ role_info = role_spec_parse(role['role'])
+ if isinstance(role_info, dict):
+ # Warning: Slight change in behaviour here. name may be being
+ # overloaded. Previously, name was only a parameter to the role.
+ # Now it is both a parameter to the role and the name that
+ # ansible-galaxy will install under on the local system.
+ if 'name' in role and 'name' in role_info:
+ del role_info['name']
+ role.update(role_info)
+ else:
+ # New style: { src: 'galaxy.role,version,name', other_vars: "here" }
+ if 'github.com' in role["src"] and 'http' in role["src"] and '+' not in role["src"] and not role["src"].endswith('.tar.gz'):
+ role["src"] = "git+" + role["src"]
+
+ if '+' in role["src"]:
+ (scm, src) = role["src"].split('+')
+ role["scm"] = scm
+ role["src"] = src
+
+ if 'name' not in role:
+ role["name"] = GalaxyRole.url_to_spec(role["src"])
+
+ if 'version' not in role:
+ role['version'] = ''
+
+ if 'scm' not in role:
+ role['scm'] = None
+
+ return role
diff --git a/lib/ansible/cli/playbook.py b/lib/ansible/cli/playbook.py
index 2fa13aae25..156477ddb0 100644
--- a/lib/ansible/cli/playbook.py
+++ b/lib/ansible/cli/playbook.py
@@ -20,19 +20,12 @@
########################################################
import os
import stat
-import sys
-from ansible import constants as C
from ansible.cli import CLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.inventory import Inventory
from ansible.parsing import DataLoader
-from ansible.parsing.splitter import parse_kv
-from ansible.playbook import Playbook
-from ansible.playbook.task import Task
-from ansible.utils.display import Display
-from ansible.utils.unicode import to_unicode
from ansible.utils.vars import load_extra_vars
from ansible.vars import VariableManager
@@ -51,7 +44,7 @@ class PlaybookCLI(CLI):
runas_opts=True,
subset_opts=True,
check_opts=True,
- diff_opts=True,
+ inventory_opts=True,
runtask_opts=True,
vault_opts=True,
fork_opts=True,
@@ -76,7 +69,7 @@ class PlaybookCLI(CLI):
raise AnsibleOptionsError("You must specify a playbook file to run")
self.display.verbosity = self.options.verbosity
- self.validate_conflicts(runas_opts=True, vault_opts=True)
+ self.validate_conflicts(runas_opts=True, vault_opts=True, fork_opts=True)
def run(self):
@@ -103,12 +96,6 @@ class PlaybookCLI(CLI):
loader = DataLoader(vault_password=vault_pass)
- # FIXME: this should be moved inside the playbook executor code
- only_tags = self.options.tags.split(",")
- skip_tags = self.options.skip_tags
- if self.options.skip_tags is not None:
- skip_tags = self.options.skip_tags.split(",")
-
# initial error check, to make sure all specified playbooks are accessible
# before we start running anything through the playbook executor
for playbook in self.args:
@@ -150,20 +137,45 @@ class PlaybookCLI(CLI):
if isinstance(results, list):
for p in results:
- self.display.display('\nplaybook: %s\n' % p['playbook'])
+ self.display.display('\nplaybook: %s' % p['playbook'])
+ i = 1
for play in p['plays']:
+ if play.name:
+ playname = play.name
+ else:
+ playname = '#' + str(i)
+
+ msg = "\n PLAY: %s" % (playname)
+ mytags = set()
+ if self.options.listtags and play.tags:
+ mytags = mytags.union(set(play.tags))
+ msg += ' TAGS: [%s]' % (','.join(mytags))
+
if self.options.listhosts:
- self.display.display("\n %s (%s): host count=%d" % (play['name'], play['pattern'], len(play['hosts'])))
- for host in play['hosts']:
- self.display.display(" %s" % host)
- if self.options.listtasks: #TODO: do we want to display block info?
- self.display.display("\n %s" % (play['name']))
- for task in play['tasks']:
- self.display.display(" %s" % task)
- if self.options.listtags: #TODO: fix once we figure out block handling above
- self.display.display("\n %s: tags count=%d" % (play['name'], len(play['tags'])))
- for tag in play['tags']:
- self.display.display(" %s" % tag)
+ playhosts = set(inventory.get_hosts(play.hosts))
+ msg += "\n pattern: %s\n hosts (%d):" % (play.hosts, len(playhosts))
+ for host in playhosts:
+ msg += "\n %s" % host
+
+ self.display.display(msg)
+
+ if self.options.listtags or self.options.listtasks:
+ taskmsg = ' tasks:'
+
+ for block in play.compile():
+ if not block.has_tasks():
+ continue
+
+ j = 1
+ for task in block.block:
+ taskmsg += "\n %s" % task
+ if self.options.listtags and task.tags:
+ taskmsg += " TAGS: [%s]" % ','.join(mytags.union(set(task.tags)))
+ j = j + 1
+
+ self.display.display(taskmsg)
+
+ i = i + 1
return 0
else:
return results
diff --git a/lib/ansible/cli/pull.py b/lib/ansible/cli/pull.py
index 569d5299a6..01e3b90bff 100644
--- a/lib/ansible/cli/pull.py
+++ b/lib/ansible/cli/pull.py
@@ -18,16 +18,16 @@
########################################################
import datetime
import os
+import platform
import random
import shutil
import socket
import sys
+import time
-from ansible import constants as C
-from ansible.errors import AnsibleError, AnsibleOptionsError
+from ansible.errors import AnsibleOptionsError
from ansible.cli import CLI
from ansible.plugins import module_loader
-from ansible.utils.display import Display
from ansible.utils.cmd_functions import run_cmd
########################################################
@@ -51,6 +51,8 @@ class PullCLI(CLI):
connect_opts=True,
vault_opts=True,
runtask_opts=True,
+ subset_opts=True,
+ inventory_opts=True,
)
# options unique to pull
@@ -106,7 +108,9 @@ class PullCLI(CLI):
# Build Checkout command
# Now construct the ansible command
- limit_opts = 'localhost:%s:127.0.0.1' % socket.getfqdn()
+ node = platform.node()
+ host = socket.getfqdn()
+ limit_opts = 'localhost:%s:127.0.0.1' % ':'.join(set([host, node, host.split('.')[0], node.split('.')[0]]))
base_opts = '-c local "%s"' % limit_opts
if self.options.verbosity > 0:
base_opts += ' -%s' % ''.join([ "v" for x in range(0, self.options.verbosity) ])
@@ -179,6 +183,8 @@ class PullCLI(CLI):
cmd += ' -K'
if self.options.tags:
cmd += ' -t "%s"' % self.options.tags
+ if self.options.limit:
+ cmd += ' -l "%s"' % self.options.limit
os.chdir(self.options.dest)
@@ -215,7 +221,7 @@ class PullCLI(CLI):
fqdn = socket.getfqdn()
hostpb = os.path.join(path, fqdn + '.yml')
shorthostpb = os.path.join(path, fqdn.split('.')[0] + '.yml')
- localpb = os.path.join(path, DEFAULT_PLAYBOOK)
+ localpb = os.path.join(path, self.DEFAULT_PLAYBOOK)
errors = []
for pb in [hostpb, shorthostpb, localpb]:
rc = self.try_playbook(pb)
diff --git a/lib/ansible/cli/vault.py b/lib/ansible/cli/vault.py
index 170f0cc1ca..8cf1bf7388 100644
--- a/lib/ansible/cli/vault.py
+++ b/lib/ansible/cli/vault.py
@@ -119,6 +119,9 @@ class VaultCLI(CLI):
self.display.display("Encryption successful")
def execute_rekey(self):
+ for f in self.args:
+ if not (os.path.isfile(f)):
+ raise AnsibleError(f + " does not exist")
__, new_password = self.ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=True, confirm_new=True)
for f in self.args:
diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py
index 6751afe54a..8cb3bcb2fb 100644
--- a/lib/ansible/constants.py
+++ b/lib/ansible/constants.py
@@ -40,7 +40,7 @@ def mk_boolean(value):
else:
return False
-def get_config(p, section, key, env_var, default, boolean=False, integer=False, floating=False, islist=False):
+def get_config(p, section, key, env_var, default, boolean=False, integer=False, floating=False, islist=False, isnone=False):
''' return a configuration variable with casting '''
value = _get_config(p, section, key, env_var, default)
if boolean:
@@ -53,6 +53,9 @@ def get_config(p, section, key, env_var, default, boolean=False, integer=False,
elif islist:
if isinstance(value, string_types):
value = [x.strip() for x in value.split(',')]
+ elif isnone:
+ if value == "None":
+ value = None
elif isinstance(value, string_types):
value = unquote(value)
return value
@@ -78,6 +81,8 @@ def load_config_file():
path0 = os.getenv("ANSIBLE_CONFIG", None)
if path0 is not None:
path0 = os.path.expanduser(path0)
+ if os.path.isdir(path0):
+ path0 += "/ansible.cfg"
path1 = os.getcwd() + "/ansible.cfg"
path2 = os.path.expanduser("~/.ansible.cfg")
path3 = "/etc/ansible/ansible.cfg"
@@ -109,9 +114,11 @@ YAML_FILENAME_EXTENSIONS = [ "", ".yml", ".yaml", ".json" ]
# sections in config file
DEFAULTS='defaults'
+DEPRECATED_HOST_LIST = shell_expand_path(get_config(p, DEFAULTS, 'hostfile', 'ANSIBLE_HOSTS', '/etc/ansible/hosts'))
+
# generally configurable things
DEFAULT_DEBUG = get_config(p, DEFAULTS, 'debug', 'ANSIBLE_DEBUG', False, boolean=True)
-DEFAULT_HOST_LIST = shell_expand_path(get_config(p, DEFAULTS, 'hostfile', 'ANSIBLE_HOSTS', get_config(p, DEFAULTS,'inventory','ANSIBLE_INVENTORY', '/etc/ansible/hosts')))
+DEFAULT_HOST_LIST = shell_expand_path(get_config(p, DEFAULTS,'inventory', 'ANSIBLE_INVENTORY', DEPRECATED_HOST_LIST))
DEFAULT_MODULE_PATH = get_config(p, DEFAULTS, 'library', 'ANSIBLE_LIBRARY', None)
DEFAULT_ROLES_PATH = shell_expand_path(get_config(p, DEFAULTS, 'roles_path', 'ANSIBLE_ROLES_PATH', '/etc/ansible/roles'))
DEFAULT_REMOTE_TMP = get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '$HOME/.ansible/tmp')
@@ -141,6 +148,7 @@ DEFAULT_EXECUTABLE = get_config(p, DEFAULTS, 'executable', 'ANSIBLE_EXECU
DEFAULT_GATHERING = get_config(p, DEFAULTS, 'gathering', 'ANSIBLE_GATHERING', 'implicit').lower()
DEFAULT_LOG_PATH = shell_expand_path(get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', ''))
DEFAULT_FORCE_HANDLERS = get_config(p, DEFAULTS, 'force_handlers', 'ANSIBLE_FORCE_HANDLERS', False, boolean=True)
+DEFAULT_INVENTORY_IGNORE = get_config(p, DEFAULTS, 'inventory_ignore_extensions', 'ANSIBLE_INVENTORY_IGNORE', ["~", ".orig", ".bak", ".ini", ".cfg", ".retry", ".pyc", ".pyo"], islist=True)
# selinux
DEFAULT_SELINUX_SPECIAL_FS = get_config(p, 'selinux', 'special_context_filesystems', None, 'fuse, nfs, vboxsf, ramfs', islist=True)
@@ -159,8 +167,8 @@ DEFAULT_SUDO_FLAGS = get_config(p, DEFAULTS, 'sudo_flags', 'ANSIBLE_SUDO_
DEFAULT_ASK_SUDO_PASS = get_config(p, DEFAULTS, 'ask_sudo_pass', 'ANSIBLE_ASK_SUDO_PASS', False, boolean=True)
# Become
-BECOME_ERROR_STRINGS = {'sudo': 'Sorry, try again.', 'su': 'Authentication failure', 'pbrun': '', 'pfexec': '', 'runas': ''} #FIXME: deal with i18n
-BECOME_METHODS = ['sudo','su','pbrun','pfexec','runas']
+BECOME_ERROR_STRINGS = {'sudo': 'Sorry, try again.', 'su': 'Authentication failure', 'pbrun': '', 'pfexec': '', 'runas': '', 'doas': 'Permission denied'} #FIXME: deal with i18n
+BECOME_METHODS = ['sudo','su','pbrun','pfexec','runas','doas']
DEFAULT_BECOME_METHOD = get_config(p, 'privilege_escalation', 'become_method', 'ANSIBLE_BECOME_METHOD','sudo' if DEFAULT_SUDO else 'su' if DEFAULT_SU else 'sudo' ).lower()
DEFAULT_BECOME = get_config(p, 'privilege_escalation', 'become', 'ANSIBLE_BECOME',False, boolean=True)
DEFAULT_BECOME_USER = get_config(p, 'privilege_escalation', 'become_user', 'ANSIBLE_BECOME_USER', 'root')
@@ -177,6 +185,7 @@ DEFAULT_CONNECTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'connection_plugins', '
DEFAULT_LOOKUP_PLUGIN_PATH = get_config(p, DEFAULTS, 'lookup_plugins', 'ANSIBLE_LOOKUP_PLUGINS', '~/.ansible/plugins/lookup_plugins:/usr/share/ansible_plugins/lookup_plugins')
DEFAULT_VARS_PLUGIN_PATH = get_config(p, DEFAULTS, 'vars_plugins', 'ANSIBLE_VARS_PLUGINS', '~/.ansible/plugins/vars_plugins:/usr/share/ansible_plugins/vars_plugins')
DEFAULT_FILTER_PLUGIN_PATH = get_config(p, DEFAULTS, 'filter_plugins', 'ANSIBLE_FILTER_PLUGINS', '~/.ansible/plugins/filter_plugins:/usr/share/ansible_plugins/filter_plugins')
+DEFAULT_TEST_PLUGIN_PATH = get_config(p, DEFAULTS, 'test_plugins', 'ANSIBLE_TEST_PLUGINS', '~/.ansible/plugins/test_plugins:/usr/share/ansible_plugins/test_plugins')
DEFAULT_STDOUT_CALLBACK = get_config(p, DEFAULTS, 'stdout_callback', 'ANSIBLE_STDOUT_CALLBACK', 'default')
CACHE_PLUGIN = get_config(p, DEFAULTS, 'fact_caching', 'ANSIBLE_CACHE_PLUGIN', 'memory')
@@ -194,11 +203,12 @@ HOST_KEY_CHECKING = get_config(p, DEFAULTS, 'host_key_checking', '
SYSTEM_WARNINGS = get_config(p, DEFAULTS, 'system_warnings', 'ANSIBLE_SYSTEM_WARNINGS', True, boolean=True)
DEPRECATION_WARNINGS = get_config(p, DEFAULTS, 'deprecation_warnings', 'ANSIBLE_DEPRECATION_WARNINGS', True, boolean=True)
DEFAULT_CALLABLE_WHITELIST = get_config(p, DEFAULTS, 'callable_whitelist', 'ANSIBLE_CALLABLE_WHITELIST', [], islist=True)
-COMMAND_WARNINGS = get_config(p, DEFAULTS, 'command_warnings', 'ANSIBLE_COMMAND_WARNINGS', False, boolean=True)
+COMMAND_WARNINGS = get_config(p, DEFAULTS, 'command_warnings', 'ANSIBLE_COMMAND_WARNINGS', True, boolean=True)
DEFAULT_LOAD_CALLBACK_PLUGINS = get_config(p, DEFAULTS, 'bin_ansible_callbacks', 'ANSIBLE_LOAD_CALLBACK_PLUGINS', False, boolean=True)
DEFAULT_CALLBACK_WHITELIST = get_config(p, DEFAULTS, 'callback_whitelist', 'ANSIBLE_CALLBACK_WHITELIST', [], islist=True)
RETRY_FILES_ENABLED = get_config(p, DEFAULTS, 'retry_files_enabled', 'ANSIBLE_RETRY_FILES_ENABLED', True, boolean=True)
RETRY_FILES_SAVE_PATH = get_config(p, DEFAULTS, 'retry_files_save_path', 'ANSIBLE_RETRY_FILES_SAVE_PATH', '~/')
+DEFAULT_NULL_REPRESENTATION = get_config(p, DEFAULTS, 'null_representation', 'ANSIBLE_NULL_REPRESENTATION', None, isnone=True)
# CONNECTION RELATED
ANSIBLE_SSH_ARGS = get_config(p, 'ssh_connection', 'ssh_args', 'ANSIBLE_SSH_ARGS', None)
@@ -238,3 +248,5 @@ DEFAULT_SU_PASS = None
VAULT_VERSION_MIN = 1.0
VAULT_VERSION_MAX = 1.0
MAX_FILE_SIZE_FOR_DIFF = 1*1024*1024
+TREE_DIR = None
+LOCALHOST = frozenset(['127.0.0.1', 'localhost', '::1'])
diff --git a/lib/ansible/executor/module_common.py b/lib/ansible/executor/module_common.py
index 85dcafb961..09fdaa46d8 100644
--- a/lib/ansible/executor/module_common.py
+++ b/lib/ansible/executor/module_common.py
@@ -37,6 +37,7 @@ REPLACER = "#<<INCLUDE_ANSIBLE_MODULE_COMMON>>"
REPLACER_ARGS = "\"<<INCLUDE_ANSIBLE_MODULE_ARGS>>\""
REPLACER_COMPLEX = "\"<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>\""
REPLACER_WINDOWS = "# POWERSHELL_COMMON"
+REPLACER_WINARGS = "<<INCLUDE_ANSIBLE_MODULE_WINDOWS_ARGS>>"
REPLACER_VERSION = "\"<<ANSIBLE_VERSION>>\""
# We could end up writing out parameters with unicode characters so we need to
@@ -65,6 +66,8 @@ def _find_snippet_imports(module_data, module_path, strip_comments):
module_style = 'old'
if REPLACER in module_data:
module_style = 'new'
+ elif REPLACER_WINDOWS in module_data:
+ module_style = 'new'
elif 'from ansible.module_utils.' in module_data:
module_style = 'new'
elif 'WANT_JSON' in module_data:
@@ -165,6 +168,7 @@ def modify_module(module_path, module_args, task_vars=dict(), strip_comments=Fal
# these strings should be part of the 'basic' snippet which is required to be included
module_data = module_data.replace(REPLACER_VERSION, repr(__version__))
module_data = module_data.replace(REPLACER_COMPLEX, encoded_args)
+ module_data = module_data.replace(REPLACER_WINARGS, module_args_json.encode('utf-8'))
if module_style == 'new':
facility = C.DEFAULT_SYSLOG_FACILITY
diff --git a/lib/ansible/executor/play_iterator.py b/lib/ansible/executor/play_iterator.py
index 45089d19d2..c6870e0224 100644
--- a/lib/ansible/executor/play_iterator.py
+++ b/lib/ansible/executor/play_iterator.py
@@ -130,7 +130,7 @@ class PlayIterator:
task = None
if s.run_state == self.ITERATING_COMPLETE:
- return None
+ return (None, None)
elif s.run_state == self.ITERATING_SETUP:
s.run_state = self.ITERATING_TASKS
s.pending_setup = True
@@ -162,8 +162,8 @@ class PlayIterator:
if task and task._role:
# if we had a current role, mark that role as completed
- if s.cur_role and task._role != s.cur_role and s.cur_role._had_task_run and not peek:
- s.cur_role._completed = True
+ if s.cur_role and task._role != s.cur_role and host.name in s.cur_role._had_task_run and not peek:
+ s.cur_role._completed[host.name] = True
s.cur_role = task._role
if not peek:
diff --git a/lib/ansible/executor/playbook_executor.py b/lib/ansible/executor/playbook_executor.py
index 9c9df6499d..96833d5c17 100644
--- a/lib/ansible/executor/playbook_executor.py
+++ b/lib/ansible/executor/playbook_executor.py
@@ -33,6 +33,8 @@ from ansible.template import Templar
from ansible.utils.color import colorize, hostcolor
from ansible.utils.debug import debug
+from ansible.utils.encrypt import do_encrypt
+from ansible.utils.unicode import to_unicode
class PlaybookExecutor:
@@ -121,29 +123,7 @@ class PlaybookExecutor:
if self._tqm is None:
# we are just doing a listing
-
- pname = new_play.get_name().strip()
- if pname == 'PLAY: <no name specified>':
- pname = 'PLAY: #%d' % i
- p = { 'name': pname }
-
- if self._options.listhosts:
- p['pattern']=play.hosts
- p['hosts']=set(self._inventory.get_hosts(new_play.hosts))
-
- #TODO: play tasks are really blocks, need to figure out how to get task objects from them
- elif self._options.listtasks:
- p['tasks'] = []
- for task in play.get_tasks():
- p['tasks'].append(task)
- #p['tasks'].append({'name': task.get_name().strip(), 'tags': task.tags})
-
- elif self._options.listtags:
- p['tags'] = set(new_play.tags)
- for task in play.get_tasks():
- p['tags'].update(task)
- #p['tags'].update(task.tags)
- entry['plays'].append(p)
+ entry['plays'].append(new_play)
else:
# make sure the tqm has callbacks loaded
@@ -258,46 +238,48 @@ class PlaybookExecutor:
def _do_var_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None):
- if prompt and default is not None:
- msg = "%s [%s]: " % (prompt, default)
- elif prompt:
- msg = "%s: " % prompt
- else:
- msg = 'input for %s: ' % varname
-
- def do_prompt(prompt, private):
- if sys.stdout.encoding:
- msg = prompt.encode(sys.stdout.encoding)
+ if sys.__stdin__.isatty():
+ if prompt and default is not None:
+ msg = "%s [%s]: " % (prompt, default)
+ elif prompt:
+ msg = "%s: " % prompt
+ else:
+ msg = 'input for %s: ' % varname
+
+ def do_prompt(prompt, private):
+ if sys.stdout.encoding:
+ msg = prompt.encode(sys.stdout.encoding)
+ else:
+ # when piping the output, or at other times when stdout
+ # may not be the standard file descriptor, the stdout
+ # encoding may not be set, so default to something sane
+ msg = prompt.encode(locale.getpreferredencoding())
+ if private:
+ return getpass.getpass(msg)
+ return raw_input(msg)
+
+ if confirm:
+ while True:
+ result = do_prompt(msg, private)
+ second = do_prompt("confirm " + msg, private)
+ if result == second:
+ break
+ self._display.display("***** VALUES ENTERED DO NOT MATCH ****")
else:
- # when piping the output, or at other times when stdout
- # may not be the standard file descriptor, the stdout
- # encoding may not be set, so default to something sane
- msg = prompt.encode(locale.getpreferredencoding())
- if private:
- return getpass.getpass(msg)
- return raw_input(msg)
-
- if confirm:
- while True:
result = do_prompt(msg, private)
- second = do_prompt("confirm " + msg, private)
- if result == second:
- break
- display("***** VALUES ENTERED DO NOT MATCH ****")
else:
- result = do_prompt(msg, private)
+ result = None
+ self._display.warning("Not prompting as we are not in interactive mode")
# if result is false and default is not None
if not result and default is not None:
result = default
- # FIXME: make this work with vault or whatever this old method was
- #if encrypt:
- # result = utils.do_encrypt(result, encrypt, salt_size, salt)
+ if encrypt:
+ result = do_encrypt(result, encrypt, salt_size, salt)
# handle utf-8 chars
- # FIXME: make this work
- #result = to_unicode(result, errors='strict')
+ result = to_unicode(result, errors='strict')
return result
diff --git a/lib/ansible/executor/process/result.py b/lib/ansible/executor/process/result.py
index 038a68fbef..c7027412ff 100644
--- a/lib/ansible/executor/process/result.py
+++ b/lib/ansible/executor/process/result.py
@@ -74,7 +74,7 @@ class ResultProcess(multiprocessing.Process):
try:
if not rslt_q.empty():
debug("worker %d has data to read" % self._cur_worker)
- result = rslt_q.get(block=False)
+ result = rslt_q.get()
debug("got a result from worker %d: %s" % (self._cur_worker, result))
break
except queue.Empty:
@@ -102,7 +102,7 @@ class ResultProcess(multiprocessing.Process):
try:
result = self._read_worker_result()
if result is None:
- time.sleep(0.1)
+ time.sleep(0.01)
continue
# if this task is registering a result, do it now
diff --git a/lib/ansible/executor/process/worker.py b/lib/ansible/executor/process/worker.py
index 4b2bd13b9c..d73434652a 100644
--- a/lib/ansible/executor/process/worker.py
+++ b/lib/ansible/executor/process/worker.py
@@ -92,40 +92,36 @@ class WorkerProcess(multiprocessing.Process):
while True:
task = None
try:
- if not self._main_q.empty():
- debug("there's work to be done!")
- (host, task, basedir, job_vars, play_context, shared_loader_obj) = self._main_q.get(block=False)
- debug("got a task/handler to work on: %s" % task)
-
- # because the task queue manager starts workers (forks) before the
- # playbook is loaded, set the basedir of the loader inherted by
- # this fork now so that we can find files correctly
- self._loader.set_basedir(basedir)
-
- # Serializing/deserializing tasks does not preserve the loader attribute,
- # since it is passed to the worker during the forking of the process and
- # would be wasteful to serialize. So we set it here on the task now, and
- # the task handles updating parent/child objects as needed.
- task.set_loader(self._loader)
-
- # apply the given task's information to the connection info,
- # which may override some fields already set by the play or
- # the options specified on the command line
- new_play_context = play_context.set_task_and_host_override(task=task, host=host)
-
- # execute the task and build a TaskResult from the result
- debug("running TaskExecutor() for %s/%s" % (host, task))
- executor_result = TaskExecutor(host, task, job_vars, new_play_context, self._new_stdin, self._loader, shared_loader_obj).run()
- debug("done running TaskExecutor() for %s/%s" % (host, task))
- task_result = TaskResult(host, task, executor_result)
-
- # put the result on the result queue
- debug("sending task result")
- self._rslt_q.put(task_result, block=False)
- debug("done sending task result")
-
- else:
- time.sleep(0.1)
+ (host, task, basedir, job_vars, play_context, shared_loader_obj) = self._main_q.get()
+ debug("there's work to be done!")
+ debug("got a task/handler to work on: %s" % task)
+
+ # because the task queue manager starts workers (forks) before the
+ # playbook is loaded, set the basedir of the loader inherted by
+ # this fork now so that we can find files correctly
+ self._loader.set_basedir(basedir)
+
+ # Serializing/deserializing tasks does not preserve the loader attribute,
+ # since it is passed to the worker during the forking of the process and
+ # would be wasteful to serialize. So we set it here on the task now, and
+ # the task handles updating parent/child objects as needed.
+ task.set_loader(self._loader)
+
+ # apply the given task's information to the connection info,
+ # which may override some fields already set by the play or
+ # the options specified on the command line
+ new_play_context = play_context.set_task_and_variable_override(task=task, variables=job_vars)
+
+ # execute the task and build a TaskResult from the result
+ debug("running TaskExecutor() for %s/%s" % (host, task))
+ executor_result = TaskExecutor(host, task, job_vars, new_play_context, self._new_stdin, self._loader, shared_loader_obj).run()
+ debug("done running TaskExecutor() for %s/%s" % (host, task))
+ task_result = TaskResult(host, task, executor_result)
+
+ # put the result on the result queue
+ debug("sending task result")
+ self._rslt_q.put(task_result)
+ debug("done sending task result")
except queue.Empty:
pass
diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py
index f9bc7cd2ed..6b9dceaefb 100644
--- a/lib/ansible/executor/task_executor.py
+++ b/lib/ansible/executor/task_executor.py
@@ -26,10 +26,10 @@ import sys
import time
from ansible import constants as C
-from ansible.errors import AnsibleError, AnsibleParserError
+from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable
from ansible.playbook.conditional import Conditional
from ansible.playbook.task import Task
-from ansible.plugins import lookup_loader, connection_loader, action_loader
+from ansible.plugins import connection_loader, action_loader
from ansible.template import Templar
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.unicode import to_unicode
@@ -127,6 +127,13 @@ class TaskExecutor:
return result
except AnsibleError, e:
return dict(failed=True, msg=to_unicode(e, nonstring='simplerepr'))
+ finally:
+ try:
+ self._connection.close()
+ except AttributeError:
+ pass
+ except Exception, e:
+ debug("error closing connection: %s" % to_unicode(e))
def _get_loop_items(self):
'''
@@ -134,10 +141,30 @@ class TaskExecutor:
and returns the items result.
'''
+ # create a copy of the job vars here so that we can modify
+ # them temporarily without changing them too early for other
+ # parts of the code that might still need a pristine version
+ vars_copy = self._job_vars.copy()
+
+ # now we update them with the play context vars
+ self._play_context.update_vars(vars_copy)
+
+ templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=vars_copy)
items = None
- if self._task.loop and self._task.loop in lookup_loader:
- loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, variables=self._job_vars, loader=self._loader)
- items = lookup_loader.get(self._task.loop, loader=self._loader).run(terms=loop_terms, variables=self._job_vars)
+ if self._task.loop:
+ if self._task.loop in self._shared_loader_obj.lookup_loader:
+ #TODO: remove convert_bare true and deprecate this in with_
+ try:
+ loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=True, convert_bare=True)
+ except AnsibleUndefinedVariable as e:
+ if 'has no attribute' in str(e):
+ loop_terms = []
+ self._display.deprecated("Skipping task due to undefined attribute, in the future this will be a fatal error.")
+ else:
+ raise
+ items = self._shared_loader_obj.lookup_loader.get(self._task.loop, loader=self._loader, templar=templar).run(terms=loop_terms, variables=vars_copy)
+ else:
+ raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % self._task.loop)
return items
@@ -327,9 +354,12 @@ class TaskExecutor:
result['failed_when_result'] = result['failed'] = failed_when_result
if failed_when_result:
break
- elif 'failed' not in result and result.get('rc', 0) == 0:
- # if the result is not failed, stop trying
- break
+ elif 'failed' not in result:
+ if result.get('rc', 0) != 0:
+ result['failed'] = True
+ else:
+ # if the result is not failed, stop trying
+ break
if attempt < retries - 1:
time.sleep(delay)
@@ -401,7 +431,6 @@ class TaskExecutor:
correct connection object from the list of connection plugins
'''
- # FIXME: delegate_to calculation should be done here
# FIXME: calculation of connection params/auth stuff should be done here
if not self._play_context.remote_addr:
@@ -467,30 +496,35 @@ class TaskExecutor:
# get the vars for the delegate by its name
try:
+ self._display.debug("Delegating to %s" % self._task.delegate_to)
this_info = variables['hostvars'][self._task.delegate_to]
# get the real ssh_address for the delegate and allow ansible_ssh_host to be templated
- #self._play_context.remote_user = self._compute_delegate_user(self.delegate_to, delegate['inject'])
self._play_context.remote_addr = this_info.get('ansible_ssh_host', self._task.delegate_to)
+ self._play_context.remote_user = this_info.get('ansible_remote_user', self._task.remote_user)
self._play_context.port = this_info.get('ansible_ssh_port', self._play_context.port)
self._play_context.password = this_info.get('ansible_ssh_pass', self._play_context.password)
self._play_context.private_key_file = this_info.get('ansible_ssh_private_key_file', self._play_context.private_key_file)
- self._play_context.connection = this_info.get('ansible_connection', C.DEFAULT_TRANSPORT)
self._play_context.become_pass = this_info.get('ansible_sudo_pass', self._play_context.become_pass)
- except:
+
+ conn = this_info.get('ansible_connection', self._task.connection)
+ if conn:
+ self._play_context.connection = conn
+
+ except Exception as e:
# make sure the inject is empty for non-inventory hosts
this_info = {}
-
- if self._play_context.remote_addr in ('127.0.0.1', 'localhost'):
- self._play_context.connection = 'local'
+ self._display.debug("Delegate due to: %s" % str(e))
# Last chance to get private_key_file from global variables.
# this is useful if delegated host is not defined in the inventory
- #if delegate['private_key_file'] is None:
- # delegate['private_key_file'] = remote_inject.get('ansible_ssh_private_key_file', None)
+ if self._play_context.private_key_file is None:
+ self._play_context.private_key_file = this_info.get('ansible_ssh_private_key_file', None)
- #if delegate['private_key_file'] is not None:
- # delegate['private_key_file'] = os.path.expanduser(delegate['private_key_file'])
+ if self._play_context.private_key_file is None:
+ key = this_info.get('private_key_file', None)
+ if key:
+ self._play_context.private_key_file = os.path.expanduser(key)
for i in this_info:
if i.startswith("ansible_") and i.endswith("_interpreter"):
diff --git a/lib/ansible/galaxy/role.py b/lib/ansible/galaxy/role.py
index d618e953fd..3a58ccb6d1 100644
--- a/lib/ansible/galaxy/role.py
+++ b/lib/ansible/galaxy/role.py
@@ -182,28 +182,30 @@ class GalaxyRole(object):
"""
Downloads the archived role from github to a temp location
"""
+ if role_data:
- # first grab the file and save it to a temp location
- if "github_user" in role_data and "github_repo" in role_data:
- archive_url = 'https://github.com/%s/%s/archive/%s.tar.gz' % (role_data["github_user"], role_data["github_repo"], self.version)
- else:
- archive_url = self.src
- self.display.display("- downloading role from %s" % archive_url)
+ # first grab the file and save it to a temp location
+ if "github_user" in role_data and "github_repo" in role_data:
+ archive_url = 'https://github.com/%s/%s/archive/%s.tar.gz' % (role_data["github_user"], role_data["github_repo"], self.version)
+ else:
+ archive_url = self.src
+ self.display.display("- downloading role from %s" % archive_url)
- try:
- url_file = urlopen(archive_url)
- temp_file = tempfile.NamedTemporaryFile(delete=False)
- data = url_file.read()
- while data:
- temp_file.write(data)
+ try:
+ url_file = urlopen(archive_url)
+ temp_file = tempfile.NamedTemporaryFile(delete=False)
data = url_file.read()
- temp_file.close()
- return temp_file.name
- except:
- # TODO: better urllib2 error handling for error
- # messages that are more exact
- self.display.error("failed to download the file.")
- return False
+ while data:
+ temp_file.write(data)
+ data = url_file.read()
+ temp_file.close()
+ return temp_file.name
+ except:
+ # TODO: better urllib2 error handling for error
+ # messages that are more exact
+ self.display.error("failed to download the file.")
+
+ return False
def install(self, role_filename):
# the file is a tar, so open it that way and extract it
@@ -292,3 +294,21 @@ class GalaxyRole(object):
}
"""
return dict(scm=self.scm, src=self.src, version=self.version, name=self.name)
+
+
+ @staticmethod
+ def url_to_spec(roleurl):
+ # gets the role name out of a repo like
+ # http://git.example.com/repos/repo.git" => "repo"
+
+ if '://' not in roleurl and '@' not in roleurl:
+ return roleurl
+ trailing_path = roleurl.split('/')[-1]
+ if trailing_path.endswith('.git'):
+ trailing_path = trailing_path[:-4]
+ if trailing_path.endswith('.tar.gz'):
+ trailing_path = trailing_path[:-7]
+ if ',' in trailing_path:
+ trailing_path = trailing_path.split(',')[0]
+ return trailing_path
+
diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py
index e647170882..d25409b0ae 100644
--- a/lib/ansible/inventory/__init__.py
+++ b/lib/ansible/inventory/__init__.py
@@ -24,26 +24,28 @@ import os
import sys
import re
import stat
-import subprocess
from ansible import constants as C
-from ansible import errors
+from ansible.errors import AnsibleError
-from ansible.inventory.ini import InventoryParser
-from ansible.inventory.script import InventoryScript
-from ansible.inventory.dir import InventoryDirectory
+from ansible.inventory.dir import InventoryDirectory, get_file_parser
from ansible.inventory.group import Group
from ansible.inventory.host import Host
from ansible.plugins import vars_loader
-from ansible.utils.path import is_executable
from ansible.utils.vars import combine_vars
+try:
+ from __main__ import display
+except ImportError:
+ from ansible.utils.display import Display
+ display = Display()
+
class Inventory(object):
"""
Host inventory for ansible.
"""
- #__slots__ = [ 'host_list', 'groups', '_restriction', '_also_restriction', '_subset',
+ #__slots__ = [ 'host_list', 'groups', '_restriction', '_subset',
# 'parser', '_vars_per_host', '_vars_per_group', '_hosts_cache', '_groups_list',
# '_pattern_cache', '_vault_password', '_vars_plugins', '_playbook_basedir']
@@ -63,6 +65,8 @@ class Inventory(object):
self._hosts_cache = {}
self._groups_list = {}
self._pattern_cache = {}
+ self._vars_plugins = []
+ self._groups_cache = {}
# to be set by calling set_playbook_basedir by playbook code
self._playbook_basedir = None
@@ -72,9 +76,12 @@ class Inventory(object):
# a list of host(names) to contain current inquiries to
self._restriction = None
- self._also_restriction = None
self._subset = None
+ self.parse_inventory(host_list)
+
+ def parse_inventory(self, host_list):
+
if isinstance(host_list, basestring):
if "," in host_list:
host_list = host_list.split(",")
@@ -102,62 +109,31 @@ class Inventory(object):
else:
all.add_host(Host(x))
elif os.path.exists(host_list):
+ #TODO: switch this to a plugin loader and a 'condition' per plugin on which it should be tried, restoring 'inventory pllugins'
if os.path.isdir(host_list):
# Ensure basedir is inside the directory
- self.host_list = os.path.join(self.host_list, "")
+ host_list = os.path.join(self.host_list, "")
self.parser = InventoryDirectory(loader=self._loader, filename=host_list)
- self.groups = self.parser.groups.values()
else:
- # check to see if the specified file starts with a
- # shebang (#!/), so if an error is raised by the parser
- # class we can show a more apropos error
- shebang_present = False
- try:
- with open(host_list, "r") as inv_file:
- first_line = inv_file.readline()
- if first_line.startswith("#!"):
- shebang_present = True
- except IOError:
- pass
-
- if is_executable(host_list):
- try:
- self.parser = InventoryScript(loader=self._loader, filename=host_list)
- self.groups = self.parser.groups.values()
- except errors.AnsibleError:
- if not shebang_present:
- raise errors.AnsibleError("The file %s is marked as executable, but failed to execute correctly. " % host_list + \
- "If this is not supposed to be an executable script, correct this with `chmod -x %s`." % host_list)
- else:
- raise
- else:
- try:
- self.parser = InventoryParser(filename=host_list)
- self.groups = self.parser.groups.values()
- except errors.AnsibleError:
- if shebang_present:
- raise errors.AnsibleError("The file %s looks like it should be an executable inventory script, but is not marked executable. " % host_list + \
- "Perhaps you want to correct this with `chmod +x %s`?" % host_list)
- else:
- raise
+ self.parser = get_file_parser(host_list, self._loader)
+ vars_loader.add_directory(self.basedir(), with_subdir=True)
- vars_loader.add_directory(self.basedir(), with_subdir=True)
- else:
- raise errors.AnsibleError("Unable to find an inventory file (%s), "
- "specify one with -i ?" % host_list)
+ if self.parser:
+ self.groups = self.parser.groups.values()
+ else:
+ # should never happen, but JIC
+ raise AnsibleError("Unable to parse %s as an inventory source" % host_list)
- self._vars_plugins = [ x for x in vars_loader.all(self) ]
+ self._vars_plugins = [ x for x in vars_loader.all(self) ]
# FIXME: shouldn't be required, since the group/host vars file
# management will be done in VariableManager
# get group vars from group_vars/ files and vars plugins
for group in self.groups:
- # FIXME: combine_vars
group.vars = combine_vars(group.vars, self.get_group_variables(group.name))
# get host vars from host_vars/ files and vars plugins
for host in self.get_hosts():
- # FIXME: combine_vars
host.vars = combine_vars(host.vars, self.get_host_variables(host.name))
@@ -168,7 +144,7 @@ class Inventory(object):
else:
return fnmatch.fnmatch(str, pattern_str)
except Exception, e:
- raise errors.AnsibleError('invalid host pattern: %s' % pattern_str)
+ raise AnsibleError('invalid host pattern: %s' % pattern_str)
def _match_list(self, items, item_attr, pattern_str):
results = []
@@ -178,42 +154,63 @@ class Inventory(object):
else:
pattern = re.compile(pattern_str[1:])
except Exception, e:
- raise errors.AnsibleError('invalid host pattern: %s' % pattern_str)
+ raise AnsibleError('invalid host pattern: %s' % pattern_str)
for item in items:
if pattern.match(getattr(item, item_attr)):
results.append(item)
return results
+ def _split_pattern(self, pattern):
+ """
+ takes e.g. "webservers[0:5]:dbservers:others"
+ and returns ["webservers[0:5]", "dbservers", "others"]
+ """
+
+ term = re.compile(
+ r'''(?: # We want to match something comprising:
+ [^:\[\]] # (anything other than ':', '[', or ']'
+ | # ...or...
+ \[[^\]]*\] # a single complete bracketed expression)
+ )* # repeated as many times as possible
+ ''', re.X
+ )
+
+ return [x for x in term.findall(pattern) if x]
+
def get_hosts(self, pattern="all"):
"""
- find all host names matching a pattern string, taking into account any inventory restrictions or
- applied subsets.
+ Takes a pattern or list of patterns and returns a list of matching
+ inventory host names, taking into account any active restrictions
+ or applied subsets
"""
- # process patterns
+ # Enumerate all hosts matching the given pattern (which may be
+ # either a list of patterns or a string like 'pat1:pat2').
if isinstance(pattern, list):
- pattern = ';'.join(pattern)
- patterns = pattern.replace(";",":").split(":")
- hosts = self._get_hosts(patterns)
+ pattern = ':'.join(pattern)
+
+ if ';' in pattern or ',' in pattern:
+ display.deprecated("Use ':' instead of ',' or ';' to separate host patterns", version=2.0, removed=True)
+
+ patterns = self._split_pattern(pattern)
+ hosts = self._evaluate_patterns(patterns)
# exclude hosts not in a subset, if defined
if self._subset:
- subset = self._get_hosts(self._subset)
+ subset = self._evaluate_patterns(self._subset)
hosts = [ h for h in hosts if h in subset ]
# exclude hosts mentioned in any restriction (ex: failed hosts)
if self._restriction is not None:
hosts = [ h for h in hosts if h in self._restriction ]
- if self._also_restriction is not None:
- hosts = [ h for h in hosts if h in self._also_restriction ]
return hosts
- def _get_hosts(self, patterns):
+ def _evaluate_patterns(self, patterns):
"""
- finds hosts that match a list of patterns. Handles negative
- matches as well as intersection matches.
+ Takes a list of patterns and returns a list of matching host names,
+ taking into account any negative and intersection patterns.
"""
# Host specifiers should be sorted to ensure consistent behavior
@@ -244,7 +241,7 @@ class Inventory(object):
if p in self._hosts_cache:
hosts.append(self.get_host(p))
else:
- that = self.__get_hosts(p)
+ that = self._match_one_pattern(p)
if p.startswith("!"):
hosts = [ h for h in hosts if h not in that ]
elif p.startswith("&"):
@@ -254,10 +251,11 @@ class Inventory(object):
hosts.extend(to_append)
return hosts
- def __get_hosts(self, pattern):
+ def _match_one_pattern(self, pattern):
"""
- finds hosts that positively match a particular pattern. Does not
- take into account negative matches.
+ Takes a single pattern (i.e., not "p1:p2") and returns a list of
+ matching hosts names. Does not take negatives or intersections
+ into account.
"""
if pattern in self._pattern_cache:
@@ -288,7 +286,7 @@ class Inventory(object):
first = int(first)
if last:
if first < 0:
- raise errors.AnsibleError("invalid range: negative indices cannot be used as the first item in a range")
+ raise AnsibleError("invalid range: negative indices cannot be used as the first item in a range")
last = int(last)
else:
last = first
@@ -326,7 +324,7 @@ class Inventory(object):
else:
return [ hosts[left] ]
except IndexError:
- raise errors.AnsibleError("no hosts matching the pattern '%s' were found" % pat)
+ raise AnsibleError("no hosts matching the pattern '%s' were found" % pat)
def _create_implicit_localhost(self, pattern):
new_host = Host(pattern)
@@ -353,7 +351,7 @@ class Inventory(object):
pattern = pattern.replace("!","").replace("&", "")
def __append_host_to_results(host):
- if host not in results and host.name not in hostnames:
+ if host.name not in hostnames:
hostnames.add(host.name)
results.append(host)
@@ -371,7 +369,7 @@ class Inventory(object):
for host in matching_hosts:
__append_host_to_results(host)
- if pattern in ["localhost", "127.0.0.1", "::1"] and len(results) == 0:
+ if pattern in C.LOCALHOST and len(results) == 0:
new_host = self._create_implicit_localhost(pattern)
results.append(new_host)
return results
@@ -396,6 +394,7 @@ class Inventory(object):
if a.name not in groups:
groups[a.name] = [h.name for h in a.get_hosts()]
self._groups_list = groups
+ self._groups_cache = {}
return self._groups_list
def get_groups(self):
@@ -404,26 +403,31 @@ class Inventory(object):
def get_host(self, hostname):
if hostname not in self._hosts_cache:
self._hosts_cache[hostname] = self._get_host(hostname)
+ if hostname in C.LOCALHOST:
+ for host in C.LOCALHOST.difference((hostname,)):
+ self._hosts_cache[host] = self._hosts_cache[hostname]
return self._hosts_cache[hostname]
def _get_host(self, hostname):
- if hostname in ['localhost', '127.0.0.1', '::1']:
+ if hostname in C.LOCALHOST:
for host in self.get_group('all').get_hosts():
- if host.name in ['localhost', '127.0.0.1', '::1']:
+ if host.name in C.LOCALHOST:
return host
return self._create_implicit_localhost(hostname)
- else:
- for group in self.groups:
- for host in group.get_hosts():
- if hostname == host.name:
- return host
- return None
+ matching_host = None
+ for group in self.groups:
+ for host in group.get_hosts():
+ if hostname == host.name:
+ matching_host = host
+ self._hosts_cache[host.name] = host
+ return matching_host
def get_group(self, groupname):
- for group in self.groups:
- if group.name == groupname:
- return group
- return None
+ if not self._groups_cache:
+ for group in self.groups:
+ self._groups_cache[group.name] = group
+
+ return self._groups_cache.get(groupname)
def get_group_variables(self, groupname, update_cached=False, vault_password=None):
if groupname not in self._vars_per_group or update_cached:
@@ -442,11 +446,9 @@ class Inventory(object):
vars_results = [ plugin.get_group_vars(group, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_group_vars')]
for updated in vars_results:
if updated is not None:
- # FIXME: combine_vars
vars = combine_vars(vars, updated)
# Read group_vars/ files
- # FIXME: combine_vars
vars = combine_vars(vars, self.get_group_vars(group))
return vars
@@ -468,7 +470,7 @@ class Inventory(object):
host = self.get_host(hostname)
if host is None:
- raise errors.AnsibleError("host not found: %s" % hostname)
+ raise AnsibleError("host not found: %s" % hostname)
vars = {}
@@ -476,25 +478,21 @@ class Inventory(object):
vars_results = [ plugin.run(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'run')]
for updated in vars_results:
if updated is not None:
- # FIXME: combine_vars
vars = combine_vars(vars, updated)
# plugin.get_host_vars retrieves just vars for specific host
vars_results = [ plugin.get_host_vars(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_host_vars')]
for updated in vars_results:
if updated is not None:
- # FIXME: combine_vars
vars = combine_vars(vars, updated)
# still need to check InventoryParser per host vars
# which actually means InventoryScript per host,
# which is not performant
if self.parser is not None:
- # FIXME: combine_vars
vars = combine_vars(vars, self.parser.get_host_variables(host))
# Read host_vars/ files
- # FIXME: combine_vars
vars = combine_vars(vars, self.get_host_vars(host))
return vars
@@ -503,15 +501,16 @@ class Inventory(object):
if group.name not in self.groups_list():
self.groups.append(group)
self._groups_list = None # invalidate internal cache
+ self._groups_cache = {}
else:
- raise errors.AnsibleError("group already in inventory: %s" % group.name)
+ raise AnsibleError("group already in inventory: %s" % group.name)
def list_hosts(self, pattern="all"):
""" return a list of hostnames for a pattern """
result = [ h for h in self.get_hosts(pattern) ]
- if len(result) == 0 and pattern in ["localhost", "127.0.0.1", "::1"]:
+ if len(result) == 0 and pattern in C.LOCALHOST:
result = [pattern]
return result
@@ -521,22 +520,13 @@ class Inventory(object):
def restrict_to_hosts(self, restriction):
"""
Restrict list operations to the hosts given in restriction. This is used
- to exclude failed hosts in main playbook code, don't use this for other
+ to batch serial operations in main playbook code, don't use this for other
reasons.
"""
if not isinstance(restriction, list):
restriction = [ restriction ]
self._restriction = restriction
- def also_restrict_to(self, restriction):
- """
- Works like restict_to but offers an additional restriction. Playbooks use this
- to implement serial behavior.
- """
- if not isinstance(restriction, list):
- restriction = [ restriction ]
- self._also_restriction = restriction
-
def subset(self, subset_pattern):
"""
Limits inventory results to a subset of inventory that matches a given
@@ -547,11 +537,13 @@ class Inventory(object):
if subset_pattern is None:
self._subset = None
else:
- subset_pattern = subset_pattern.replace(',',':')
- subset_pattern = subset_pattern.replace(";",":").split(":")
+ if ';' in subset_pattern or ',' in subset_pattern:
+ display.deprecated("Use ':' instead of ',' or ';' to separate host patterns", version=2.0, removed=True)
+
+ subset_patterns = self._split_pattern(subset_pattern)
results = []
# allow Unix style @filename data
- for x in subset_pattern:
+ for x in subset_patterns:
if x.startswith("@"):
fd = open(x[1:])
results.extend(fd.read().split("\n"))
@@ -564,10 +556,6 @@ class Inventory(object):
""" Do not restrict list operations """
self._restriction = None
- def lift_also_restriction(self):
- """ Clears the also restriction """
- self._also_restriction = None
-
def is_file(self):
""" did inventory come from a file? """
if not isinstance(self.host_list, basestring):
@@ -665,13 +653,25 @@ class Inventory(object):
# FIXME: these should go to VariableManager
if group and host is None:
# load vars in dir/group_vars/name_of_group
- base_path = os.path.join(basedir, "group_vars/%s" % group.name)
+ base_path = os.path.realpath(os.path.join(basedir, "group_vars/%s" % group.name))
results = self._variable_manager.add_group_vars_file(base_path, self._loader)
elif host and group is None:
# same for hostvars in dir/host_vars/name_of_host
- base_path = os.path.join(basedir, "host_vars/%s" % host.name)
+ base_path = os.path.realpath(os.path.join(basedir, "host_vars/%s" % host.name))
results = self._variable_manager.add_host_vars_file(base_path, self._loader)
# all done, results is a dictionary of variables for this particular host.
return results
+ def refresh_inventory(self):
+
+ self.clear_pattern_cache()
+
+ self._hosts_cache = {}
+ self._vars_per_host = {}
+ self._vars_per_group = {}
+ self._groups_list = {}
+ self._groups_cache = {}
+ self.groups = []
+
+ self.parse_inventory(self.host_list)
diff --git a/lib/ansible/inventory/dir.py b/lib/ansible/inventory/dir.py
index 735f32d62c..e456a950d4 100644
--- a/lib/ansible/inventory/dir.py
+++ b/lib/ansible/inventory/dir.py
@@ -27,11 +27,58 @@ from ansible.errors import AnsibleError
from ansible.inventory.host import Host
from ansible.inventory.group import Group
-from ansible.inventory.ini import InventoryParser
-from ansible.inventory.script import InventoryScript
-from ansible.utils.path import is_executable
from ansible.utils.vars import combine_vars
+from ansible.utils.path import is_executable
+from ansible.inventory.ini import InventoryParser as InventoryINIParser
+from ansible.inventory.script import InventoryScript
+
+__all__ = ['get_file_parser']
+
+def get_file_parser(hostsfile, loader):
+ # check to see if the specified file starts with a
+ # shebang (#!/), so if an error is raised by the parser
+ # class we can show a more apropos error
+
+ shebang_present = False
+ processed = False
+ myerr = []
+ parser = None
+
+ try:
+ inv_file = open(hostsfile)
+ first_line = inv_file.readlines()[0]
+ inv_file.close()
+ if first_line.startswith('#!'):
+ shebang_present = True
+ except:
+ pass
+
+ if is_executable(hostsfile):
+ try:
+ parser = InventoryScript(loader=loader, filename=hostsfile)
+ processed = True
+ except Exception as e:
+ myerr.append("The file %s is marked as executable, but failed to execute correctly. " % hostsfile + \
+ "If this is not supposed to be an executable script, correct this with `chmod -x %s`." % hostsfile)
+ myerr.append(str(e))
+
+ if not processed:
+ try:
+ parser = InventoryINIParser(filename=hostsfile)
+ processed = True
+ except Exception as e:
+ if shebang_present and not is_executable(hostsfile):
+ myerr.append("The file %s looks like it should be an executable inventory script, but is not marked executable. " % hostsfile + \
+ "Perhaps you want to correct this with `chmod +x %s`?" % hostsfile)
+ else:
+ myerr.append(str(e))
+
+ if not processed and myerr:
+ raise AnsibleError( '\n'.join(myerr) )
+
+ return parser
+
class InventoryDirectory(object):
''' Host inventory parser for ansible using a directory of inventories. '''
@@ -48,7 +95,7 @@ class InventoryDirectory(object):
for i in self.names:
# Skip files that end with certain extensions or characters
- if any(i.endswith(ext) for ext in ("~", ".orig", ".bak", ".ini", ".cfg", ".retry", ".pyc", ".pyo")):
+ if any(i.endswith(ext) for ext in C.DEFAULT_INVENTORY_IGNORE):
continue
# Skip hidden files
if i.startswith('.') and not i.startswith('./'):
@@ -59,10 +106,14 @@ class InventoryDirectory(object):
fullpath = os.path.join(self.directory, i)
if os.path.isdir(fullpath):
parser = InventoryDirectory(loader=loader, filename=fullpath)
- elif is_executable(fullpath):
- parser = InventoryScript(loader=loader, filename=fullpath)
else:
- parser = InventoryParser(filename=fullpath)
+ parser = get_file_parser(fullpath, loader)
+ if parser is None:
+ #FIXME: needs to use display
+ import warnings
+ warnings.warning("Could not find parser for %s, skipping" % fullpath)
+ continue
+
self.parsers.append(parser)
# retrieve all groups and hosts form the parser and add them to
diff --git a/lib/ansible/inventory/expand_hosts.py b/lib/ansible/inventory/expand_hosts.py
index b5a957c53f..0d63ba08bb 100644
--- a/lib/ansible/inventory/expand_hosts.py
+++ b/lib/ansible/inventory/expand_hosts.py
@@ -75,12 +75,11 @@ def expand_hostname_range(line = None):
# of hosts and then repeat until none left.
# - also add an optional third parameter which contains the step. (Default: 1)
# so range can be [01:10:2] -> 01 03 05 07 09
- # FIXME: make this work for alphabetic sequences too.
(head, nrange, tail) = line.replace('[','|',1).replace(']','|',1).split('|')
bounds = nrange.split(":")
if len(bounds) != 2 and len(bounds) != 3:
- raise errors.AnsibleError("host range incorrectly specified")
+ raise errors.AnsibleError("host range must be begin:end or begin:end:step")
beg = bounds[0]
end = bounds[1]
if len(bounds) == 2:
@@ -90,11 +89,11 @@ def expand_hostname_range(line = None):
if not beg:
beg = "0"
if not end:
- raise errors.AnsibleError("host range end value missing")
+ raise errors.AnsibleError("host range must specify end value")
if beg[0] == '0' and len(beg) > 1:
rlen = len(beg) # range length formatting hint
if rlen != len(end):
- raise errors.AnsibleError("host range format incorrectly specified!")
+ raise errors.AnsibleError("host range must specify equal-length begin and end formats")
fill = lambda _: str(_).zfill(rlen) # range sequence
else:
fill = str
@@ -103,8 +102,8 @@ def expand_hostname_range(line = None):
i_beg = string.ascii_letters.index(beg)
i_end = string.ascii_letters.index(end)
if i_beg > i_end:
- raise errors.AnsibleError("host range format incorrectly specified!")
- seq = string.ascii_letters[i_beg:i_end+1]
+ raise errors.AnsibleError("host range must have begin <= end")
+ seq = list(string.ascii_letters[i_beg:i_end+1:int(step)])
except ValueError: # not an alpha range
seq = range(int(beg), int(end)+1, int(step))
diff --git a/lib/ansible/inventory/host.py b/lib/ansible/inventory/host.py
index 2de190ce1d..43a96d54bf 100644
--- a/lib/ansible/inventory/host.py
+++ b/lib/ansible/inventory/host.py
@@ -19,7 +19,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ansible import constants as C
from ansible.inventory.group import Group
from ansible.utils.vars import combine_vars
@@ -76,7 +75,7 @@ class Host:
self.ipv4_address = name
self.ipv6_address = name
- if port and port != C.DEFAULT_REMOTE_PORT:
+ if port:
self.set_variable('ansible_ssh_port', int(port))
self._gathered_facts = False
diff --git a/lib/ansible/inventory/ini.py b/lib/ansible/inventory/ini.py
index 8d5f5f052f..2769632ef2 100644
--- a/lib/ansible/inventory/ini.py
+++ b/lib/ansible/inventory/ini.py
@@ -1,4 +1,4 @@
-# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+# Copyright 2015 Abhijit Menon-Sen <ams@2ndQuadrant.com>
#
# This file is part of Ansible
#
@@ -33,27 +33,282 @@ from ansible.utils.unicode import to_unicode
class InventoryParser(object):
"""
- Host inventory for ansible.
+ Takes an INI-format inventory file and builds a list of groups and subgroups
+ with their associated hosts and variable settings.
"""
def __init__(self, filename=C.DEFAULT_HOST_LIST):
self.filename = filename
+
+ # Start with an empty host list and the default 'all' and
+ # 'ungrouped' groups.
+
+ self.hosts = {}
+ self.patterns = {}
+ self.groups = dict(
+ all = Group(name='all'),
+ ungrouped = Group(name='ungrouped')
+ )
+
+ # Read in the hosts, groups, and variables defined in the
+ # inventory file.
+
with open(filename) as fh:
- self.lines = fh.readlines()
- self.groups = {}
- self.hosts = {}
- self._parse()
+ self._parse(fh.readlines())
+
+ # Finally, add all top-level groups (including 'ungrouped') as
+ # children of 'all'.
+
+ for group in self.groups.values():
+ if group.depth == 0 and group.name != 'all':
+ self.groups['all'].add_child_group(group)
+
+ # Note: we could discard self.hosts after this point.
+
+ def _raise_error(self, message):
+ raise AnsibleError("%s:%d: " % (self.filename, self.lineno) + message)
+
+ def _parse(self, lines):
+ '''
+ Populates self.groups from the given array of lines. Raises an error on
+ any parse failure.
+ '''
+
+ self._compile_patterns()
+
+ # We behave as though the first line of the inventory is '[ungrouped]',
+ # and begin to look for host definitions. We make a single pass through
+ # each line of the inventory, building up self.groups and adding hosts,
+ # subgroups, and setting variables as we go.
+
+ pending_declarations = {}
+ groupname = 'ungrouped'
+ state = 'hosts'
+
+ self.lineno = 0
+ for line in lines:
+ self.lineno += 1
+
+ line = line.strip()
+
+ # Skip empty lines and comments
+ if line == '' or line.startswith(";") or line.startswith("#"):
+ continue
+
+ # Is this a [section] header? That tells us what group we're parsing
+ # definitions for, and what kind of definitions to expect.
+
+ m = self.patterns['section'].match(line)
+ if m:
+ (groupname, state) = m.groups()
+
+ state = state or 'hosts'
+ if state not in ['hosts', 'children', 'vars']:
+ title = ":".join(m.groups())
+ self._raise_error("Section [%s] has unknown type: %s" % (title, state))
+
+ # If we haven't seen this group before, we add a new Group.
+ #
+ # Either [groupname] or [groupname:children] is sufficient to
+ # declare a group, but [groupname:vars] is allowed only if the
+ # group is declared elsewhere (not necessarily earlier). We add
+ # the group anyway, but make a note in pending_declarations to
+ # check at the end.
+
+ if groupname not in self.groups:
+ self.groups[groupname] = Group(name=groupname)
+
+ if state == 'vars':
+ pending_declarations[groupname] = dict(line=self.lineno, state=state, name=groupname)
+
+ # When we see a declaration that we've been waiting for, we can
+ # delete the note.
+
+ if groupname in pending_declarations and state != 'vars':
+ del pending_declarations[groupname]
+
+ continue
+
+ # It's not a section, so the current state tells us what kind of
+ # definition it must be. The individual parsers will raise an
+ # error if we feed them something they can't digest.
+
+ # [groupname] contains host definitions that must be added to
+ # the current group.
+ if state == 'hosts':
+ hosts = self._parse_host_definition(line)
+ for h in hosts:
+ self.groups[groupname].add_host(h)
+
+ # [groupname:vars] contains variable definitions that must be
+ # applied to the current group.
+ elif state == 'vars':
+ (k, v) = self._parse_variable_definition(line)
+ self.groups[groupname].set_variable(k, v)
+
+ # [groupname:children] contains subgroup names that must be
+ # added as children of the current group. The subgroup names
+ # must themselves be declared as groups, but as before, they
+ # may only be declared later.
+ elif state == 'children':
+ child = self._parse_group_name(line)
+
+ if child not in self.groups:
+ self.groups[child] = Group(name=child)
+ pending_declarations[child] = dict(line=self.lineno, state=state, name=child, parent=groupname)
+
+ self.groups[groupname].add_child_group(self.groups[child])
+
+ # Note: there's no reason why we couldn't accept variable
+ # definitions here, and set them on the named child group.
+
+ # This is a fencepost. It can happen only if the state checker
+ # accepts a state that isn't handled above.
+ else:
+ self._raise_error("Entered unhandled state: %s" % (state))
+
+ # Any entries in pending_declarations not removed by a group declaration
+ # above mean that there was an unresolved forward reference. We report
+ # only the first such error here.
+
+ for g in pending_declarations:
+ decl = pending_declarations[g]
+ if decl['state'] == 'vars':
+ raise AnsibleError("%s:%d: Section [%s:vars] not valid for undefined group: %s" % (self.filename, decl['line'], decl['name'], decl['name']))
+ elif decl['state'] == 'children':
+ raise AnsibleError("%s:%d: Section [%s:children] includes undefined group: %s" % (self.filename, decl['line'], decl['parent'], decl['name']))
+
+ def _parse_group_name(self, line):
+ '''
+ Takes a single line and tries to parse it as a group name. Returns the
+ group name if successful, or raises an error.
+ '''
- def _parse(self):
+ m = self.patterns['groupname'].match(line)
+ if m:
+ return m.group(1)
- self._parse_base_groups()
- self._parse_group_children()
- self._add_allgroup_children()
- self._parse_group_variables()
- return self.groups
+ self._raise_error("Expected group name, got: %s" % (line))
+
+ def _parse_variable_definition(self, line):
+ '''
+ Takes a string and tries to parse it as a variable definition. Returns
+ the key and value if successful, or raises an error.
+ '''
+
+ # TODO: We parse variable assignments as a key (anything to the left of
+ # an '='"), an '=', and a value (anything left) and leave the value to
+ # _parse_value to sort out. We should be more systematic here about
+ # defining what is acceptable, how quotes work, and so on.
+
+ if '=' in line:
+ (k, v) = [e.strip() for e in line.split("=", 1)]
+ return (k, self._parse_value(v))
+
+ self._raise_error("Expected key=value, got: %s" % (line))
+
+ def _parse_host_definition(self, line):
+ '''
+ Takes a single line and tries to parse it as a host definition. Returns
+ a list of Hosts if successful, or raises an error.
+ '''
+
+ # A host definition comprises (1) a non-whitespace hostname or range,
+ # optionally followed by (2) a series of key="some value" assignments.
+ # We ignore any trailing whitespace and/or comments. For example, here
+ # are a series of host definitions in a group:
+ #
+ # [groupname]
+ # alpha
+ # beta:2345 user=admin # we'll tell shlex
+ # gamma sudo=True user=root # to ignore comments
+
+ try:
+ tokens = shlex.split(line, comments=True)
+ except ValueError as e:
+            self._raise_error("Error parsing host definition '%s': %s" % (line, e))
+
+ (hostnames, port) = self._expand_hostpattern(tokens[0])
+ hosts = self._Hosts(hostnames, port)
+
+ # Try to process anything remaining as a series of key=value pairs.
+
+ variables = {}
+ for t in tokens[1:]:
+ if '=' not in t:
+ self._raise_error("Expected key=value host variable assignment, got: %s" % (t))
+ (k, v) = t.split('=', 1)
+ variables[k] = self._parse_value(v)
+
+ # Apply any variable settings found to every host.
+
+ for h in hosts:
+ for k in variables:
+ h.set_variable(k, variables[k])
+ if k == 'ansible_ssh_host':
+ h.ipv4_address = variables[k]
+
+ return hosts
+
+ def _expand_hostpattern(self, hostpattern):
+ '''
+ Takes a single host pattern and returns a list of hostnames and an
+ optional port number that applies to all of them.
+ '''
+
+ # Is a port number specified?
+ #
+ # This may be a mandatory :NN suffix on any square-bracketed expression
+ # (IPv6 address, IPv4 address, host name, host pattern), or an optional
+ # :NN suffix on an IPv4 address, host name, or pattern. IPv6 addresses
+ # must be in square brackets if a port is specified.
+
+ port = None
+
+ for type in ['bracketed_hostport', 'hostport']:
+ m = self.patterns[type].match(hostpattern)
+ if m:
+ (hostpattern, port) = m.groups()
+                break
+
+ # Now we're left with just the pattern, which results in a list of one
+ # or more hostnames, depending on whether it contains any [x:y] ranges.
+ #
+ # FIXME: We could be more strict here about validation.
+
+ if detect_range(hostpattern):
+ hostnames = expand_hostname_range(hostpattern)
+ else:
+ hostnames = [hostpattern]
+
+ return (hostnames, port)
+
+ def _Hosts(self, hostnames, port):
+ '''
+ Takes a list of hostnames and a port (which may be None) and returns a
+ list of Hosts (without recreating anything in self.hosts).
+ '''
+
+ hosts = []
+
+ # Note that we decide whether or not to create a Host based solely on
+ # the (non-)existence of its hostname in self.hosts. This means that one
+ # cannot add both "foo:22" and "foo:23" to the inventory. This behaviour
+ # is preserved for now, but this may be an easy FIXME.
+
+ for hn in hostnames:
+ if hn not in self.hosts:
+ self.hosts[hn] = Host(name=hn, port=port)
+ hosts.append(self.hosts[hn])
+
+ return hosts
@staticmethod
def _parse_value(v):
+ '''
+        Attempts to parse the given string as a Python literal (via
+        ast.literal_eval) when it contains no '#', and returns it as unicode.
+ '''
if "#" not in v:
try:
v = ast.literal_eval(v)
@@ -67,152 +322,72 @@ class InventoryParser(object):
pass
return to_unicode(v, nonstring='passthru', errors='strict')
- # [webservers]
- # alpha
- # beta:2345
- # gamma sudo=True user=root
- # delta asdf=jkl favcolor=red
+ def get_host_variables(self, host):
+ return {}
- def _add_allgroup_children(self):
+ def _compile_patterns(self):
+ '''
+ Compiles the regular expressions required to parse the inventory and
+ stores them in self.patterns.
+ '''
- for group in self.groups.values():
- if group.depth == 0 and group.name != 'all':
- self.groups['all'].add_child_group(group)
+ # Section names are square-bracketed expressions at the beginning of a
+ # line, comprising (1) a group name optionally followed by (2) a tag
+ # that specifies the contents of the section. We ignore any trailing
+ # whitespace and/or comments. For example:
+ #
+ # [groupname]
+ # [somegroup:vars]
+ # [naughty:children] # only get coal in their stockings
+ self.patterns['section'] = re.compile(
+ r'''^\[
+ ([^:\]\s]+) # group name (see groupname below)
+ (?::(\w+))? # optional : and tag name
+ \]
+ \s* # ignore trailing whitespace
+ (?:\#.*)? # and/or a comment till the
+ $ # end of the line
+ ''', re.X
+ )
- def _parse_base_groups(self):
- # FIXME: refactor
-
- ungrouped = Group(name='ungrouped')
- all = Group(name='all')
- all.add_child_group(ungrouped)
-
- self.groups = dict(all=all, ungrouped=ungrouped)
- active_group_name = 'ungrouped'
-
- for line in self.lines:
- line = self._before_comment(line).strip()
- if line.startswith("[") and line.endswith("]"):
- active_group_name = line.replace("[","").replace("]","")
- if ":vars" in line or ":children" in line:
- active_group_name = active_group_name.rsplit(":", 1)[0]
- if active_group_name not in self.groups:
- new_group = self.groups[active_group_name] = Group(name=active_group_name)
- active_group_name = None
- elif active_group_name not in self.groups:
- new_group = self.groups[active_group_name] = Group(name=active_group_name)
- elif line.startswith(";") or line == '':
- pass
- elif active_group_name:
- tokens = shlex.split(line)
- if len(tokens) == 0:
- continue
- hostname = tokens[0]
- port = C.DEFAULT_REMOTE_PORT
- # Three cases to check:
- # 0. A hostname that contains a range pesudo-code and a port
- # 1. A hostname that contains just a port
- if hostname.count(":") > 1:
- # Possible an IPv6 address, or maybe a host line with multiple ranges
- # IPv6 with Port XXX:XXX::XXX.port
- # FQDN foo.example.com
- if hostname.count(".") == 1:
- (hostname, port) = hostname.rsplit(".", 1)
- elif ("[" in hostname and
- "]" in hostname and
- ":" in hostname and
- (hostname.rindex("]") < hostname.rindex(":")) or
- ("]" not in hostname and ":" in hostname)):
- (hostname, port) = hostname.rsplit(":", 1)
-
- hostnames = []
- if detect_range(hostname):
- hostnames = expand_hostname_range(hostname)
- else:
- hostnames = [hostname]
-
- for hn in hostnames:
- host = None
- if hn in self.hosts:
- host = self.hosts[hn]
- else:
- host = Host(name=hn, port=port)
- self.hosts[hn] = host
- if len(tokens) > 1:
- for t in tokens[1:]:
- if t.startswith('#'):
- break
- try:
- (k,v) = t.split("=", 1)
- except ValueError, e:
- raise AnsibleError("Invalid ini entry in %s: %s - %s" % (self.filename, t, str(e)))
- v = self._parse_value(v)
- if k == 'ansible_ssh_host':
- host.ipv4_address = v
- host.set_variable(k, v)
- self.groups[active_group_name].add_host(host)
-
- # [southeast:children]
- # atlanta
- # raleigh
-
- def _parse_group_children(self):
- group = None
-
- for line in self.lines:
- line = line.strip()
- if line is None or line == '':
- continue
- if line.startswith("[") and ":children]" in line:
- line = line.replace("[","").replace(":children]","")
- group = self.groups.get(line, None)
- if group is None:
- group = self.groups[line] = Group(name=line)
- elif line.startswith("#") or line.startswith(";"):
- pass
- elif line.startswith("["):
- group = None
- elif group:
- kid_group = self.groups.get(line, None)
- if kid_group is None:
- raise AnsibleError("child group is not defined: (%s)" % line)
- else:
- group.add_child_group(kid_group)
-
-
- # [webservers:vars]
- # http_port=1234
- # maxRequestsPerChild=200
-
- def _parse_group_variables(self):
- group = None
- for line in self.lines:
- line = line.strip()
- if line.startswith("[") and ":vars]" in line:
- line = line.replace("[","").replace(":vars]","")
- group = self.groups.get(line, None)
- if group is None:
- raise AnsibleError("can't add vars to undefined group: %s" % line)
- elif line.startswith("#") or line.startswith(";"):
- pass
- elif line.startswith("["):
- group = None
- elif line == '':
- pass
- elif group:
- if "=" not in line:
- raise AnsibleError("variables assigned to group must be in key=value form")
- else:
- (k, v) = [e.strip() for e in line.split("=", 1)]
- group.set_variable(k, self._parse_value(v))
+ # FIXME: What are the real restrictions on group names, or rather, what
+ # should they be? At the moment, they must be non-empty sequences of non
+ # whitespace characters excluding ':' and ']', but we should define more
+ # precise rules in order to support better diagnostics. The same applies
+ # to hostnames. It seems sensible for them both to follow DNS rules.
- def get_host_variables(self, host):
- return {}
+ self.patterns['groupname'] = re.compile(
+ r'''^
+ ([^:\]\s]+)
+ \s* # ignore trailing whitespace
+ (?:\#.*)? # and/or a comment till the
+ $ # end of the line
+ ''', re.X
+ )
+
+ # The following patterns match the various ways in which a port number
+ # may be specified on an IPv6 address, IPv4 address, hostname, or host
+ # pattern. All of the above may be enclosed in square brackets with a
+ # mandatory :NN suffix; or all but the first may be given without any
+ # brackets but with an :NN suffix.
- def _before_comment(self, msg):
- ''' what's the part of a string before a comment? '''
- msg = msg.replace("\#","**NOT_A_COMMENT**")
- msg = msg.split("#")[0]
- msg = msg.replace("**NOT_A_COMMENT**","#")
- return msg
+ self.patterns['bracketed_hostport'] = re.compile(
+ r'''^
+ \[(.+)\] # [host identifier]
+ :([0-9]+) # :port number
+ $
+ ''', re.X
+ )
+ self.patterns['hostport'] = re.compile(
+ r'''^
+ ((?: # We want to match:
+ [^:\[\]] # (a non-range character
+ | # ...or...
+ \[[^\]]*\] # a complete bracketed expression)
+ )*) # repeated as many times as possible
+ :([0-9]+) # followed by a port number
+ $
+ ''', re.X
+ )
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index 3c2def324a..8df490bf0a 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -94,32 +94,34 @@ try:
except ImportError:
pass
-HAVE_HASHLIB=False
try:
- from hashlib import sha1 as _sha1
- HAVE_HASHLIB=True
+ from systemd import journal
+ has_journal = True
except ImportError:
- from sha import sha as _sha1
+ has_journal = False
+AVAILABLE_HASH_ALGORITHMS = dict()
try:
- from hashlib import md5 as _md5
+ import hashlib
+
+ # python 2.7.9+ and 2.7.0+
+ for attribute in ('available_algorithms', 'algorithms'):
+ algorithms = getattr(hashlib, attribute, None)
+ if algorithms:
+ break
+ if algorithms is None:
+ # python 2.5+
+ algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
+ for algorithm in algorithms:
+ AVAILABLE_HASH_ALGORITHMS[algorithm] = getattr(hashlib, algorithm)
except ImportError:
+ import sha
+ AVAILABLE_HASH_ALGORITHMS = {'sha1': sha.sha}
try:
- from md5 import md5 as _md5
+ import md5
+ AVAILABLE_HASH_ALGORITHMS['md5'] = md5.md5
except ImportError:
- # MD5 unavailable. Possibly FIPS mode
- _md5 = None
-
-try:
- from hashlib import sha256 as _sha256
-except ImportError:
- pass
-
-try:
- from systemd import journal
- has_journal = True
-except ImportError:
- has_journal = False
+ pass
try:
from ast import literal_eval as _literal_eval
@@ -1341,21 +1343,31 @@ class AnsibleModule(object):
or stat.S_IXGRP & os.stat(path)[stat.ST_MODE]
or stat.S_IXOTH & os.stat(path)[stat.ST_MODE])
- def digest_from_file(self, filename, digest_method):
- ''' Return hex digest of local file for a given digest_method, or None if file is not present. '''
+ def digest_from_file(self, filename, algorithm):
+ ''' Return hex digest of local file for a digest_method specified by name, or None if file is not present. '''
if not os.path.exists(filename):
return None
if os.path.isdir(filename):
self.fail_json(msg="attempted to take checksum of directory: %s" % filename)
- digest = digest_method
+
+ # preserve old behaviour where the third parameter was a hash algorithm object
+ if hasattr(algorithm, 'hexdigest'):
+ digest_method = algorithm
+ else:
+ try:
+ digest_method = AVAILABLE_HASH_ALGORITHMS[algorithm]()
+ except KeyError:
+ self.fail_json(msg="Could not hash file '%s' with algorithm '%s'. Available algorithms: %s" %
+ (filename, algorithm, ', '.join(AVAILABLE_HASH_ALGORITHMS)))
+
blocksize = 64 * 1024
infile = open(filename, 'rb')
block = infile.read(blocksize)
while block:
- digest.update(block)
+ digest_method.update(block)
block = infile.read(blocksize)
infile.close()
- return digest.hexdigest()
+ return digest_method.hexdigest()
def md5(self, filename):
''' Return MD5 hex digest of local file using digest_from_file().
@@ -1368,19 +1380,17 @@ class AnsibleModule(object):
Most uses of this function can use the module.sha1 function instead.
'''
- if not _md5:
+ if 'md5' not in AVAILABLE_HASH_ALGORITHMS:
raise ValueError('MD5 not available. Possibly running in FIPS mode')
- return self.digest_from_file(filename, _md5())
+ return self.digest_from_file(filename, 'md5')
def sha1(self, filename):
''' Return SHA1 hex digest of local file using digest_from_file(). '''
- return self.digest_from_file(filename, _sha1())
+ return self.digest_from_file(filename, 'sha1')
def sha256(self, filename):
''' Return SHA-256 hex digest of local file using digest_from_file(). '''
- if not HAVE_HASHLIB:
- self.fail_json(msg="SHA-256 checksums require hashlib, which is available in Python 2.5 and higher")
- return self.digest_from_file(filename, _sha256())
+ return self.digest_from_file(filename, 'sha256')
def backup_local(self, fn):
'''make a date-marked backup of the specified file, return True or False on success or failure'''
diff --git a/lib/ansible/module_utils/cloudstack.py b/lib/ansible/module_utils/cloudstack.py
index 752defec2b..5bfd3d2db9 100644
--- a/lib/ansible/module_utils/cloudstack.py
+++ b/lib/ansible/module_utils/cloudstack.py
@@ -35,7 +35,7 @@ except ImportError:
has_lib_cs = False
-class AnsibleCloudStack:
+class AnsibleCloudStack(object):
def __init__(self, module):
if not has_lib_cs:
@@ -45,6 +45,25 @@ class AnsibleCloudStack:
'changed': False,
}
+ # Common returns, will be merged with self.returns
+ # search_for_key: replace_with_key
+ self.common_returns = {
+ 'id': 'id',
+ 'name': 'name',
+ 'created': 'created',
+ 'zonename': 'zone',
+ 'state': 'state',
+ 'project': 'project',
+ 'account': 'account',
+ 'domain': 'domain',
+ 'displaytext': 'display_text',
+ 'displayname': 'display_name',
+ 'description': 'description',
+ }
+
+ # Init returns dict for use in subclasses
+ self.returns = {}
+
self.module = module
self._connect()
@@ -366,3 +385,22 @@ class AnsibleCloudStack:
break
time.sleep(2)
return job
+
+
+ def get_result(self, resource):
+ if resource:
+ returns = self.common_returns.copy()
+ returns.update(self.returns)
+ for search_key, return_key in returns.iteritems():
+ if search_key in resource:
+ self.result[return_key] = resource[search_key]
+
+ # Special handling for tags
+ if 'tags' in resource:
+ self.result['tags'] = []
+ for tag in resource['tags']:
+ result_tag = {}
+ result_tag['key'] = tag['key']
+ result_tag['value'] = tag['value']
+ self.result['tags'].append(result_tag)
+ return self.result
diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py
index 3eaecb9c37..064b5e1292 100644
--- a/lib/ansible/module_utils/facts.py
+++ b/lib/ansible/module_utils/facts.py
@@ -186,16 +186,21 @@ class Facts(object):
if self.facts['system'] == 'Linux':
self.get_distribution_facts()
elif self.facts['system'] == 'AIX':
- try:
- rc, out, err = module.run_command("/usr/sbin/bootinfo -p")
+ # Attempt to use getconf to figure out architecture
+ # fall back to bootinfo if needed
+ if module.get_bin_path('getconf'):
+ rc, out, err = module.run_command([module.get_bin_path('getconf'),
+ 'MACHINE_ARCHITECTURE'])
+ data = out.split('\n')
+ self.facts['architecture'] = data[0]
+ else:
+ rc, out, err = module.run_command([module.get_bin_path('bootinfo'),
+ '-p'])
data = out.split('\n')
self.facts['architecture'] = data[0]
- except:
- self.facts['architecture'] = 'Not Available'
elif self.facts['system'] == 'OpenBSD':
self.facts['architecture'] = platform.uname()[5]
-
def get_local_facts(self):
fact_path = module.params.get('fact_path', None)
@@ -724,6 +729,7 @@ class LinuxHardware(Hardware):
self.get_dmi_facts()
self.get_device_facts()
self.get_uptime_facts()
+ self.get_lvm_facts()
try:
self.get_mount_facts()
except TimeoutError:
@@ -1048,7 +1054,8 @@ class LinuxHardware(Hardware):
pciid = m.group(1)
did = re.escape(pciid)
m = re.search("^" + did + "\s(.*)$", pcidata, re.MULTILINE)
- d['host'] = m.group(1)
+ if m:
+ d['host'] = m.group(1)
d['holders'] = []
if os.path.isdir(sysdir + "/holders"):
@@ -1067,6 +1074,37 @@ class LinuxHardware(Hardware):
uptime_seconds_string = get_file_content('/proc/uptime').split(' ')[0]
self.facts['uptime_seconds'] = int(float(uptime_seconds_string))
+ def get_lvm_facts(self):
+ """ Get LVM Facts if running as root and lvm utils are available """
+
+ if os.getuid() == 0 and module.get_bin_path('vgs'):
+ lvm_util_options = '--noheadings --nosuffix --units g'
+
+ vgs_path = module.get_bin_path('vgs')
+ #vgs fields: VG #PV #LV #SN Attr VSize VFree
+ vgs={}
+ if vgs_path:
+ rc, vg_lines, err = module.run_command( '%s %s' % (vgs_path, lvm_util_options))
+ for vg_line in vg_lines.splitlines():
+ items = vg_line.split()
+ vgs[items[0]] = {'size_g':items[-2],
+ 'free_g':items[-1],
+ 'num_lvs': items[2],
+ 'num_pvs': items[1]}
+
+ lvs_path = module.get_bin_path('lvs')
+ #lvs fields:
+ #LV VG Attr LSize Pool Origin Data% Move Log Copy% Convert
+ lvs = {}
+ if lvs_path:
+ rc, lv_lines, err = module.run_command( '%s %s' % (lvs_path, lvm_util_options))
+ for lv_line in lv_lines.splitlines():
+ items = lv_line.split()
+ lvs[items[0]] = {'size_g': items[3], 'vg': items[1]}
+
+ self.facts['lvm'] = {'lvs': lvs, 'vgs': vgs}
+
+
class SunOSHardware(Hardware):
"""
In addition to the generic memory and cpu facts, this also sets
@@ -1876,9 +1914,12 @@ class LinuxNetwork(Network):
if not line:
continue
words = line.split()
+ broadcast = ''
if words[0] == 'inet':
if '/' in words[1]:
address, netmask_length = words[1].split('/')
+ if len(words) > 3:
+ broadcast = words[3]
else:
# pointopoint interfaces do not have a prefix
address = words[1]
@@ -1892,6 +1933,7 @@ class LinuxNetwork(Network):
interfaces[iface] = {}
if not secondary and "ipv4" not in interfaces[iface]:
interfaces[iface]['ipv4'] = {'address': address,
+ 'broadcast': broadcast,
'netmask': netmask,
'network': network}
else:
@@ -1899,6 +1941,7 @@ class LinuxNetwork(Network):
interfaces[iface]["ipv4_secondaries"] = []
interfaces[iface]["ipv4_secondaries"].append({
'address': address,
+ 'broadcast': broadcast,
'netmask': netmask,
'network': network,
})
@@ -1909,12 +1952,14 @@ class LinuxNetwork(Network):
interfaces[device]["ipv4_secondaries"] = []
interfaces[device]["ipv4_secondaries"].append({
'address': address,
+ 'broadcast': broadcast,
'netmask': netmask,
'network': network,
})
# If this is the default address, update default_ipv4
if 'address' in default_ipv4 and default_ipv4['address'] == address:
+ default_ipv4['broadcast'] = broadcast
default_ipv4['netmask'] = netmask
default_ipv4['network'] = network
default_ipv4['macaddress'] = macaddress
@@ -2273,6 +2318,26 @@ class AIXNetwork(GenericBsdIfconfigNetwork, Network):
"""
platform = 'AIX'
+ def get_default_interfaces(self, route_path):
+ netstat_path = module.get_bin_path('netstat')
+
+ rc, out, err = module.run_command([netstat_path, '-nr'])
+
+ interface = dict(v4 = {}, v6 = {})
+
+ lines = out.split('\n')
+ for line in lines:
+ words = line.split()
+ if len(words) > 1 and words[0] == 'default':
+ if '.' in words[1]:
+ interface['v4']['gateway'] = words[1]
+ interface['v4']['interface'] = words[5]
+ elif ':' in words[1]:
+ interface['v6']['gateway'] = words[1]
+ interface['v6']['interface'] = words[5]
+
+ return interface['v4'], interface['v6']
+
# AIX 'ifconfig -a' does not have three words in the interface line
def get_interfaces_info(self, ifconfig_path, ifconfig_options):
interfaces = {}
@@ -2516,6 +2581,12 @@ class LinuxVirtual(Virtual):
self.facts['virtualization_role'] = 'guest'
return
+ systemd_container = get_file_content('/run/systemd/container')
+ if systemd_container:
+ self.facts['virtualization_type'] = systemd_container
+ self.facts['virtualization_role'] = 'guest'
+ return
+
if os.path.exists('/proc/1/cgroup'):
for line in get_file_lines('/proc/1/cgroup'):
if re.search(r'/docker(/|-[0-9a-f]+\.scope)', line):
@@ -2835,12 +2906,16 @@ def get_all_facts(module):
for (k, v) in facts.items():
setup_options["ansible_%s" % k.replace('-', '_')] = v
- # Look for the path to the facter and ohai binary and set
+ # Look for the path to the facter, cfacter, and ohai binaries and set
# the variable to that path.
facter_path = module.get_bin_path('facter')
+ cfacter_path = module.get_bin_path('cfacter')
ohai_path = module.get_bin_path('ohai')
+ # Prefer to use cfacter if available
+ if cfacter_path is not None:
+ facter_path = cfacter_path
# if facter is installed, and we can use --json because
# ruby-json is ALSO installed, include facter data in the JSON
diff --git a/lib/ansible/module_utils/powershell.ps1 b/lib/ansible/module_utils/powershell.ps1
index a11e316989..ee65916216 100644
--- a/lib/ansible/module_utils/powershell.ps1
+++ b/lib/ansible/module_utils/powershell.ps1
@@ -26,18 +26,14 @@
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
-# Helper function to parse Ansible JSON arguments from a file passed as
-# the single argument to the module
-# Example: $params = Parse-Args $args
-Function Parse-Args($arguments)
-{
- $parameters = New-Object psobject;
- If ($arguments.Length -gt 0)
- {
- $parameters = Get-Content $arguments[0] | ConvertFrom-Json;
- }
- $parameters;
-}
+# Ansible v2 will insert the module arguments below as a string containing
+# JSON; assign them to an environment variable and redefine $args so existing
+# modules will continue to work.
+$complex_args = @'
+<<INCLUDE_ANSIBLE_MODULE_WINDOWS_ARGS>>
+'@
+Set-Content env:MODULE_COMPLEX_ARGS -Value $complex_args
+$args = @('env:MODULE_COMPLEX_ARGS')
# Helper function to set an "attribute" on a psobject instance in powershell.
# This is a convenience to make adding Members to the object easier and
@@ -142,6 +138,28 @@ Function ConvertTo-Bool
return
}
+# Helper function to parse Ansible JSON arguments from a "file" passed as
+# the single argument to the module.
+# Example: $params = Parse-Args $args
+Function Parse-Args($arguments, $supports_check_mode = $false)
+{
+ $parameters = New-Object psobject
+ If ($arguments.Length -gt 0)
+ {
+ $parameters = Get-Content $arguments[0] | ConvertFrom-Json
+ }
+ $check_mode = Get-Attr $parameters "_ansible_check_mode" $false | ConvertTo-Bool
+ If ($check_mode -and -not $supports_check_mode)
+ {
+ $obj = New-Object psobject
+ Set-Attr $obj "skipped" $true
+ Set-Attr $obj "changed" $false
+ Set-Attr $obj "msg" "remote module does not support check mode"
+ Exit-Json $obj
+ }
+ $parameters
+}
+
# Helper function to calculate a hash of a file in a way which powershell 3
# and above can handle:
Function Get-FileChecksum($path)
diff --git a/v1/ansible/module_utils/vmware.py b/lib/ansible/module_utils/vmware.py
index e2d8c18ca4..6eb612de74 100644
--- a/v1/ansible/module_utils/vmware.py
+++ b/lib/ansible/module_utils/vmware.py
@@ -122,9 +122,9 @@ def connect_to_api(module, disconnect_atexit=True):
if disconnect_atexit:
atexit.register(connect.Disconnect, service_instance)
return service_instance.RetrieveContent()
- except vim.fault.InvalidLogin as invalid_login:
+ except vim.fault.InvalidLogin, invalid_login:
module.fail_json(msg=invalid_login.msg, apierror=str(invalid_login))
- except requests.ConnectionError as connection_error:
+ except requests.ConnectionError, connection_error:
module.fail_json(msg="Unable to connect to vCenter or ESXi API on TCP/443.", apierror=str(connection_error))
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
-Subproject 2bfddb015d2d5d162647957c31431baf30a3093
+Subproject 4721d6d8b5e251054b01ddaf8bb852e8204c2c9
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
-Subproject 14fffa5b53fb87835f6c3005fa289adbf0d8aa1
+Subproject 785cd24a02afd23fc98d8ae7713db3327e67af3
diff --git a/lib/ansible/parsing/__init__.py b/lib/ansible/parsing/__init__.py
index a7d414c043..7eb6017363 100644
--- a/lib/ansible/parsing/__init__.py
+++ b/lib/ansible/parsing/__init__.py
@@ -19,6 +19,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import copy
import json
import os
@@ -100,27 +101,32 @@ class DataLoader():
# if the file has already been read in and cached, we'll
# return those results to avoid more file/vault operations
if file_name in self._FILE_CACHE:
- return self._FILE_CACHE[file_name]
-
- # read the file contents and load the data structure from them
- (file_data, show_content) = self._get_file_contents(file_name)
- parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)
+ parsed_data = self._FILE_CACHE[file_name]
+ else:
+ # read the file contents and load the data structure from them
+ (file_data, show_content) = self._get_file_contents(file_name)
+ parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)
- # cache the file contents for next time
- self._FILE_CACHE[file_name] = parsed_data
+ # cache the file contents for next time
+ self._FILE_CACHE[file_name] = parsed_data
- return parsed_data
+ # return a deep copy here, so the cache is not affected
+ return copy.deepcopy(parsed_data)
def path_exists(self, path):
+ path = self.path_dwim(path)
return os.path.exists(path)
def is_file(self, path):
+ path = self.path_dwim(path)
return os.path.isfile(path)
def is_directory(self, path):
+ path = self.path_dwim(path)
return os.path.isdir(path)
def list_directory(self, path):
+ path = self.path_dwim(path)
return os.listdir(path)
def _safe_load(self, stream, file_name=None):
@@ -228,6 +234,7 @@ class DataLoader():
# try to create absolute path for loader basedir + templates/files/vars + filename
search.append(self.path_dwim(os.path.join(dirname,source)))
+ search.append(self.path_dwim(os.path.join(basedir, source)))
# try to create absolute path for loader basedir + filename
search.append(self.path_dwim(source))
diff --git a/lib/ansible/parsing/mod_args.py b/lib/ansible/parsing/mod_args.py
index d75ed9416b..6b10dd5308 100644
--- a/lib/ansible/parsing/mod_args.py
+++ b/lib/ansible/parsing/mod_args.py
@@ -181,7 +181,7 @@ class ModuleArgsParser:
args = thing
elif isinstance(thing, string_types):
# form is like: local_action: copy src=a dest=b ... pretty common
- check_raw = action in ('command', 'shell', 'script')
+ check_raw = action in ('command', 'shell', 'script', 'raw')
args = parse_kv(thing, check_raw=check_raw)
elif thing is None:
# this can happen with modules which take no params, like ping:
@@ -218,7 +218,7 @@ class ModuleArgsParser:
elif isinstance(thing, string_types):
# form is like: copy: src=a dest=b ... common shorthand throughout ansible
(action, args) = self._split_module_string(thing)
- check_raw = action in ('command', 'shell', 'script')
+ check_raw = action in ('command', 'shell', 'script', 'raw')
args = parse_kv(args, check_raw=check_raw)
else:
@@ -234,10 +234,9 @@ class ModuleArgsParser:
task, dealing with all sorts of levels of fuzziness.
'''
- thing = None
-
+ thing = None
action = None
- delegate_to = self._task_ds.get('delegate_to', None)
+ connection = self._task_ds.get('connection', None)
args = dict()
@@ -256,11 +255,11 @@ class ModuleArgsParser:
# local_action
if 'local_action' in self._task_ds:
- # local_action is similar but also implies a delegate_to
+ # local_action is similar but also implies a connection='local'
if action is not None:
raise AnsibleParserError("action and local_action are mutually exclusive", obj=self._task_ds)
thing = self._task_ds.get('local_action', '')
- delegate_to = 'localhost'
+ connection = 'local'
action, args = self._normalize_parameters(thing, additional_args=additional_args)
# module: <stuff> is the more new-style invocation
@@ -289,4 +288,4 @@ class ModuleArgsParser:
# shell modules require special handling
(action, args) = self._handle_shell_weirdness(action, args)
- return (action, args, delegate_to)
+ return (action, args, connection)
diff --git a/lib/ansible/playbook/__init__.py b/lib/ansible/playbook/__init__.py
index 40e6638f23..37b3ee7503 100644
--- a/lib/ansible/playbook/__init__.py
+++ b/lib/ansible/playbook/__init__.py
@@ -26,7 +26,13 @@ from ansible.parsing import DataLoader
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.playbook.play import Play
from ansible.playbook.playbook_include import PlaybookInclude
-from ansible.plugins import push_basedir
+from ansible.plugins import get_all_plugin_loaders
+
+try:
+ from __main__ import display
+except ImportError:
+ from ansible.utils.display import Display
+ display = Display()
__all__ = ['Playbook']
@@ -57,8 +63,12 @@ class Playbook:
# set the loaders basedir
self._loader.set_basedir(self._basedir)
- # also add the basedir to the list of module directories
- push_basedir(self._basedir)
+ # dynamically load any plugins from the playbook directory
+ for name, obj in get_all_plugin_loaders():
+ if obj.subdir:
+ plugin_path = os.path.join(self._basedir, obj.subdir)
+ if os.path.isdir(plugin_path):
+ obj.add_directory(plugin_path)
ds = self._loader.load_from_file(os.path.basename(file_name))
if not isinstance(ds, list):
@@ -73,7 +83,10 @@ class Playbook:
if 'include' in entry:
pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
- self._entries.extend(pb._entries)
+ if pb is not None:
+ self._entries.extend(pb._entries)
+ else:
+ display.display("skipping playbook include '%s' due to conditional test failure" % entry.get('include', entry), color='cyan')
else:
entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader)
self._entries.append(entry_obj)
diff --git a/lib/ansible/playbook/attribute.py b/lib/ansible/playbook/attribute.py
index b2e89c7733..ec243abcd7 100644
--- a/lib/ansible/playbook/attribute.py
+++ b/lib/ansible/playbook/attribute.py
@@ -21,13 +21,17 @@ __metaclass__ = type
class Attribute:
- def __init__(self, isa=None, private=False, default=None, required=False, listof=None):
+ def __init__(self, isa=None, private=False, default=None, required=False, listof=None, priority=0):
self.isa = isa
self.private = private
self.default = default
self.required = required
self.listof = listof
+ self.priority = priority
+
+ def __cmp__(self, other):
+ return cmp(other.priority, self.priority)
class FieldAttribute(Attribute):
pass
diff --git a/lib/ansible/playbook/base.py b/lib/ansible/playbook/base.py
index 3205ce4f0d..56c42fc0ee 100644
--- a/lib/ansible/playbook/base.py
+++ b/lib/ansible/playbook/base.py
@@ -20,6 +20,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import itertools
+import operator
import uuid
from functools import partial
@@ -35,9 +36,8 @@ from ansible.parsing import DataLoader
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.template import Templar
from ansible.utils.boolean import boolean
-
from ansible.utils.debug import debug
-
+from ansible.utils.vars import combine_vars
from ansible.template import template
class Base:
@@ -47,8 +47,10 @@ class Base:
_port = FieldAttribute(isa='int')
_remote_user = FieldAttribute(isa='string')
- # vars and flags
+ # variables
_vars = FieldAttribute(isa='dict', default=dict())
+
+ # flags and misc. settings
_environment = FieldAttribute(isa='list', default=[])
_no_log = FieldAttribute(isa='bool', default=False)
@@ -65,6 +67,13 @@ class Base:
# and initialize the base attributes
self._initialize_base_attributes()
+ try:
+ from __main__ import display
+ self._display = display
+ except ImportError:
+ from ansible.utils.display import Display
+ self._display = Display()
+
# The following three functions are used to programatically define data
# descriptors (aka properties) for the Attributes of all of the playbook
# objects (tasks, blocks, plays, etc).
@@ -154,24 +163,18 @@ class Base:
else:
self._loader = DataLoader()
- # FIXME: is this required anymore? This doesn't seem to do anything
- # helpful, and was added in very early stages of the base class
- # development.
- #if isinstance(ds, string_types) or isinstance(ds, FileIO):
- # ds = self._loader.load(ds)
-
# call the preprocess_data() function to massage the data into
# something we can more easily parse, and then call the validation
# function on it to ensure there are no incorrect key values
ds = self.preprocess_data(ds)
self._validate_attributes(ds)
- # Walk all attributes in the class.
- #
+ # Walk all attributes in the class. We sort them based on their priority
+ # so that certain fields can be loaded before others, if they are dependent.
# FIXME: we currently don't do anything with private attributes but
# may later decide to filter them out of 'ds' here.
-
- for name in self._get_base_attributes():
+ base_attributes = self._get_base_attributes()
+ for name, attr in sorted(base_attributes.items(), key=operator.itemgetter(1)):
# copy the value over unless a _load_field method is defined
if name in ds:
method = getattr(self, '_load_%s' % name, None)
@@ -350,6 +353,30 @@ class Base:
# restore the UUID field
setattr(self, '_uuid', data.get('uuid'))
+ def _load_vars(self, attr, ds):
+ '''
+ Vars in a play can be specified either as a dictionary directly, or
+ as a list of dictionaries. If the later, this method will turn the
+ list into a single dictionary.
+ '''
+
+ try:
+ if isinstance(ds, dict):
+ return ds
+ elif isinstance(ds, list):
+ all_vars = dict()
+ for item in ds:
+ if not isinstance(item, dict):
+ raise ValueError
+ all_vars = combine_vars(all_vars, item)
+ return all_vars
+ elif ds is None:
+ return {}
+ else:
+ raise ValueError
+ except ValueError:
+ raise AnsibleParserError("Vars in a %s must be specified as a dictionary, or a list of dictionaries" % self.__class__.__name__, obj=ds)
+
def _extend_value(self, value, new_value):
'''
Will extend the value given with new_value (and will turn both
diff --git a/lib/ansible/playbook/block.py b/lib/ansible/playbook/block.py
index 3d3e78166e..006ac828fe 100644
--- a/lib/ansible/playbook/block.py
+++ b/lib/ansible/playbook/block.py
@@ -37,12 +37,13 @@ class Block(Base, Become, Conditional, Taggable):
# similar to the 'else' clause for exceptions
#_otherwise = FieldAttribute(isa='list')
- def __init__(self, play=None, parent_block=None, role=None, task_include=None, use_handlers=False):
+ def __init__(self, play=None, parent_block=None, role=None, task_include=None, use_handlers=False, implicit=False):
self._play = play
self._role = role
self._task_include = task_include
self._parent_block = parent_block
self._use_handlers = use_handlers
+ self._implicit = implicit
self._dep_chain = []
super(Block, self).__init__()
@@ -53,7 +54,7 @@ class Block(Base, Become, Conditional, Taggable):
of a role or task include which does, so return those if present.
'''
- all_vars = dict()
+ all_vars = self.vars.copy()
if self._role:
all_vars.update(self._role.get_vars(self._dep_chain))
@@ -62,27 +63,31 @@ class Block(Base, Become, Conditional, Taggable):
if self._task_include:
all_vars.update(self._task_include.get_vars())
- all_vars.update(self.vars)
return all_vars
@staticmethod
def load(data, play=None, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
- b = Block(play=play, parent_block=parent_block, role=role, task_include=task_include, use_handlers=use_handlers)
+ implicit = not Block.is_block(data)
+ b = Block(play=play, parent_block=parent_block, role=role, task_include=task_include, use_handlers=use_handlers, implicit=implicit)
return b.load_data(data, variable_manager=variable_manager, loader=loader)
+ @staticmethod
+ def is_block(ds):
+ is_block = False
+ if isinstance(ds, dict):
+ for attr in ('block', 'rescue', 'always'):
+ if attr in ds:
+ is_block = True
+ break
+ return is_block
+
def preprocess_data(self, ds):
'''
If a simple task is given, an implicit block for that single task
is created, which goes in the main portion of the block
'''
- is_block = False
- for attr in ('block', 'rescue', 'always'):
- if attr in ds:
- is_block = True
- break
-
- if not is_block:
+ if not Block.is_block(ds):
if isinstance(ds, list):
return super(Block, self).preprocess_data(dict(block=ds))
else:
diff --git a/lib/ansible/playbook/conditional.py b/lib/ansible/playbook/conditional.py
index ae7a5f0ba4..f0acbbdb3f 100644
--- a/lib/ansible/playbook/conditional.py
+++ b/lib/ansible/playbook/conditional.py
@@ -66,8 +66,6 @@ class Conditional:
for conditional in self.when:
if not self._check_conditional(conditional, templar, all_vars):
return False
- except UndefinedError, e:
- raise AnsibleError("The conditional check '%s' failed due to an undefined variable. The error was: %s" % (conditional, e), obj=ds)
except Exception, e:
raise AnsibleError("The conditional check '%s' failed. The error was: %s" % (conditional, e), obj=ds)
@@ -96,7 +94,7 @@ class Conditional:
# a Jinja2 evaluation that results in something Python can eval!
presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional
- conditional = templar.template(presented)
+ conditional = templar.template(presented, fail_on_undefined=False)
val = conditional.strip()
if val == presented:
diff --git a/lib/ansible/playbook/helpers.py b/lib/ansible/playbook/helpers.py
index d982413971..98bef15e2a 100644
--- a/lib/ansible/playbook/helpers.py
+++ b/lib/ansible/playbook/helpers.py
@@ -52,7 +52,13 @@ def load_list_of_blocks(ds, play, parent_block=None, role=None, task_include=Non
variable_manager=variable_manager,
loader=loader
)
- block_list.append(b)
+ # Implicit blocks are created by bare tasks listed in a play without
+ # an explicit block statement. If we have two implicit blocks in a row,
+ # squash them down to a single block to save processing time later.
+ if b._implicit and len(block_list) > 0 and block_list[-1]._implicit:
+ block_list[-1].block.extend(b.block)
+ else:
+ block_list.append(b)
return block_list
@@ -98,7 +104,7 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
return task_list
-def load_list_of_roles(ds, current_role_path=None, variable_manager=None, loader=None):
+def load_list_of_roles(ds, play, current_role_path=None, variable_manager=None, loader=None):
'''
Loads and returns a list of RoleInclude objects from the datastructure
list of role definitions
@@ -112,7 +118,7 @@ def load_list_of_roles(ds, current_role_path=None, variable_manager=None, loader
roles = []
for role_def in ds:
- i = RoleInclude.load(role_def, current_role_path=current_role_path, variable_manager=variable_manager, loader=loader)
+ i = RoleInclude.load(role_def, play=play, current_role_path=current_role_path, variable_manager=variable_manager, loader=loader)
roles.append(i)
return roles
diff --git a/lib/ansible/playbook/included_file.py b/lib/ansible/playbook/included_file.py
index 92bf325f5b..d8bc497dcf 100644
--- a/lib/ansible/playbook/included_file.py
+++ b/lib/ansible/playbook/included_file.py
@@ -19,6 +19,8 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import os
+
from ansible.template import Templar
class IncludedFile:
@@ -49,6 +51,8 @@ class IncludedFile:
if res._task.action == 'include':
if res._task.loop:
+ if 'results' not in res._result:
+ continue
include_results = res._result['results']
else:
include_results = [ res._result ]
@@ -59,19 +63,37 @@ class IncludedFile:
continue
original_task = iterator.get_original_task(res._host, res._task)
- if original_task and original_task._role:
- include_file = loader.path_dwim_relative(original_task._role._role_path, 'tasks', include_result['include'])
- else:
- include_file = loader.path_dwim(res._task.args.get('_raw_params'))
task_vars = variable_manager.get_vars(loader=loader, play=iterator._play, host=res._host, task=original_task)
- #task_vars = tqm.add_tqm_variables(task_vars, play=iterator._play)
templar = Templar(loader=loader, variables=task_vars)
include_variables = include_result.get('include_variables', dict())
if 'item' in include_result:
- include_variables['item'] = include_result['item']
- task_vars['item'] = include_result['item']
+ task_vars['item'] = include_variables['item'] = include_result['item']
+
+ if original_task:
+ if original_task._task_include:
+ # handle relative includes by walking up the list of parent include
+ # tasks and checking the relative result to see if it exists
+ parent_include = original_task._task_include
+ while parent_include is not None:
+ parent_include_dir = templar.template(os.path.dirname(parent_include.args.get('_raw_params')))
+ if original_task._role:
+ new_basedir = os.path.join(original_task._role._role_path, 'tasks', parent_include_dir)
+ include_file = loader.path_dwim_relative(new_basedir, 'tasks', include_result['include'])
+ else:
+ include_file = loader.path_dwim_relative(loader.get_basedir(), parent_include_dir, include_result['include'])
+
+ if os.path.exists(include_file):
+ break
+ else:
+ parent_include = parent_include._task_include
+ elif original_task._role:
+ include_file = loader.path_dwim_relative(original_task._role._role_path, 'tasks', include_result['include'])
+ else:
+ include_file = loader.path_dwim(res._task.args.get('_raw_params'))
+ else:
+ include_file = loader.path_dwim(res._task.args.get('_raw_params'))
include_file = templar.template(include_file)
inc_file = IncludedFile(include_file, include_variables, original_task)
diff --git a/lib/ansible/playbook/play.py b/lib/ansible/playbook/play.py
index ca043e462b..7b3a862911 100644
--- a/lib/ansible/playbook/play.py
+++ b/lib/ansible/playbook/play.py
@@ -32,8 +32,6 @@ from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
from ansible.playbook.task import Task
-from ansible.utils.vars import combine_vars
-
__all__ = ['Play']
@@ -68,15 +66,15 @@ class Play(Base, Taggable, Become):
_vars_prompt = FieldAttribute(isa='list', default=[])
_vault_password = FieldAttribute(isa='string')
+ # Role Attributes
+ _roles = FieldAttribute(isa='list', default=[], priority=100)
+
# Block (Task) Lists Attributes
_handlers = FieldAttribute(isa='list', default=[])
_pre_tasks = FieldAttribute(isa='list', default=[])
_post_tasks = FieldAttribute(isa='list', default=[])
_tasks = FieldAttribute(isa='list', default=[])
- # Role Attributes
- _roles = FieldAttribute(isa='list', default=[])
-
# Flag/Setting Attributes
_any_errors_fatal = FieldAttribute(isa='bool', default=False)
_force_handlers = FieldAttribute(isa='bool')
@@ -149,30 +147,6 @@ class Play(Base, Taggable, Become):
return ds
- def _load_vars(self, attr, ds):
- '''
- Vars in a play can be specified either as a dictionary directly, or
- as a list of dictionaries. If the later, this method will turn the
- list into a single dictionary.
- '''
-
- try:
- if isinstance(ds, dict):
- return ds
- elif isinstance(ds, list):
- all_vars = dict()
- for item in ds:
- if not isinstance(item, dict):
- raise ValueError
- all_vars = combine_vars(all_vars, item)
- return all_vars
- elif ds is None:
- return {}
- else:
- raise ValueError
- except ValueError:
- raise AnsibleParserError("Vars in a playbook must be specified as a dictionary, or a list of dictionaries", obj=ds)
-
def _load_tasks(self, attr, ds):
'''
Loads a list of blocks from a list which may be mixed tasks/blocks.
@@ -210,7 +184,7 @@ class Play(Base, Taggable, Become):
if ds is None:
ds = []
- role_includes = load_list_of_roles(ds, variable_manager=self._variable_manager, loader=self._loader)
+ role_includes = load_list_of_roles(ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
roles = []
for ri in role_includes:
@@ -231,6 +205,14 @@ class Play(Base, Taggable, Become):
'''
return value
+ # disable validation on various fields which will be validated later in other objects
+ def _post_validate_become(self, attr, value, templar):
+ return value
+ def _post_validate_become_user(self, attr, value, templar):
+ return value
+ def _post_validate_become_method(self, attr, value, templar):
+ return value
+
# FIXME: post_validation needs to ensure that become/su/sudo have only 1 set
def _compile_roles(self):
diff --git a/lib/ansible/playbook/play_context.py b/lib/ansible/playbook/play_context.py
index f6ae473927..e57648d24e 100644
--- a/lib/ansible/playbook/play_context.py
+++ b/lib/ansible/playbook/play_context.py
@@ -134,6 +134,18 @@ SU_PROMPT_LOCALIZATIONS = [
'密碼',
]
+TASK_ATTRIBUTE_OVERRIDES = (
+ 'become',
+ 'become_user',
+ 'become_pass',
+ 'become_method',
+ 'connection',
+ 'delegate_to',
+ 'no_log',
+ 'remote_user',
+)
+
+
class PlayContext(Base):
'''
@@ -187,7 +199,6 @@ class PlayContext(Base):
self.password = passwords.get('conn_pass','')
self.become_pass = passwords.get('become_pass','')
- #TODO: just pull options setup to above?
# set options before play to allow play to override them
if options:
self.set_options(options)
@@ -227,7 +238,7 @@ class PlayContext(Base):
'''
Configures this connection information instance with data from
options specified by the user on the command line. These have a
- higher precedence than those set on the play or host.
+ lower precedence than those set on the play or host.
'''
if options.connection:
@@ -275,24 +286,7 @@ class PlayContext(Base):
elif isinstance(options.skip_tags, basestring):
self.skip_tags.update(options.skip_tags.split(','))
- #def copy(self, ci):
- # '''
- # Copies the connection info from another connection info object, used
- # when merging in data from task overrides.
- # '''
- #
- # for field in self._get_fields():
- # value = getattr(ci, field, None)
- # if isinstance(value, dict):
- # setattr(self, field, value.copy())
- # elif isinstance(value, set):
- # setattr(self, field, value.copy())
- # elif isinstance(value, list):
- # setattr(self, field, value[:])
- # else:
- # setattr(self, field, value)
-
- def set_task_and_host_override(self, task, host):
+ def set_task_and_variable_override(self, task, variables):
'''
Sets attributes from the task if they are set, which will override
those from the play.
@@ -302,20 +296,23 @@ class PlayContext(Base):
# loop through a subset of attributes on the task object and set
# connection fields based on their values
- for attr in ('connection', 'remote_user', 'become', 'become_user', 'become_pass', 'become_method', 'no_log'):
+ for attr in TASK_ATTRIBUTE_OVERRIDES:
if hasattr(task, attr):
attr_val = getattr(task, attr)
if attr_val is not None:
setattr(new_info, attr, attr_val)
# finally, use the MAGIC_VARIABLE_MAPPING dictionary to update this
- # connection info object with 'magic' variables from inventory
- variables = host.get_vars()
+ # connection info object with 'magic' variables from the variable list
for (attr, variable_names) in MAGIC_VARIABLE_MAPPING.iteritems():
for variable_name in variable_names:
if variable_name in variables:
setattr(new_info, attr, variables[variable_name])
+ # make sure we get port defaults if needed
+ if new_info.port is None and C.DEFAULT_REMOTE_PORT is not None:
+ new_info.port = int(C.DEFAULT_REMOTE_PORT)
+
# become legacy updates
if not new_info.become_pass:
if new_info.become_method == 'sudo' and new_info.sudo_pass:
@@ -339,7 +336,6 @@ class PlayContext(Base):
becomecmd = None
randbits = ''.join(chr(random.randint(ord('a'), ord('z'))) for x in xrange(32))
success_key = 'BECOME-SUCCESS-%s' % randbits
- #executable = executable or '$SHELL'
success_cmd = pipes.quote('echo %s; %s' % (success_key, cmd))
if self.become_method == 'sudo':
@@ -350,9 +346,13 @@ class PlayContext(Base):
# sudo prompt set with the -p option.
prompt = '[sudo via ansible, key=%s] password: ' % randbits
exe = self.become_exe or self.sudo_exe or 'sudo'
- flags = self.become_flags or self.sudo_flags or ''
- becomecmd = '%s -k && %s %s -S -p "%s" -u %s %s -c %s' % \
- (exe, exe, flags or C.DEFAULT_SUDO_FLAGS, prompt, self.become_user, executable, success_cmd)
+ flags = self.become_flags or self.sudo_flags or C.DEFAULT_SUDO_FLAGS
+
+ # force quick error if password is required but not supplied, should prevent sudo hangs.
+ if not self.become_pass:
+ flags += " -n "
+
+ becomecmd = '%s %s -S -p "%s" -u %s %s -c %s' % (exe, flags, prompt, self.become_user, executable, success_cmd)
elif self.become_method == 'su':
@@ -379,27 +379,37 @@ class PlayContext(Base):
+ # No user as it uses its own exec_attr to figure it out
becomecmd = '%s %s "%s"' % (exe, flags, success_cmd)
+ elif self.become_method == 'runas':
+ raise AnsibleError("'runas' is not yet implemented")
+ #TODO: figure out prompt
+ # this is not for use with winrm plugin but if they ever get ssh native on windoez
+ exe = self.become_exe or 'runas'
+ flags = self.become_flags or ''
+ becomecmd = '%s %s /user:%s "%s"' % (exe, flags, self.become_user, success_cmd)
+
+ elif self.become_method == 'doas':
+
+ prompt = 'Password:'
+ exe = self.become_exe or 'doas'
+ flags = self.become_flags or ''
+
+ if not self.become_pass:
+ flags += ' -n '
+
+ if self.become_user:
+ flags += ' -u %s ' % self.become_user
+
+ becomecmd = '%s %s echo %s && %s %s env ANSIBLE=true %s' % (exe, flags, success_key, exe, flags, cmd)
+
else:
raise AnsibleError("Privilege escalation method not found: %s" % self.become_method)
self.prompt = prompt
self.success_key = success_key
- return ('%s -c ' % executable) + pipes.quote(becomecmd)
+ return ('%s -c %s' % (executable, pipes.quote(becomecmd)))
return cmd
- #def _get_fields(self):
- # return [i for i in self.__dict__.keys() if i[:1] != '_']
-
- #def post_validate(self, templar):
- # '''
- # Finalizes templated values which may be set on this objects fields.
- # '''
- #
- # for field in self._get_fields():
- # value = templar.template(getattr(self, field))
- # setattr(self, field, value)
-
def update_vars(self, variables):
'''
Adds 'magic' variables relating to connections to the variable dictionary provided.
diff --git a/lib/ansible/playbook/playbook_include.py b/lib/ansible/playbook/playbook_include.py
index f1629b4f15..777f81c515 100644
--- a/lib/ansible/playbook/playbook_include.py
+++ b/lib/ansible/playbook/playbook_include.py
@@ -21,14 +21,16 @@ __metaclass__ = type
import os
+from ansible.errors import AnsibleParserError
from ansible.parsing.splitter import split_args, parse_kv
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
+from ansible.playbook.conditional import Conditional
from ansible.playbook.taggable import Taggable
-from ansible.errors import AnsibleParserError
+from ansible.template import Templar
-class PlaybookInclude(Base, Taggable):
+class PlaybookInclude(Base, Conditional, Taggable):
_name = FieldAttribute(isa='string')
_include = FieldAttribute(isa='string')
@@ -52,6 +54,14 @@ class PlaybookInclude(Base, Taggable):
# playbook objects
new_obj = super(PlaybookInclude, self).load_data(ds, variable_manager, loader)
+ all_vars = dict()
+ if variable_manager:
+ all_vars = variable_manager.get_vars(loader=loader)
+
+ templar = Templar(loader=loader, variables=all_vars)
+ if not new_obj.evaluate_conditional(templar=templar, all_vars=all_vars):
+ return None
+
# then we use the object to load a Playbook
pb = Playbook(loader=loader)
@@ -64,7 +74,9 @@ class PlaybookInclude(Base, Taggable):
# finally, update each loaded playbook entry with any variables specified
# on the included playbook and/or any tags which may have been set
for entry in pb._entries:
- entry.vars.update(new_obj.vars)
+ temp_vars = entry.vars.copy()
+ temp_vars.update(new_obj.vars)
+ entry.vars = temp_vars
entry.tags = list(set(entry.tags).union(new_obj.tags))
return pb
diff --git a/lib/ansible/playbook/role/__init__.py b/lib/ansible/playbook/role/__init__.py
index b805555729..1a6f99540c 100644
--- a/lib/ansible/playbook/role/__init__.py
+++ b/lib/ansible/playbook/role/__init__.py
@@ -37,7 +37,7 @@ from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.role.include import RoleInclude
from ansible.playbook.role.metadata import RoleMetadata
from ansible.playbook.taggable import Taggable
-from ansible.plugins import get_all_plugin_loaders, push_basedir
+from ansible.plugins import get_all_plugin_loaders
from ansible.utils.vars import combine_vars
@@ -80,8 +80,8 @@ class Role(Base, Become, Conditional, Taggable):
self._handler_blocks = []
self._default_vars = dict()
self._role_vars = dict()
- self._had_task_run = False
- self._completed = False
+ self._had_task_run = dict()
+ self._completed = dict()
super(Role, self).__init__()
@@ -93,7 +93,6 @@ class Role(Base, Become, Conditional, Taggable):
@staticmethod
def load(role_include, play, parent_role=None):
- # FIXME: add back in the role caching support
try:
# The ROLE_CACHE is a dictionary of role names, with each entry
# containing another dictionary corresponding to a set of parameters
@@ -119,6 +118,16 @@ class Role(Base, Become, Conditional, Taggable):
if role_include.role not in play.ROLE_CACHE:
play.ROLE_CACHE[role_include.role] = dict()
+ if parent_role:
+ if parent_role.when:
+ new_when = parent_role.when[:]
+ new_when.extend(r.when or [])
+ r.when = new_when
+ if parent_role.tags:
+ new_tags = parent_role.tags[:]
+ new_tags.extend(r.tags or [])
+ r.tags = new_tags
+
play.ROLE_CACHE[role_include.role][hashed_params] = r
return r
@@ -133,8 +142,6 @@ class Role(Base, Become, Conditional, Taggable):
self._variable_manager = role_include.get_variable_manager()
self._loader = role_include.get_loader()
- push_basedir(self._role_path)
-
if parent_role:
self.add_parent(parent_role)
@@ -296,13 +303,13 @@ class Role(Base, Become, Conditional, Taggable):
block_list.extend(self._handler_blocks)
return block_list
- def has_run(self):
+ def has_run(self, host):
'''
Returns true if this role has been iterated over completely and
at least one task was run
'''
- return self._had_task_run and self._completed and not self._metadata.allow_duplicates
+ return host.name in self._completed and not self._metadata.allow_duplicates
def compile(self, play, dep_chain=[]):
'''
@@ -341,8 +348,8 @@ class Role(Base, Become, Conditional, Taggable):
res['_role_vars'] = self._role_vars
res['_role_params'] = self._role_params
res['_default_vars'] = self._default_vars
- res['_had_task_run'] = self._had_task_run
- res['_completed'] = self._completed
+ res['_had_task_run'] = self._had_task_run.copy()
+ res['_completed'] = self._completed.copy()
if self._metadata:
res['_metadata'] = self._metadata.serialize()
@@ -366,8 +373,8 @@ class Role(Base, Become, Conditional, Taggable):
self._role_vars = data.get('_role_vars', dict())
self._role_params = data.get('_role_params', dict())
self._default_vars = data.get('_default_vars', dict())
- self._had_task_run = data.get('_had_task_run', False)
- self._completed = data.get('_completed', False)
+ self._had_task_run = data.get('_had_task_run', dict())
+ self._completed = data.get('_completed', dict())
if include_deps:
deps = []
diff --git a/lib/ansible/playbook/role/definition.py b/lib/ansible/playbook/role/definition.py
index d8dbb464da..a54febe1fe 100644
--- a/lib/ansible/playbook/role/definition.py
+++ b/lib/ansible/playbook/role/definition.py
@@ -31,6 +31,7 @@ from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.conditional import Conditional
from ansible.playbook.taggable import Taggable
+from ansible.template import Templar
from ansible.utils.path import unfrackpath
@@ -41,7 +42,11 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
_role = FieldAttribute(isa='string')
- def __init__(self, role_basedir=None):
+ def __init__(self, play=None, role_basedir=None, variable_manager=None, loader=None):
+ self._play = play
+ self._variable_manager = variable_manager
+ self._loader = loader
+
self._role_path = None
self._role_basedir = role_basedir
self._role_params = dict()
@@ -112,6 +117,14 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
if not role_name or not isinstance(role_name, string_types):
raise AnsibleError('role definitions must contain a role name', obj=ds)
+ # if we have the required datastructures, and if the role_name
+ # contains a variable, try and template it now
+ if self._play and self._variable_manager:
+ all_vars = self._variable_manager.get_vars(loader=self._loader, play=self._play)
+ templar = Templar(loader=self._loader, variables=all_vars)
+ if templar._contains_vars(role_name):
+ role_name = templar.template(role_name)
+
return role_name
def _load_role_path(self, role_name):
@@ -155,7 +168,7 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
# FIXME: make the parser smart about list/string entries in
# the yaml so the error line/file can be reported here
- raise AnsibleError("the role '%s' was not found" % role_name)
+ raise AnsibleError("the role '%s' was not found in %s" % (role_name, ":".join(role_search_paths)))
def _split_role_params(self, ds):
'''
diff --git a/lib/ansible/playbook/role/include.py b/lib/ansible/playbook/role/include.py
index 93cf0e2179..07ebf3f0d6 100644
--- a/lib/ansible/playbook/role/include.py
+++ b/lib/ansible/playbook/role/include.py
@@ -38,14 +38,14 @@ class RoleInclude(RoleDefinition):
FIXME: docstring
"""
- def __init__(self, role_basedir=None):
- super(RoleInclude, self).__init__(role_basedir=role_basedir)
+ def __init__(self, play=None, role_basedir=None, variable_manager=None, loader=None):
+ super(RoleInclude, self).__init__(play=play, role_basedir=role_basedir, variable_manager=variable_manager, loader=loader)
@staticmethod
- def load(data, current_role_path=None, parent_role=None, variable_manager=None, loader=None):
+ def load(data, play, current_role_path=None, parent_role=None, variable_manager=None, loader=None):
assert isinstance(data, string_types) or isinstance(data, dict) or isinstance(data, AnsibleBaseYAMLObject)
- ri = RoleInclude(role_basedir=current_role_path)
+ ri = RoleInclude(play=play, role_basedir=current_role_path, variable_manager=variable_manager, loader=loader)
return ri.load_data(data, variable_manager=variable_manager, loader=loader)
diff --git a/lib/ansible/playbook/role/metadata.py b/lib/ansible/playbook/role/metadata.py
index 61e92ce9b5..cd56e60633 100644
--- a/lib/ansible/playbook/role/metadata.py
+++ b/lib/ansible/playbook/role/metadata.py
@@ -72,7 +72,7 @@ class RoleMetadata(Base):
if self._owner:
current_role_path = os.path.dirname(self._owner._role_path)
- return load_list_of_roles(ds, current_role_path=current_role_path, variable_manager=self._variable_manager, loader=self._loader)
+ return load_list_of_roles(ds, play=self._owner._play, current_role_path=current_role_path, variable_manager=self._variable_manager, loader=self._loader)
def _load_galaxy_info(self, attr, ds):
'''
diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py
index ca163ca7e5..ee6bbbacba 100644
--- a/lib/ansible/playbook/task.py
+++ b/lib/ansible/playbook/task.py
@@ -19,11 +19,13 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+from six import string_types
+
from ansible.errors import AnsibleError
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.splitter import parse_kv
-from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
+from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping, AnsibleUnicode
from ansible.plugins import module_loader, lookup_loader
@@ -68,16 +70,10 @@ class Task(Base, Conditional, Taggable, Become):
_failed_when = FieldAttribute(isa='string')
_first_available_file = FieldAttribute(isa='list')
_ignore_errors = FieldAttribute(isa='bool')
-
_loop = FieldAttribute(isa='string', private=True)
_loop_args = FieldAttribute(isa='list', private=True)
_local_action = FieldAttribute(isa='string')
-
- # FIXME: this should not be a Task
- _meta = FieldAttribute(isa='string')
-
_name = FieldAttribute(isa='string', default='')
-
_notify = FieldAttribute(isa='list')
_poll = FieldAttribute(isa='int')
_register = FieldAttribute(isa='string')
@@ -161,21 +157,39 @@ class Task(Base, Conditional, Taggable, Become):
# and the delegate_to value from the various possible forms
# supported as legacy
args_parser = ModuleArgsParser(task_ds=ds)
- (action, args, delegate_to) = args_parser.parse()
+ (action, args, connection) = args_parser.parse()
new_ds['action'] = action
new_ds['args'] = args
- new_ds['delegate_to'] = delegate_to
+ new_ds['connection'] = connection
+
+ # we handle any 'vars' specified in the ds here, as we may
+ # be adding things to them below (special handling for includes).
+ # When that deprecated feature is removed, this can be too.
+ if 'vars' in ds:
+ # _load_vars is defined in Base, and is used to load a dictionary
+ # or list of dictionaries in a standard way
+ new_ds['vars'] = self._load_vars(None, ds.pop('vars'))
+ else:
+ new_ds['vars'] = dict()
for (k,v) in ds.iteritems():
- if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
+ if k in ('action', 'local_action', 'args', 'connection') or k == action or k == 'shell':
# we don't want to re-assign these values, which were
# determined by the ModuleArgsParser() above
continue
elif k.replace("with_", "") in lookup_loader:
self._preprocess_loop(ds, new_ds, k, v)
else:
- new_ds[k] = v
+ # pre-2.0 syntax allowed variables for include statements at the
+ # top level of the task, so we move those into the 'vars' dictionary
+ # here, and show a deprecation message as we will remove this at
+ # some point in the future.
+ if action == 'include' and k not in self._get_base_attributes():
+ self._display.deprecated("Specifying include variables at the top-level of the task is deprecated. Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\nfor currently supported syntax regarding included files and variables")
+ new_ds['vars'][k] = v
+ else:
+ new_ds[k] = v
return super(Task, self).preprocess_data(new_ds)
@@ -192,20 +206,38 @@ class Task(Base, Conditional, Taggable, Become):
super(Task, self).post_validate(templar)
+ def _post_validate_loop_args(self, attr, value, templar):
+ '''
+ Override post validation for the loop args field, which is templated
+ specially in the TaskExecutor class when evaluating loops.
+ '''
+ return value
+
+ def _post_validate_environment(self, attr, value, templar):
+ '''
+ Override post validation of vars on the play, as we don't want to
+ template these too early.
+ '''
+ for env_item in value:
+ if isinstance(env_item, (string_types, AnsibleUnicode)) and env_item in templar._available_variables.keys():
+ self._display.deprecated("Using bare variables for environment is deprecated. Update your playbooks so that the environment value uses the full variable syntax ('{{foo}}')")
+ break
+ return templar.template(value, convert_bare=True)
+
def get_vars(self):
- all_vars = self.vars.copy()
+ all_vars = dict()
if self._block:
all_vars.update(self._block.get_vars())
if self._task_include:
all_vars.update(self._task_include.get_vars())
- #if isinstance(self.args, dict):
- # all_vars.update(self.args)
+ all_vars.update(self.vars)
if 'tags' in all_vars:
del all_vars['tags']
if 'when' in all_vars:
del all_vars['when']
+
return all_vars
def copy(self, exclude_block=False):
diff --git a/lib/ansible/plugins/__init__.py b/lib/ansible/plugins/__init__.py
index 13a3da7177..4054c61633 100644
--- a/lib/ansible/plugins/__init__.py
+++ b/lib/ansible/plugins/__init__.py
@@ -42,6 +42,11 @@ PATH_CACHE = {}
PLUGIN_PATH_CACHE = {}
_basedirs = []
+# FIXME: the _basedirs code may be dead, and no longer needed, as
+# we now use add_directory for all plugin types here instead
+# of relying on this global variable (which also causes problems
+# with forked processes). See the Playbook() and Role() classes
+# for how we now use get_all_plugin_loaders() below.
def push_basedir(basedir):
# avoid pushing the same absolute dir more than once
basedir = to_unicode(os.path.realpath(basedir))
@@ -383,6 +388,13 @@ filter_loader = PluginLoader(
'filter_plugins',
)
+test_loader = PluginLoader(
+ 'TestModule',
+ 'ansible.plugins.test',
+ C.DEFAULT_TEST_PLUGIN_PATH,
+ 'test_plugins'
+)
+
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py
index 87327ed2b4..58276d8f8e 100644
--- a/lib/ansible/plugins/action/__init__.py
+++ b/lib/ansible/plugins/action/__init__.py
@@ -69,9 +69,29 @@ class ActionBase:
# Search module path(s) for named module.
module_suffixes = getattr(self._connection, 'default_suffixes', None)
+
+ # Check to determine if PowerShell modules are supported, and apply
+ # some fixes (hacks) to module name + args.
+ if module_suffixes and '.ps1' in module_suffixes:
+ # Use Windows versions of stat/file/copy modules when called from
+ # within other action plugins.
+ if module_name in ('stat', 'file', 'copy') and self._task.action != module_name:
+ module_name = 'win_%s' % module_name
+ # Remove extra quotes surrounding path parameters before sending to module.
+ if module_name in ('win_stat', 'win_file', 'win_copy', 'slurp') and module_args and hasattr(self._connection._shell, '_unquote'):
+ for key in ('src', 'dest', 'path'):
+ if key in module_args:
+ module_args[key] = self._connection._shell._unquote(module_args[key])
+
module_path = self._shared_loader_obj.module_loader.find_plugin(module_name, module_suffixes)
if module_path is None:
- module_path2 = self._shared_loader_obj.module_loader.find_plugin('ping', module_suffixes)
+ # Use Windows version of ping module to check module paths when
+ # using a connection that supports .ps1 suffixes.
+ if module_suffixes and '.ps1' in module_suffixes:
+ ping_module = 'win_ping'
+ else:
+ ping_module = 'ping'
+ module_path2 = self._shared_loader_obj.module_loader.find_plugin(ping_module, module_suffixes)
if module_path2 is not None:
raise AnsibleError("The module %s was not found in configured module paths" % (module_name))
else:
@@ -96,8 +116,8 @@ class ActionBase:
environments = [ environments ]
for environment in environments:
- if type(environment) != dict:
- raise errors.AnsibleError("environment must be a dictionary, received %s" % environment)
+ if not isinstance(environment, dict):
+ raise AnsibleError("environment must be a dictionary, received %s (%s)" % (environment, type(environment)))
# very deliberatly using update here instead of combine_vars, as
# these environment settings should not need to merge sub-dicts
final_environment.update(environment)
@@ -144,7 +164,7 @@ class ActionBase:
tmp_mode = None
if self._play_context.remote_user != 'root' or self._play_context.become and self._play_context.become_user != 'root':
- tmp_mode = 'a+rx'
+ tmp_mode = 0755
cmd = self._connection._shell.mkdtemp(basefile, use_system_tmp, tmp_mode)
self._display.debug("executing _low_level_execute_command to create the tmp path")
@@ -230,14 +250,13 @@ class ActionBase:
self._display.debug("done with chmod call")
return res
- def _remote_checksum(self, tmp, path):
+ def _remote_checksum(self, tmp, path, all_vars):
'''
Takes a remote checksum and returns 1 if no file
'''
- # FIXME: figure out how this will work, probably pulled from the variable manager data
- #python_interp = inject['hostvars'][inject['inventory_hostname']].get('ansible_python_interpreter', 'python')
- python_interp = 'python'
+ python_interp = all_vars.get('ansible_python_interpreter', 'python')
+
cmd = self._connection._shell.checksum(path, python_interp)
self._display.debug("calling _low_level_execute_command to get the remote checksum")
data = self._low_level_execute_command(cmd, tmp, sudoable=True)
@@ -253,21 +272,17 @@ class ActionBase:
else:
return data2.split()[0]
except IndexError:
- # FIXME: this should probably not print to sys.stderr, but should instead
- # fail in a more normal way?
- sys.stderr.write("warning: Calculating checksum failed unusually, please report this to the list so it can be fixed\n")
- sys.stderr.write("command: %s\n" % cmd)
- sys.stderr.write("----\n")
- sys.stderr.write("output: %s\n" % data)
- sys.stderr.write("----\n")
+ self._display.warning("Calculating checksum failed unusually, please report this to " + \
+ "the list so it can be fixed\ncommand: %s\n----\noutput: %s\n----\n" % (cmd, data))
# this will signal that it changed and allow things to keep going
return "INVALIDCHECKSUM"
def _remote_expand_user(self, path, tmp):
''' takes a remote path and performs tilde expansion on the remote host '''
- if not path.startswith('~'):
+ if not path.startswith('~'): # FIXME: Windows paths may start with "~ instead of just ~
return path
+ # FIXME: Can't use os.path.sep for Windows paths.
split_path = path.split(os.path.sep, 1)
expand_path = split_path[0]
if expand_path == '~':
@@ -340,6 +355,8 @@ class ActionBase:
remote_module_path = None
if not tmp and self._late_needs_tmp_path(tmp, module_style):
tmp = self._make_tmp_path()
+
+ if tmp:
remote_module_path = self._connection._shell.join_path(tmp, module_name)
# FIXME: async stuff here?
@@ -436,7 +453,8 @@ class ActionBase:
self._display.debug("no command, exiting _low_level_execute_command()")
return dict(stdout='', stderr='')
- if sudoable:
+ if sudoable and self._play_context.become:
+ self._display.debug("using become for this command")
cmd = self._play_context.make_become_cmd(cmd, executable=executable)
self._display.debug("executing the command %s through the connection" % cmd)
@@ -457,7 +475,7 @@ class ActionBase:
if rc is None:
rc = 0
- return dict(rc=rc, stdout=out, stderr=err)
+ return dict(rc=rc, stdout=out, stdout_lines=out.splitlines(), stderr=err)
def _get_first_available_file(self, faf, of=None, searchdir='files'):
@@ -523,7 +541,7 @@ class ActionBase:
diff['src_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
else:
diff['after_header'] = source
- diff['after'] = src.read()
+ diff['after'] = src_contents
else:
self._display.debug("source of file passed in")
diff['after_header'] = 'dynamically generated'
diff --git a/lib/ansible/plugins/action/assemble.py b/lib/ansible/plugins/action/assemble.py
index 454e28aa34..1f74ffe7bc 100644
--- a/lib/ansible/plugins/action/assemble.py
+++ b/lib/ansible/plugins/action/assemble.py
@@ -96,8 +96,7 @@ class ActionModule(ActionBase):
elif self._task._role is not None:
src = self._loader.path_dwim_relative(self._task._role._role_path, 'files', src)
else:
- # the source is local, so expand it here
- src = self._loader.path_dwim(os.path.expanduser(src))
+ src = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', src)
_re = None
if regexp is not None:
@@ -108,19 +107,15 @@ class ActionModule(ActionBase):
path_checksum = checksum_s(path)
dest = self._remote_expand_user(dest, tmp)
- remote_checksum = self._remote_checksum(tmp, dest)
+ remote_checksum = self._remote_checksum(tmp, dest, all_vars=task_vars)
+ diff = {}
if path_checksum != remote_checksum:
resultant = file(path).read()
- # FIXME: diff needs to be moved somewhere else
- #if self.runner.diff:
- # dest_result = self._execute_module(module_name='slurp', module_args=dict(path=dest), task_vars=task_vars, tmp=tmp, persist_files=True)
- # if 'content' in dest_result:
- # dest_contents = dest_result['content']
- # if dest_result['encoding'] == 'base64':
- # dest_contents = base64.b64decode(dest_contents)
- # else:
- # raise Exception("unknown encoding, failed: %s" % dest_result)
+
+ if self._play_context.diff:
+ diff = self._get_diff_data(tmp, dest, path, task_vars)
+
xfered = self._transfer_data('src', resultant)
# fix file permissions when the copy is done as a different user
@@ -139,8 +134,8 @@ class ActionModule(ActionBase):
)
res = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
- # FIXME: diff stuff
- #res.diff = dict(after=resultant)
+ if diff:
+ res['diff'] = diff
return res
else:
new_module_args = self._task.args.copy()
diff --git a/lib/ansible/plugins/action/copy.py b/lib/ansible/plugins/action/copy.py
index 1474e476ca..8178d7eb0b 100644
--- a/lib/ansible/plugins/action/copy.py
+++ b/lib/ansible/plugins/action/copy.py
@@ -42,6 +42,7 @@ class ActionModule(ActionBase):
raw = boolean(self._task.args.get('raw', 'no'))
force = boolean(self._task.args.get('force', 'yes'))
faf = self._task.first_available_file
+ remote_src = boolean(self._task.args.get('remote_src', False))
if (source is None and content is None and faf is None) or dest is None:
return dict(failed=True, msg="src (or content) and dest are required")
@@ -53,7 +54,7 @@ class ActionModule(ActionBase):
# Check if the source ends with a "/"
source_trailing_slash = False
if source:
- source_trailing_slash = source.endswith(os.sep)
+ source_trailing_slash = self._connection._shell.path_has_trailing_slash(source)
# Define content_tempfile in case we set it after finding content populated.
content_tempfile = None
@@ -77,11 +78,17 @@ class ActionModule(ActionBase):
source = self._get_first_available_file(faf, task_vars.get('_original_file', None))
if source is None:
return dict(failed=True, msg="could not find src in first_available_file list")
+
+ elif remote_src:
+ new_module_args = self._task.args.copy()
+ del new_module_args['remote_src']
+ return self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, delete_remote_tmp=False)
+
else:
if self._task._role is not None:
source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
else:
- source = self._loader.path_dwim(source)
+ source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)
# A list of source file tuples (full_path, relative_path) which will try to copy to the destination
source_files = []
@@ -109,7 +116,6 @@ class ActionModule(ActionBase):
source_files.append((source, os.path.basename(source)))
changed = False
- diffs = []
module_result = {"changed": False}
# A register for if we executed a module.
@@ -127,6 +133,7 @@ class ActionModule(ActionBase):
# expand any user home dir specifier
dest = self._remote_expand_user(dest, tmp)
+ diffs = []
for source_full, source_rel in source_files:
# Generate a hash of the local file.
@@ -145,7 +152,7 @@ class ActionModule(ActionBase):
dest_file = self._connection._shell.join_path(dest)
# Attempt to get the remote checksum
- remote_checksum = self._remote_checksum(tmp, dest_file)
+ remote_checksum = self._remote_checksum(tmp, dest_file, all_vars=task_vars)
if remote_checksum == '3':
# The remote_checksum was executed on a directory.
@@ -156,7 +163,7 @@ class ActionModule(ActionBase):
else:
# Append the relative source location to the destination and retry remote_checksum
dest_file = self._connection._shell.join_path(dest, source_rel)
- remote_checksum = self._remote_checksum(tmp, dest_file)
+ remote_checksum = self._remote_checksum(tmp, dest_file, all_vars=task_vars)
if remote_checksum != '1' and not force:
# remote_file does not exist so continue to next iteration.
@@ -182,7 +189,7 @@ class ActionModule(ActionBase):
continue
# Define a remote directory that we will copy the file to.
- tmp_src = tmp + 'source'
+ tmp_src = self._connection._shell.join_path(tmp, 'source')
if not raw:
self._connection.put_file(source_full, tmp_src)
@@ -256,14 +263,13 @@ class ActionModule(ActionBase):
if (not C.DEFAULT_KEEP_REMOTE_FILES and not delete_remote_tmp) or (not C.DEFAULT_KEEP_REMOTE_FILES and delete_remote_tmp and not module_executed):
self._remove_tmp_path(tmp)
- # TODO: Support detailed status/diff for multiple files
if module_executed and len(source_files) == 1:
result = module_return
else:
result = dict(dest=dest, src=source, changed=changed)
- if len(diffs) == 1:
- result['diff']=diffs[0]
+ if diffs:
+ result['diff'] = diffs
return result
diff --git a/lib/ansible/plugins/action/fetch.py b/lib/ansible/plugins/action/fetch.py
index 9d62a7b978..8c9a2ed16d 100644
--- a/lib/ansible/plugins/action/fetch.py
+++ b/lib/ansible/plugins/action/fetch.py
@@ -55,7 +55,7 @@ class ActionModule(ActionBase):
source = self._remote_expand_user(source, tmp)
# calculate checksum for the remote file
- remote_checksum = self._remote_checksum(tmp, source)
+ remote_checksum = self._remote_checksum(tmp, source, all_vars=task_vars)
# use slurp if sudo and permissions are lacking
remote_data = None
@@ -78,6 +78,7 @@ class ActionModule(ActionBase):
# calculate the destination name
if os.path.sep not in self._connection._shell.join_path('a', ''):
+ source = self._connection._shell._unquote(source)
source_local = source.replace('\\', '/')
else:
source_local = source
diff --git a/lib/ansible/plugins/action/include_vars.py b/lib/ansible/plugins/action/include_vars.py
index 37b4bff1d3..31d93e7acc 100644
--- a/lib/ansible/plugins/action/include_vars.py
+++ b/lib/ansible/plugins/action/include_vars.py
@@ -36,7 +36,7 @@ class ActionModule(ActionBase):
if self._task._role:
source = self._loader.path_dwim_relative(self._task._role._role_path, 'vars', source)
else:
- source = self._loader.path_dwim(source)
+ source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'vars', source)
if os.path.exists(source):
(data, show_content) = self._loader._get_file_contents(source)
diff --git a/lib/ansible/plugins/action/normal.py b/lib/ansible/plugins/action/normal.py
index 763b1d5ea7..9ea962a240 100644
--- a/lib/ansible/plugins/action/normal.py
+++ b/lib/ansible/plugins/action/normal.py
@@ -23,7 +23,7 @@ class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=dict()):
- results = self._execute_module(tmp, task_vars=task_vars)
+ results = self._execute_module(tmp=tmp, task_vars=task_vars)
# Remove special fields from the result, which can only be set
# internally by the executor engine. We do this only here in
diff --git a/lib/ansible/plugins/action/package.py b/lib/ansible/plugins/action/package.py
index fa412d45db..9488b9f108 100644
--- a/lib/ansible/plugins/action/package.py
+++ b/lib/ansible/plugins/action/package.py
@@ -38,7 +38,10 @@ class ActionModule(ActionBase):
pass # could not get it from template!
if module == 'auto':
- module = self._execute_module(module_name=setup, module_args={filter: 'ansible_pkg_mgr'}, task_vars=task_vars)
+ facts = self._execute_module(module_name='setup', module_args=dict(filter='ansible_pkg_mgr'), task_vars=task_vars)
+ self._display.debug("Facts %s" % facts)
+ if not 'failed' in facts:
+ module = facts['ansible_facts'].get('ansible_pkg_mgr', 'auto')
if module != 'auto':
# run the 'package' module
@@ -46,6 +49,7 @@ class ActionModule(ActionBase):
if 'use' in new_module_args:
del new_module_args['use']
+ self._display.vvvv("Running %s" % module)
return self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars)
else:
diff --git a/lib/ansible/plugins/action/patch.py b/lib/ansible/plugins/action/patch.py
index 65417e50c3..5355ad6823 100644
--- a/lib/ansible/plugins/action/patch.py
+++ b/lib/ansible/plugins/action/patch.py
@@ -41,7 +41,7 @@ class ActionModule(ActionBase):
if self._task._role is not None:
src = self._loader.path_dwim_relative(self._task._role._role_path, 'files', src)
else:
- src = self._loader.path_dwim(src)
+ src = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', src)
# create the remote tmp dir if needed, and put the source file there
if tmp is None or "-tmp-" not in tmp:
diff --git a/lib/ansible/plugins/action/pause.py b/lib/ansible/plugins/action/pause.py
index c5a97d5366..4eeaa6657e 100644
--- a/lib/ansible/plugins/action/pause.py
+++ b/lib/ansible/plugins/action/pause.py
@@ -18,14 +18,22 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
+import signal
import sys
+import termios
import time
+import tty
-from termios import tcflush, TCIFLUSH
-
+from os import isatty
from ansible.errors import *
from ansible.plugins.action import ActionBase
+class AnsibleTimeoutExceeded(Exception):
+ pass
+
+def timeout_handler(signum, frame):
+ raise AnsibleTimeoutExceeded
+
class ActionModule(ActionBase):
''' pauses execution for a length or time, or until input is received '''
@@ -48,14 +56,10 @@ class ActionModule(ActionBase):
delta = None,
)
- # FIXME: not sure if we can get this info directly like this anymore?
- #hosts = ', '.join(self.runner.host_set)
-
# Is 'args' empty, then this is the default prompted pause
if self._task.args is None or len(self._task.args.keys()) == 0:
pause_type = 'prompt'
- #prompt = "[%s]\nPress enter to continue:\n" % hosts
- prompt = "[%s]\nPress enter to continue:\n" % self._task.get_name().strip()
+ prompt = "[%s]\nPress enter to continue:" % self._task.get_name().strip()
# Are 'minutes' or 'seconds' keys that exist in 'args'?
elif 'minutes' in self._task.args or 'seconds' in self._task.args:
@@ -76,51 +80,76 @@ class ActionModule(ActionBase):
# Is 'prompt' a key in 'args'?
elif 'prompt' in self._task.args:
pause_type = 'prompt'
- #prompt = "[%s]\n%s:\n" % (hosts, self._task.args['prompt'])
- prompt = "[%s]\n%s:\n" % (self._task.get_name().strip(), self._task.args['prompt'])
+ prompt = "[%s]\n%s:" % (self._task.get_name().strip(), self._task.args['prompt'])
- # I have no idea what you're trying to do. But it's so wrong.
else:
+ # I have no idea what you're trying to do. But it's so wrong.
return dict(failed=True, msg="invalid pause type given. must be one of: %s" % ", ".join(self.PAUSE_TYPES))
- #vv("created 'pause' ActionModule: pause_type=%s, duration_unit=%s, calculated_seconds=%s, prompt=%s" % \
- # (self.pause_type, self.duration_unit, self.seconds, self.prompt))
-
########################################################################
# Begin the hard work!
start = time.time()
result['start'] = str(datetime.datetime.now())
-
-
- # FIXME: this is all very broken right now, as prompting from the worker side
- # is not really going to be supported, and actions marked as BYPASS_HOST_LOOP
- # probably should not be run through the executor engine at all. Also, ctrl+c
- # is now captured on the parent thread, so it can't be caught here via the
- # KeyboardInterrupt exception.
+ result['user_input'] = ''
try:
- if not pause_type == 'prompt':
- print("(^C-c = continue early, ^C-a = abort)")
- #print("[%s]\nPausing for %s seconds" % (hosts, seconds))
- print("[%s]\nPausing for %s seconds" % (self._task.get_name().strip(), seconds))
- time.sleep(seconds)
+ if seconds is not None:
+ # setup the alarm handler
+ signal.signal(signal.SIGALRM, timeout_handler)
+ signal.alarm(seconds)
+ # show the prompt
+ self._display.display("Pausing for %d seconds" % seconds)
+ self._display.display("(ctrl+C then 'C' = continue early, ctrl+C then 'A' = abort)\r"),
else:
- # Clear out any unflushed buffered input which would
- # otherwise be consumed by raw_input() prematurely.
- #tcflush(sys.stdin, TCIFLUSH)
- result['user_input'] = raw_input(prompt.encode(sys.stdout.encoding))
- except KeyboardInterrupt:
+ self._display.display(prompt)
+
+ # save the attributes on the existing (duped) stdin so
+ # that we can restore them later after we set raw mode
+ fd = self._connection._new_stdin.fileno()
+ if isatty(fd):
+ old_settings = termios.tcgetattr(fd)
+ tty.setraw(fd)
+
+ # flush the buffer to make sure no previous key presses
+ # are read in below
+ termios.tcflush(self._connection._new_stdin, termios.TCIFLUSH)
+
while True:
- print('\nAction? (a)bort/(c)ontinue: ')
- c = getch()
- if c == 'c':
- # continue playbook evaluation
- break
- elif c == 'a':
- # abort further playbook evaluation
- raise ae('user requested abort!')
+ try:
+ key_pressed = self._connection._new_stdin.read(1)
+ if key_pressed == '\x03':
+ raise KeyboardInterrupt
+
+ if not seconds:
+ if not isatty(fd):
+ self._display.warning("Not waiting from prompt as stdin is not interactive")
+ break
+ # read key presses and act accordingly
+ if key_pressed == '\r':
+ break
+ else:
+ result['user_input'] += key_pressed
+
+ except KeyboardInterrupt:
+ if seconds is not None:
+ signal.alarm(0)
+ self._display.display("Press 'C' to continue the play or 'A' to abort \r"),
+ if self._c_or_a():
+ break
+ else:
+ raise AnsibleError('user requested abort!')
+
+ except AnsibleTimeoutExceeded:
+ # this is the exception we expect when the alarm signal
+ # fires, so we simply ignore it to move into the cleanup
+ pass
finally:
+ # cleanup and save some information
+ # restore the old settings for the duped stdin fd
+ if isatty(fd):
+ termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+
duration = time.time() - start
result['stop'] = str(datetime.datetime.now())
result['delta'] = int(duration)
@@ -129,8 +158,14 @@ class ActionModule(ActionBase):
duration = round(duration / 60.0, 2)
else:
duration = round(duration, 2)
-
result['stdout'] = "Paused for %s %s" % (duration, duration_unit)
return result
+ def _c_or_a(self):
+ while True:
+ key_pressed = self._connection._new_stdin.read(1)
+ if key_pressed.lower() == 'a':
+ return False
+ elif key_pressed.lower() == 'c':
+ return True
diff --git a/lib/ansible/plugins/action/script.py b/lib/ansible/plugins/action/script.py
index 2392851110..d2fbf21cf0 100644
--- a/lib/ansible/plugins/action/script.py
+++ b/lib/ansible/plugins/action/script.py
@@ -65,7 +65,7 @@ class ActionModule(ActionBase):
if self._task._role is not None:
source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
else:
- source = self._loader.path_dwim(source)
+ source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)
# transfer the file to a remote tmp location
tmp_src = self._connection._shell.join_path(tmp, os.path.basename(source))
@@ -83,8 +83,8 @@ class ActionModule(ActionBase):
# add preparation steps to one ssh roundtrip executing the script
env_string = self._compute_environment_string()
script_cmd = ' '.join([env_string, tmp_src, args])
-
- result = self._low_level_execute_command(cmd=script_cmd, tmp=None, sudoable=sudoable)
+
+ result = self._low_level_execute_command(cmd=script_cmd, tmp=None, sudoable=True)
# clean up after
if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES:
diff --git a/lib/ansible/plugins/action/service.py b/lib/ansible/plugins/action/service.py
new file mode 100644
index 0000000000..fc1704c386
--- /dev/null
+++ b/lib/ansible/plugins/action/service.py
@@ -0,0 +1,60 @@
+# (c) 2015, Ansible Inc,
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+from ansible.plugins.action import ActionBase
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = False
+
+ def run(self, tmp=None, task_vars=dict()):
+ ''' handler for package operations '''
+
+ name = self._task.args.get('name', None)
+ state = self._task.args.get('state', None)
+ module = self._task.args.get('use', 'auto')
+
+ if module == 'auto':
+ try:
+ module = self._templar.template('{{ansible_service_mgr}}')
+ except:
+ pass # could not get it from template!
+
+ if module == 'auto':
+ facts = self._execute_module(module_name='setup', module_args=dict(filter='ansible_service_mgr'), task_vars=task_vars)
+ self._display.debug("Facts %s" % facts)
+ if not 'failed' in facts:
+ module = facts['ansible_facts'].get('ansible_service_mgr', 'auto')
+
+ if not module or module == 'auto':
+ module = 'service'
+
+ if module != 'auto':
+ # run the 'service' module
+ new_module_args = self._task.args.copy()
+ if 'use' in new_module_args:
+ del new_module_args['use']
+
+ self._display.vvvv("Running %s" % module)
+ return self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars)
+
+ else:
+
+ return {'failed': True, 'msg': 'Could not detect which service manager to use. Try gathering facts or setting the "use" option.'}
diff --git a/lib/ansible/plugins/action/set_fact.py b/lib/ansible/plugins/action/set_fact.py
index 10ff6f2322..5822fb3f08 100644
--- a/lib/ansible/plugins/action/set_fact.py
+++ b/lib/ansible/plugins/action/set_fact.py
@@ -17,10 +17,45 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import ast
+
+from six import string_types
+
from ansible.errors import AnsibleError
from ansible.plugins.action import ActionBase
from ansible.utils.boolean import boolean
def isidentifier(ident):
    """
    Determines, if string is valid Python identifier using the ast module.
    Originally posted at: http://stackoverflow.com/a/29586366

    Keywords and the named constants True/False/None are rejected as well:
    on Python 2 those parse as a bare ast.Name node and would otherwise
    slip through the structural checks below.
    """
    import keyword

    if not isinstance(ident, string_types):
        return False

    # Reserved words can never be assigned to, so reject them up front.
    if keyword.iskeyword(ident) or ident in ('True', 'False', 'None'):
        return False

    try:
        root = ast.parse(ident)
    except SyntaxError:
        return False

    if not isinstance(root, ast.Module):
        return False

    if len(root.body) != 1:
        return False

    if not isinstance(root.body[0], ast.Expr):
        return False

    if not isinstance(root.body[0].value, ast.Name):
        return False

    # Guard against inputs that parse to a *different* single name
    # (e.g. surrounding whitespace).
    if root.body[0].value.id != ident:
        return False

    return True
+
class ActionModule(ActionBase):
TRANSFERS_FILES = False
@@ -30,6 +65,10 @@ class ActionModule(ActionBase):
if self._task.args:
for (k, v) in self._task.args.iteritems():
k = self._templar.template(k)
+
+ if not isidentifier(k):
+ return dict(failed=True, msg="The variable name '%s' is not valid. Variables must start with a letter or underscore character, and contain only letters, numbers and underscores." % k)
+
if isinstance(v, basestring) and v.lower() in ('true', 'false', 'yes', 'no'):
v = boolean(v)
facts[k] = v
diff --git a/lib/ansible/plugins/action/synchronize.py b/lib/ansible/plugins/action/synchronize.py
index ec1f938ffc..79de84238d 100644
--- a/lib/ansible/plugins/action/synchronize.py
+++ b/lib/ansible/plugins/action/synchronize.py
@@ -21,8 +21,9 @@ __metaclass__ = type
import os.path
from ansible.plugins.action import ActionBase
+from ansible.plugins import connection_loader
from ansible.utils.boolean import boolean
-from ansible import constants
+from ansible import constants as C
class ActionModule(ActionBase):
@@ -30,7 +31,12 @@ class ActionModule(ActionBase):
def _get_absolute_path(self, path):
if self._task._role is not None:
original_path = path
- path = self._loader.path_dwim_relative(self._task._role._role_path, 'files', path)
+
+ if self._task._role is not None:
+ path = self._loader.path_dwim_relative(self._task._role._role_path, 'files', path)
+ else:
+ path = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', path)
+
if original_path and original_path[-1] == '/' and path[-1] != '/':
# make sure the dwim'd path ends in a trailing "/"
# if the original path did
@@ -40,100 +46,127 @@ class ActionModule(ActionBase):
def _process_origin(self, host, path, user):
- if not host in ['127.0.0.1', 'localhost']:
+ if host not in C.LOCALHOST:
if user:
return '%s@%s:%s' % (user, host, path)
else:
return '%s:%s' % (host, path)
- else:
- if not ':' in path:
- if not path.startswith('/'):
- path = self._get_absolute_path(path=path)
- return path
+
+ if ':' not in path and not path.startswith('/'):
+ path = self._get_absolute_path(path=path)
+ return path
def _process_remote(self, host, path, user):
transport = self._play_context.connection
- return_data = None
- if not host in ['127.0.0.1', 'localhost'] or transport != "local":
+ if host not in C.LOCALHOST or transport != "local":
if user:
- return_data = '%s@%s:%s' % (user, host, path)
+ return '%s@%s:%s' % (user, host, path)
else:
- return_data = '%s:%s' % (host, path)
- else:
- return_data = path
+ return '%s:%s' % (host, path)
- if not ':' in return_data:
- if not return_data.startswith('/'):
- return_data = self._get_absolute_path(path=return_data)
+ if ':' not in path and not path.startswith('/'):
+ path = self._get_absolute_path(path=path)
+ return path
- return return_data
+ def _override_module_replaced_vars(self, task_vars):
+ """ Some vars are substituted into the modules. Have to make sure
+ that those are correct for localhost when synchronize creates its own
+ connection to localhost."""
+
+ # Clear the current definition of these variables as they came from the
+ # connection to the remote host
+ if 'ansible_syslog_facility' in task_vars:
+ del task_vars['ansible_syslog_facility']
+ for key in task_vars:
+ if key.startswith("ansible_") and key.endswith("_interpreter"):
+ del task_vars[key]
+
+ # Add the definitions from localhost
+ localhost = task_vars['hostvars']['127.0.0.1']
+ if 'ansible_syslog_facility' in localhost:
+ task_vars['ansible_syslog_facility'] = localhost['ansible_syslog_facility']
+ for key in localhost:
+ if key.startswith("ansible_") and key.endswith("_interpreter"):
+ task_vars[key] = localhost[key]
def run(self, tmp=None, task_vars=dict()):
''' generates params and passes them on to the rsync module '''
original_transport = task_vars.get('ansible_connection') or self._play_context.connection
- transport_overridden = False
- if task_vars.get('delegate_to') is None:
- task_vars['delegate_to'] = '127.0.0.1'
- # IF original transport is not local, override transport and disable sudo.
- if original_transport != 'local':
- task_vars['ansible_connection'] = 'local'
- transport_overridden = True
- self._play_context.become = False
-
- src = self._task.args.get('src', None)
- dest = self._task.args.get('dest', None)
+ remote_transport = False
+ if original_transport != 'local':
+ remote_transport = True
+
+ try:
+ delegate_to = self._play_context.delegate_to
+ except (AttributeError, KeyError):
+ delegate_to = None
+
use_ssh_args = self._task.args.pop('use_ssh_args', None)
- # FIXME: this doesn't appear to be used anywhere?
- local_rsync_path = task_vars.get('ansible_rsync_path')
+ # Parameter name needed by the ansible module
+ self._task.args['_local_rsync_path'] = task_vars.get('ansible_rsync_path') or 'rsync'
- # from the perspective of the rsync call the delegate is the localhost
- src_host = '127.0.0.1'
+ # rsync thinks that one end of the connection is localhost and the
+ # other is the host we're running the task for (Note: We use
+ # ansible's delegate_to mechanism to determine which host rsync is
+ # running on so localhost could be a non-controller machine if
+ # delegate_to is used)
+ src_host = '127.0.0.1'
dest_host = task_vars.get('ansible_ssh_host') or task_vars.get('inventory_hostname')
- # allow ansible_ssh_host to be templated
- dest_is_local = dest_host in ['127.0.0.1', 'localhost']
+ dest_is_local = dest_host in C.LOCALHOST
# CHECK FOR NON-DEFAULT SSH PORT
- dest_port = task_vars.get('ansible_ssh_port') or self._task.args.get('dest_port') or 22
+ if self._task.args.get('dest_port', None) is None:
+ inv_port = task_vars.get('ansible_ssh_port', None) or C.DEFAULT_REMOTE_PORT
+ if inv_port is not None:
+ self._task.args['dest_port'] = inv_port
- # edge case: explicit delegate and dest_host are the same
- if dest_host == task_vars.get('delegate_to'):
+ # Set use_delegate if we are going to run rsync on a delegated host
+ # instead of localhost
+ use_delegate = False
+ if dest_host == delegate_to:
+ # edge case: explicit delegate and dest_host are the same
+ # so we run rsync on the remote machine targetting its localhost
+ # (itself)
dest_host = '127.0.0.1'
-
- # SWITCH SRC AND DEST PER MODE
+ use_delegate = True
+ elif delegate_to is not None and remote_transport:
+ # If we're delegating to a remote host then we need to use the
+ # delegate_to settings
+ use_delegate = True
+
+ # Delegate to localhost as the source of the rsync unless we've been
+ # told (via delegate_to) that a different host is the source of the
+ # rsync
+ transport_overridden = False
+ if not use_delegate and remote_transport:
+ # Create a connection to localhost to run rsync on
+ new_stdin = self._connection._new_stdin
+ new_connection = connection_loader.get('local', self._play_context, new_stdin)
+ self._connection = new_connection
+ transport_overridden = True
+ self._override_module_replaced_vars(task_vars)
+
+ # COMPARE DELEGATE, HOST AND TRANSPORT
+ between_multiple_hosts = False
+ if dest_host != src_host and remote_transport:
+ # We're not copying two filesystem trees on the same host so we
+ # need to correctly format the paths for rsync (like
+ # user@host:path/to/tree
+ between_multiple_hosts = True
+
+ # SWITCH SRC AND DEST HOST PER MODE
if self._task.args.get('mode', 'push') == 'pull':
(dest_host, src_host) = (src_host, dest_host)
- # CHECK DELEGATE HOST INFO
- use_delegate = False
- # FIXME: not sure if this is in connection info yet or not...
- #if conn.delegate != conn.host:
- # if 'hostvars' in task_vars:
- # if conn.delegate in task_vars['hostvars'] and original_transport != 'local':
- # # use a delegate host instead of localhost
- # use_delegate = True
-
- # COMPARE DELEGATE, HOST AND TRANSPORT
- process_args = False
- if not dest_host is src_host and original_transport != 'local':
- # interpret and task_vars remote host info into src or dest
- process_args = True
-
# MUNGE SRC AND DEST PER REMOTE_HOST INFO
- if process_args or use_delegate:
-
- user = None
- if boolean(task_vars.get('set_remote_user', 'yes')):
- if use_delegate:
- user = task_vars['hostvars'][conn.delegate].get('ansible_ssh_user')
-
- if not use_delegate or not user:
- user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
-
+ src = self._task.args.get('src', None)
+ dest = self._task.args.get('dest', None)
+ if between_multiple_hosts or use_delegate:
+ # Private key handling
if use_delegate:
- # FIXME
private_key = task_vars.get('ansible_ssh_private_key_file') or self._play_context.private_key_file
else:
private_key = task_vars.get('ansible_ssh_private_key_file') or self._play_context.private_key_file
@@ -142,23 +175,41 @@ class ActionModule(ActionBase):
private_key = os.path.expanduser(private_key)
self._task.args['private_key'] = private_key
+ # Src and dest rsync "path" handling
+ # Determine if we need a user@
+ user = None
+ if boolean(self._task.args.get('set_remote_user', 'yes')):
+ if use_delegate:
+ if 'hostvars' in task_vars and delegate_to in task_vars['hostvars']:
+ user = task_vars['hostvars'][delegate_to].get('ansible_ssh_user', None)
+
+ if not use_delegate or not user:
+ user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
+
# use the mode to define src and dest's url
if self._task.args.get('mode', 'push') == 'pull':
# src is a remote path: <user>@<host>, dest is a local path
- src = self._process_remote(src_host, src, user)
+ src = self._process_remote(src_host, src, user)
dest = self._process_origin(dest_host, dest, user)
else:
# src is a local path, dest is a remote path: <user>@<host>
- src = self._process_origin(src_host, src, user)
+ src = self._process_origin(src_host, src, user)
dest = self._process_remote(dest_host, dest, user)
+ else:
+ # Still need to munge paths (to account for roles) even if we aren't
+ # copying files between hosts
+ if not src.startswith('/'):
+ src = self._get_absolute_path(path=src)
+ if not dest.startswith('/'):
+ dest = self._get_absolute_path(path=dest)
- self._task.args['src'] = src
- self._task.args['dest'] = dest
+ self._task.args['src'] = src
+ self._task.args['dest'] = dest
- # Allow custom rsync path argument.
+ # Allow custom rsync path argument
rsync_path = self._task.args.get('rsync_path', None)
- # If no rsync_path is set, sudo was originally set, and dest is remote then add 'sudo rsync' argument.
+ # If no rsync_path is set, sudo was originally set, and dest is remote then add 'sudo rsync' argument
if not rsync_path and transport_overridden and self._play_context.become and self._play_context.become_method == 'sudo' and not dest_is_local:
rsync_path = 'sudo rsync'
@@ -167,15 +218,14 @@ class ActionModule(ActionBase):
self._task.args['rsync_path'] = '"%s"' % rsync_path
if use_ssh_args:
- self._task.args['ssh_args'] = constants.ANSIBLE_SSH_ARGS
-
- # Remove mode as it is handled purely in this action module
- if 'mode' in self._task.args:
- del self._task.args['mode']
-
+ self._task.args['ssh_args'] = C.ANSIBLE_SSH_ARGS
# run the module and store the result
result = self._execute_module('synchronize', task_vars=task_vars)
+        if 'SyntaxError' in result.get('msg', ''):
+ # Emit a warning about using python3 because synchronize is
+ # somewhat unique in running on localhost
+ result['traceback'] = result['msg']
+ result['msg'] = 'SyntaxError parsing module. Perhaps invoking "python" on your local (or delegate_to) machine invokes python3. You can set ansible_python_interpreter for localhost (or the delegate_to machine) to the location of python2 to fix this'
return result
-
diff --git a/lib/ansible/plugins/action/template.py b/lib/ansible/plugins/action/template.py
index 81b291ff49..7a68375a44 100644
--- a/lib/ansible/plugins/action/template.py
+++ b/lib/ansible/plugins/action/template.py
@@ -31,8 +31,8 @@ class ActionModule(ActionBase):
TRANSFERS_FILES = True
- def get_checksum(self, tmp, dest, try_directory=False, source=None):
- remote_checksum = self._remote_checksum(tmp, dest)
+ def get_checksum(self, tmp, dest, all_vars, try_directory=False, source=None):
+ remote_checksum = self._remote_checksum(tmp, dest, all_vars=all_vars)
if remote_checksum in ('0', '2', '3', '4'):
# Note: 1 means the file is not present which is fine; template
@@ -40,7 +40,7 @@ class ActionModule(ActionBase):
if try_directory and remote_checksum == '3' and source:
base = os.path.basename(source)
dest = os.path.join(dest, base)
- remote_checksum = self.get_checksum(tmp, dest, try_directory=False)
+ remote_checksum = self.get_checksum(tmp, dest, all_vars=all_vars, try_directory=False)
if remote_checksum not in ('0', '2', '3', '4'):
return remote_checksum
@@ -71,7 +71,7 @@ class ActionModule(ActionBase):
if self._task._role is not None:
source = self._loader.path_dwim_relative(self._task._role._role_path, 'templates', source)
else:
- source = self._loader.path_dwim(source)
+ source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'templates', source)
# Expand any user home dir specification
dest = self._remote_expand_user(dest, tmp)
@@ -111,6 +111,11 @@ class ActionModule(ActionBase):
time.localtime(os.path.getmtime(source))
)
+ self._templar.environment.searchpath = [self._loader._basedir, os.path.dirname(source)]
+ if self._task._role is not None:
+ self._templar.environment.searchpath.insert(1, C.DEFAULT_ROLES_PATH)
+ self._templar.environment.searchpath.insert(1, self._task._role._role_path)
+
old_vars = self._templar._available_variables
self._templar.set_available_variables(temp_vars)
resultant = self._templar.template(template_data, preserve_trailing_newlines=True, convert_data=False)
@@ -119,7 +124,7 @@ class ActionModule(ActionBase):
return dict(failed=True, msg=type(e).__name__ + ": " + str(e))
local_checksum = checksum_s(resultant)
- remote_checksum = self.get_checksum(tmp, dest, not directory_prepended, source=source)
+ remote_checksum = self.get_checksum(tmp, dest, task_vars, not directory_prepended, source=source)
if isinstance(remote_checksum, dict):
# Error from remote_checksum is a dict. Valid return is a str
return remote_checksum
diff --git a/lib/ansible/plugins/action/unarchive.py b/lib/ansible/plugins/action/unarchive.py
index 6a43259763..b5be23069a 100644
--- a/lib/ansible/plugins/action/unarchive.py
+++ b/lib/ansible/plugins/action/unarchive.py
@@ -61,9 +61,12 @@ class ActionModule(ActionBase):
if '_original_file' in task_vars:
source = self._loader.path_dwim_relative(task_vars['_original_file'], 'files', source)
else:
- source = self._loader.path_dwim(source)
+ if self._task._role is not None:
+ source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
+ else:
+                source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)
- remote_checksum = self._remote_checksum(tmp, dest)
+ remote_checksum = self._remote_checksum(tmp, dest, all_vars=task_vars)
if remote_checksum != '3':
return dict(failed=True, msg="dest '%s' must be an existing dir" % dest)
elif remote_checksum == '4':
diff --git a/lib/ansible/plugins/action/win_copy.py b/lib/ansible/plugins/action/win_copy.py
new file mode 100644
index 0000000000..54d94e12e6
--- /dev/null
+++ b/lib/ansible/plugins/action/win_copy.py
@@ -0,0 +1,28 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.plugins.action import ActionBase
+from ansible.plugins.action.copy import ActionModule as CopyActionModule
+
+# Even though CopyActionModule inherits from ActionBase, we still need to
+# directly inherit from ActionBase to appease the plugin loader.
class ActionModule(CopyActionModule, ActionBase):
    """Action plugin for win_copy; all behavior is inherited from the core copy action."""
    pass
diff --git a/lib/ansible/plugins/action/win_template.py b/lib/ansible/plugins/action/win_template.py
new file mode 100644
index 0000000000..03091d494f
--- /dev/null
+++ b/lib/ansible/plugins/action/win_template.py
@@ -0,0 +1,28 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.plugins.action import ActionBase
+from ansible.plugins.action.template import ActionModule as TemplateActionModule
+
+# Even though TemplateActionModule inherits from ActionBase, we still need to
+# directly inherit from ActionBase to appease the plugin loader.
class ActionModule(TemplateActionModule, ActionBase):
    """Action plugin for win_template; all behavior is inherited from the core template action."""
    pass
diff --git a/lib/ansible/plugins/callback/__init__.py b/lib/ansible/plugins/callback/__init__.py
index 841f345db4..25bc1a72ac 100644
--- a/lib/ansible/plugins/callback/__init__.py
+++ b/lib/ansible/plugins/callback/__init__.py
@@ -22,6 +22,7 @@ __metaclass__ = type
import json
import difflib
import warnings
+from copy import deepcopy
from six import string_types
@@ -50,11 +51,14 @@ class CallbackBase:
version = getattr(self, 'CALLBACK_VERSION', '1.0')
self._display.vvvv('Loaded callback %s of type %s, v%s' % (name, ctype, version))
- def _dump_results(self, result, indent=4, sort_keys=True):
+ def _dump_results(self, result, indent=None, sort_keys=True):
if result.get('_ansible_no_log', False):
return json.dumps(dict(censored="the output has been hidden due to the fact that 'no_log: true' was specified for this result"))
+ if not indent and '_ansible_verbose_always' in result and result['_ansible_verbose_always']:
+ indent = 4
+
# All result keys stating with _ansible_ are internal, so remove them from the result before we output anything.
for k in result.keys():
if isinstance(k, string_types) and k.startswith('_ansible_'):
@@ -68,34 +72,54 @@ class CallbackBase:
for warning in res['warnings']:
self._display.warning(warning)
- def _get_diff(self, diff):
- try:
- with warnings.catch_warnings():
- warnings.simplefilter('ignore')
- ret = []
- if 'dst_binary' in diff:
- ret.append("diff skipped: destination file appears to be binary\n")
- if 'src_binary' in diff:
- ret.append("diff skipped: source file appears to be binary\n")
- if 'dst_larger' in diff:
- ret.append("diff skipped: destination file size is greater than %d\n" % diff['dst_larger'])
- if 'src_larger' in diff:
- ret.append("diff skipped: source file size is greater than %d\n" % diff['src_larger'])
- if 'before' in diff and 'after' in diff:
- if 'before_header' in diff:
- before_header = "before: %s" % diff['before_header']
- else:
- before_header = 'before'
- if 'after_header' in diff:
- after_header = "after: %s" % diff['after_header']
- else:
- after_header = 'after'
- differ = difflib.unified_diff(to_unicode(diff['before']).splitlines(True), to_unicode(diff['after']).splitlines(True), before_header, after_header, '', '', 10)
- for line in list(differ):
- ret.append(line)
- return u"".join(ret)
- except UnicodeDecodeError:
- return ">> the files are different, but the diff library cannot compare unicode strings"
+ def _get_diff(self, difflist):
+
+ if not isinstance(difflist, list):
+ difflist = [difflist]
+
+ ret = []
+ for diff in difflist:
+ try:
+ with warnings.catch_warnings():
+ warnings.simplefilter('ignore')
+ ret = []
+ if 'dst_binary' in diff:
+ ret.append("diff skipped: destination file appears to be binary\n")
+ if 'src_binary' in diff:
+ ret.append("diff skipped: source file appears to be binary\n")
+ if 'dst_larger' in diff:
+ ret.append("diff skipped: destination file size is greater than %d\n" % diff['dst_larger'])
+ if 'src_larger' in diff:
+ ret.append("diff skipped: source file size is greater than %d\n" % diff['src_larger'])
+ if 'before' in diff and 'after' in diff:
+ if 'before_header' in diff:
+ before_header = "before: %s" % diff['before_header']
+ else:
+ before_header = 'before'
+ if 'after_header' in diff:
+ after_header = "after: %s" % diff['after_header']
+ else:
+ after_header = 'after'
+ differ = difflib.unified_diff(to_unicode(diff['before']).splitlines(True), to_unicode(diff['after']).splitlines(True), before_header, after_header, '', '', 10)
+ ret.extend(list(differ))
+ ret.append('\n')
+ return u"".join(ret)
+ except UnicodeDecodeError:
+ ret.append(">> the files are different, but the diff library cannot compare unicode strings\n\n")
+
def _process_items(self, result):
    # Fan a looped task's result out into per-item callback events.  Each
    # item dict in result._result['results'] is wrapped in a deep copy of the
    # task result so handlers see a normal-looking result object.
    # NOTE(review): this dispatches to v2_playbook_item_on_* methods; the
    # similarly named v2_playbook_on_item_* stubs defined on this class are
    # NOT the ones called here -- confirm which spelling is intended.
    for res in result._result['results']:
        newres = deepcopy(result)
        newres._result = res
        if 'failed' in res and res['failed']:
            self.v2_playbook_item_on_failed(newres)
        elif 'skipped' in res and res['skipped']:
            self.v2_playbook_item_on_skipped(newres)
        else:
            self.v2_playbook_item_on_ok(newres)

    #del result._result['results']
def set_play_context(self, play_context):
pass
@@ -254,3 +278,12 @@ class CallbackBase:
host = result._host.get_name()
if 'diff' in result._result:
self.on_file_diff(host, result._result['diff'])
+
def v2_playbook_on_item_ok(self, result):
    pass # no v1

def v2_playbook_on_item_failed(self, result):
    pass # no v1

def v2_playbook_on_item_skipped(self, result):
    pass # no v1

# _process_items() dispatches to the v2_playbook_item_on_* spelling, so also
# provide default no-op implementations under that name; without them a
# callback plugin that does not override these raises AttributeError the
# first time a looped task is reported.
def v2_playbook_item_on_ok(self, result):
    pass # no v1

def v2_playbook_item_on_failed(self, result):
    pass # no v1

def v2_playbook_item_on_skipped(self, result):
    pass # no v1
diff --git a/lib/ansible/plugins/callback/default.py b/lib/ansible/plugins/callback/default.py
index c56e300468..c963137968 100644
--- a/lib/ansible/plugins/callback/default.py
+++ b/lib/ansible/plugins/callback/default.py
@@ -46,7 +46,10 @@ class CallbackModule(CallbackBase):
# finally, remove the exception from the result so it's not shown every time
del result._result['exception']
- self._display.display("fatal: [%s]: FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)), color='red')
+ if result._task.loop and 'results' in result._result:
+ self._process_items(result)
+ else:
+ self._display.display("fatal: [%s]: FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)), color='red')
if result._task.ignore_errors:
self._display.display("...ignoring", color='cyan')
@@ -63,9 +66,13 @@ class CallbackModule(CallbackBase):
msg = "ok: [%s]" % result._host.get_name()
color = 'green'
- if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result and result._task.action != 'include':
- msg += " => %s" % self._dump_results(result._result)
- self._display.display(msg, color=color)
+ if result._task.loop and 'results' in result._result:
+ self._process_items(result)
+ else:
+
+ if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result and result._task.action != 'include':
+ msg += " => %s" % self._dump_results(result._result)
+ self._display.display(msg, color=color)
self._handle_warnings(result._result)
@@ -103,5 +110,47 @@ class CallbackModule(CallbackBase):
self._display.banner(msg)
def v2_on_file_diff(self, result):
- if 'diff' in result._result:
+ if 'diff' in result._result and result._result['diff']:
self._display.display(self._get_diff(result._result['diff']))
+
def v2_playbook_item_on_ok(self, result):
    """Print a single successful loop-item result with its (item=...) tag."""
    task_action = result._task.action
    item_result = result._result

    if task_action == 'include':
        line = 'included: %s for %s' % (result._task.args.get('_raw_params'), result._host.name)
        line_color = 'cyan'
    elif item_result.get('changed', False):
        line = "changed: [%s]" % result._host.get_name()
        line_color = 'yellow'
    else:
        line = "ok: [%s]" % result._host.get_name()
        line_color = 'green'

    line += " => (item=%s)" % item_result['item']

    wants_detail = self._display.verbosity > 0 or '_ansible_verbose_always' in item_result
    if wants_detail and '_ansible_verbose_override' not in item_result and task_action != 'include':
        line += " => %s" % self._dump_results(item_result)
    self._display.display(line, color=line_color)
+
def v2_playbook_item_on_failed(self, result):
    """Print a single failed loop item, surfacing any captured exception first."""
    item_result = result._result

    if 'exception' in item_result:
        if self._display.verbosity < 3:
            # extract just the actual error message from the exception text
            last_line = item_result['exception'].strip().split('\n')[-1]
            note = "An exception occurred during task execution. To see the full traceback, use -vvv. The error was: %s" % last_line
        else:
            note = "An exception occurred during task execution. The full traceback is:\n" + item_result['exception']

        self._display.display(note, color='red')

        # finally, remove the exception from the result so it's not shown every time
        del item_result['exception']

    summary = "failed: [%s] => (item=%s) => %s" % (result._host.get_name(), item_result['item'], self._dump_results(item_result))
    self._display.display(summary, color='red')
    self._handle_warnings(item_result)
+
def v2_playbook_item_on_skipped(self, result):
    """Print a single skipped loop item."""
    item_result = result._result
    line = "skipping: [%s] => (item=%s) " % (result._host.get_name(), item_result['item'])
    wants_detail = self._display.verbosity > 0 or '_ansible_verbose_always' in item_result
    if wants_detail and '_ansible_verbose_override' not in item_result:
        line += " => %s" % self._dump_results(item_result)
    self._display.display(line, color='cyan')
+
diff --git a/lib/ansible/plugins/callback/minimal.py b/lib/ansible/plugins/callback/minimal.py
index c14107694b..60c0ddbfe8 100644
--- a/lib/ansible/plugins/callback/minimal.py
+++ b/lib/ansible/plugins/callback/minimal.py
@@ -39,7 +39,7 @@ class CallbackModule(CallbackBase):
buf = "%s | %s | rc=%s >>\n" % (host, caption, result.get('rc',0))
buf += result.get('stdout','')
- buf += result.get('stdout','')
+ buf += result.get('stderr','')
buf += result.get('msg','')
return buf + "\n"
@@ -61,13 +61,13 @@ class CallbackModule(CallbackBase):
if result._task.action in C.MODULE_NO_JSON:
self._display.display(self._command_generic_msg(result._host.get_name(), result._result,"FAILED"), color='red')
else:
- self._display.display("%s | FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)), color='red')
+ self._display.display("%s | FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result, indent=4)), color='red')
def v2_runner_on_ok(self, result):
if result._task.action in C.MODULE_NO_JSON:
self._display.display(self._command_generic_msg(result._host.get_name(), result._result,"SUCCESS"), color='green')
else:
- self._display.display("%s | SUCCESS => %s" % (result._host.get_name(), self._dump_results(result._result)), color='green')
+ self._display.display("%s | SUCCESS => %s" % (result._host.get_name(), self._dump_results(result._result, indent=4)), color='green')
self._handle_warnings(result._result)
def v2_runner_on_skipped(self, result):
@@ -77,5 +77,5 @@ class CallbackModule(CallbackBase):
self._display.display("%s | UNREACHABLE!" % result._host.get_name(), color='yellow')
def v2_on_file_diff(self, result):
- if 'diff' in result._result:
+ if 'diff' in result._result and result._result['diff']:
self._display.display(self._get_diff(result._result['diff']))
diff --git a/lib/ansible/plugins/callback/skippy.py b/lib/ansible/plugins/callback/skippy.py
index db742a46ac..4502da0a63 100644
--- a/lib/ansible/plugins/callback/skippy.py
+++ b/lib/ansible/plugins/callback/skippy.py
@@ -46,7 +46,10 @@ class CallbackModule(CallbackBase):
# finally, remove the exception from the result so it's not shown every time
del result._result['exception']
- self._display.display("fatal: [%s]: FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)), color='red')
+ if result._task.loop and 'results' in result._result:
+ self._process_items(result)
+ else:
+ self._display.display("fatal: [%s]: FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)), color='red')
if result._task.ignore_errors:
self._display.display("...ignoring", color='cyan')
@@ -63,10 +66,13 @@ class CallbackModule(CallbackBase):
msg = "ok: [%s]" % result._host.get_name()
color = 'green'
- if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result and result._task.action != 'include':
- msg += " => %s" % self._dump_results(result._result)
+ if result._task.loop and 'results' in result._result:
+ self._process_items(result)
+ if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result and result._task.action != 'include':
+ msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=color)
+
self._handle_warnings(result._result)
def v2_runner_on_unreachable(self, result):
@@ -94,4 +100,50 @@ class CallbackModule(CallbackBase):
else:
msg = "PLAY [%s]" % name
- self._display.banner(name)
+ self._display.banner(msg)
+
+ def v2_on_file_diff(self, result):
+ if 'diff' in result._result and result._result['diff']:
+ self._display.display(self._get_diff(result._result['diff']))
+
+ def v2_playbook_item_on_ok(self, result):
+
+ if result._task.action == 'include':
+ msg = 'included: %s for %s' % (result._task.args.get('_raw_params'), result._host.name)
+ color = 'cyan'
+ elif result._result.get('changed', False):
+ msg = "changed: [%s]" % result._host.get_name()
+ color = 'yellow'
+ else:
+ msg = "ok: [%s]" % result._host.get_name()
+ color = 'green'
+
+ msg += " => (item=%s)" % result._result['item']
+
+ if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result and result._task.action != 'include':
+ msg += " => %s" % self._dump_results(result._result)
+ self._display.display(msg, color=color)
+
+ def v2_playbook_item_on_failed(self, result):
+ if 'exception' in result._result:
+ if self._display.verbosity < 3:
+ # extract just the actual error message from the exception text
+ error = result._result['exception'].strip().split('\n')[-1]
+ msg = "An exception occurred during task execution. To see the full traceback, use -vvv. The error was: %s" % error
+ else:
+ msg = "An exception occurred during task execution. The full traceback is:\n" + result._result['exception']
+
+ self._display.display(msg, color='red')
+
+ # finally, remove the exception from the result so it's not shown every time
+ del result._result['exception']
+
+ self._display.display("failed: [%s] => (item=%s) => %s" % (result._host.get_name(), result._result['item'], self._dump_results(result._result)), color='red')
+ self._handle_warnings(result._result)
+
+ def v2_playbook_item_on_skipped(self, result):
+ msg = "skipping: [%s] => (item=%s) " % (result._host.get_name(), result._result['item'])
+ if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result:
+ msg += " => %s" % self._dump_results(result._result)
+ self._display.display(msg, color='cyan')
+
diff --git a/lib/ansible/plugins/callback/tree.py b/lib/ansible/plugins/callback/tree.py
new file mode 100644
index 0000000000..717afd6bdf
--- /dev/null
+++ b/lib/ansible/plugins/callback/tree.py
@@ -0,0 +1,68 @@
+# (c) 2012-2014, Ansible, Inc
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+
+from ansible.plugins.callback import CallbackBase
+from ansible.utils.path import makedirs_safe
+from ansible.constants import TREE_DIR
+
+
+class CallbackModule(CallbackBase):
+ '''
+ This callback puts results into a host specific file in a directory in json format.
+ '''
+
+ CALLBACK_VERSION = 2.0
+ CALLBACK_TYPE = 'aggregate'
+ CALLBACK_NAME = 'tree'
+
+ def __init__(self, display):
+ super(CallbackModule, self).__init__(display)
+
+ self.tree = TREE_DIR
+ if not self.tree:
+ self._display.warnings("Disabling tree callback, invalid directory provided to tree option: %s" % self.tree)
+
+ def write_tree_file(self, hostname, buf):
+ ''' write something into treedir/hostname '''
+
+ try:
+ makedirs_safe(self.tree)
+ path = os.path.join(self.tree, hostname)
+ fd = open(path, "w+")
+ fd.write(buf)
+ fd.close()
+ except (OSError, IOError) as e:
+ self._display.warnings("Unable to write to %s's file: %s" % (hostname, str(e)))
+
+ def result_to_tree(self, result):
+ if self.tree:
+ self.write_tree_file(result._host.get_name(), self._dump_results(result._result))
+
+ def v2_runner_on_ok(self, result):
+ self.result_to_tree(result)
+
+ def v2_runner_on_failed(self, result):
+ self.result_to_tree(result)
+
+ def v2_runner_on_unreachable(self, result):
+ self.result_to_tree(result)
+
diff --git a/lib/ansible/plugins/connections/chroot.py b/lib/ansible/plugins/connections/chroot.py
index cc5cee7803..5dfd712ef7 100644
--- a/lib/ansible/plugins/connections/chroot.py
+++ b/lib/ansible/plugins/connections/chroot.py
@@ -24,47 +24,45 @@ import traceback
import os
import shlex
import subprocess
-from ansible import errors
+from ansible.errors import AnsibleError
from ansible import utils
from ansible.utils.unicode import to_bytes
-from ansible.callbacks import vvv
import ansible.constants as C
-BUFSIZE = 65536
-class Connection(object):
+class Connection(ConnectionBase):
''' Local chroot based connections '''
- def __init__(self, runner, host, port, *args, **kwargs):
- self.chroot = host
- self.has_pipelining = False
- self.become_methods_supported=C.BECOME_METHODS
+ BUFSIZE = 65536
+ has_pipelining = False
+
+ def __init__(self, *args, **kwargs):
+
+ super(Connection, self).__init__(*args, **kwargs)
+
+ self.chroot = self._play_context.remote_addr
if os.geteuid() != 0:
- raise errors.AnsibleError("chroot connection requires running as root")
+ raise AnsibleError("chroot connection requires running as root")
# we're running as root on the local system so do some
# trivial checks for ensuring 'host' is actually a chroot'able dir
if not os.path.isdir(self.chroot):
- raise errors.AnsibleError("%s is not a directory" % self.chroot)
+ raise AnsibleError("%s is not a directory" % self.chroot)
chrootsh = os.path.join(self.chroot, 'bin/sh')
if not utils.is_executable(chrootsh):
- raise errors.AnsibleError("%s does not look like a chrootable dir (/bin/sh missing)" % self.chroot)
+ raise AnsibleError("%s does not look like a chrootable dir (/bin/sh missing)" % self.chroot)
self.chroot_cmd = distutils.spawn.find_executable('chroot')
if not self.chroot_cmd:
- raise errors.AnsibleError("chroot command not found in PATH")
+ raise AnsibleError("chroot command not found in PATH")
- self.runner = runner
- self.host = host
- # port is unused, since this is local
- self.port = port
- def connect(self, port=None):
+ def _connect(self, port=None):
''' connect to the chroot; nothing to do here '''
- vvv("THIS IS A LOCAL CHROOT DIR", host=self.chroot)
+ self._display.vvv("THIS IS A LOCAL CHROOT DIR", host=self.chroot)
return self
@@ -89,15 +87,15 @@ class Connection(object):
'''
if sudoable and self.runner.become and self.runner.become_method not in self.become_methods_supported:
- raise errors.AnsibleError("Internal Error: this module does not support running commands via %s" % self.runner.become_method)
+ raise AnsibleError("Internal Error: this module does not support running commands via %s" % self.runner.become_method)
if in_data:
- raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
+ raise AnsibleError("Internal Error: this module does not support optimized module pipelining")
    # We enter zone as root so we ignore privilege escalation (probably need to fix in case we have to become a specific user [ex: postgres admin])?
local_cmd = self._generate_cmd(executable, cmd)
- vvv("EXEC %s" % (local_cmd), host=self.chroot)
+ self._display.vvv("EXEC %s" % (local_cmd), host=self.chroot)
p = subprocess.Popen(local_cmd, shell=False,
cwd=self.runner.basedir,
stdin=stdin,
@@ -116,33 +114,33 @@ class Connection(object):
def put_file(self, in_path, out_path):
''' transfer a file from local to chroot '''
- vvv("PUT %s TO %s" % (in_path, out_path), host=self.chroot)
+ self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.chroot)
try:
with open(in_path, 'rb') as in_file:
try:
p = self._buffered_exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), None, stdin=in_file)
except OSError:
- raise errors.AnsibleError("chroot connection requires dd command in the chroot")
+ raise AnsibleError("chroot connection requires dd command in the chroot")
try:
stdout, stderr = p.communicate()
except:
traceback.print_exc()
- raise errors.AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
+ raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
if p.returncode != 0:
- raise errors.AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
+ raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
except IOError:
- raise errors.AnsibleError("file or module does not exist at: %s" % in_path)
+ raise AnsibleError("file or module does not exist at: %s" % in_path)
def fetch_file(self, in_path, out_path):
''' fetch a file from chroot to local '''
- vvv("FETCH %s TO %s" % (in_path, out_path), host=self.chroot)
+ self._display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.chroot)
try:
p = self._buffered_exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE), None)
except OSError:
- raise errors.AnsibleError("chroot connection requires dd command in the chroot")
+ raise AnsibleError("chroot connection requires dd command in the chroot")
with open(out_path, 'wb+') as out_file:
try:
@@ -152,10 +150,10 @@ class Connection(object):
chunk = p.stdout.read(BUFSIZE)
except:
traceback.print_exc()
- raise errors.AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
+ raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
stdout, stderr = p.communicate()
if p.returncode != 0:
- raise errors.AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
+ raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
def close(self):
''' terminate the connection; nothing to do here '''
diff --git a/lib/ansible/plugins/connections/paramiko_ssh.py b/lib/ansible/plugins/connections/paramiko_ssh.py
index b3932b613d..df97a6e3a5 100644
--- a/lib/ansible/plugins/connections/paramiko_ssh.py
+++ b/lib/ansible/plugins/connections/paramiko_ssh.py
@@ -179,6 +179,7 @@ class Connection(ConnectionBase):
key_filename=key_filename,
password=self._play_context.password,
timeout=self._play_context.timeout,
+ compress=True,
port=port,
)
except Exception as e:
@@ -229,23 +230,29 @@ class Connection(ConnectionBase):
chan.exec_command(cmd)
if self._play_context.prompt:
if self._play_context.become and self._play_context.become_pass:
+ passprompt = False
while True:
self._display.debug('Waiting for Privilege Escalation input')
- if self.check_become_success(become_output) or self.check_password_prompt(become_output):
+ if self.check_become_success(become_output):
break
+ elif self.check_password_prompt(become_output):
+ passprompt = True
+ break
+
chunk = chan.recv(bufsize)
- print("chunk is: %s" % chunk)
+ self._display.debug("chunk is: %s" % chunk)
if not chunk:
if 'unknown user' in become_output:
- raise AnsibleError(
- 'user %s does not exist' % become_user)
+ raise AnsibleError( 'user %s does not exist' % become_user)
else:
- raise AnsibleError('ssh connection ' +
- 'closed waiting for password prompt')
+ break
+ #raise AnsibleError('ssh connection closed waiting for password prompt')
become_output += chunk
- if not self.check_become_success(become_output):
- if self._play_context.become:
+ if passprompt:
+ if self._play_context.become and self._play_context.become_pass:
chan.sendall(self._play_context.become_pass + '\n')
+ else:
+                    raise AnsibleError("A password is required but none was supplied")
else:
no_prompt_out += become_output
no_prompt_err += become_output
diff --git a/lib/ansible/plugins/connections/ssh.py b/lib/ansible/plugins/connections/ssh.py
index b0b8a3db7d..9c16168413 100644
--- a/lib/ansible/plugins/connections/ssh.py
+++ b/lib/ansible/plugins/connections/ssh.py
@@ -57,11 +57,7 @@ class Connection(ConnectionBase):
super(Connection, self).__init__(*args, **kwargs)
- # FIXME: make this work, should be set from connection info
- self._ipv6 = False
self.host = self._play_context.remote_addr
- if self._ipv6:
- self.host = '[%s]' % self.host
@property
def transport(self):
@@ -243,21 +239,23 @@ class Connection(ConnectionBase):
tokens = line.split()
if not tokens:
continue
- if tokens[0].find(self.HASHED_KEY_MAGIC) == 0:
- # this is a hashed known host entry
- try:
- (kn_salt,kn_host) = tokens[0][len(self.HASHED_KEY_MAGIC):].split("|",2)
- hash = hmac.new(kn_salt.decode('base64'), digestmod=sha1)
- hash.update(host)
- if hash.digest() == kn_host.decode('base64'):
+
+ if isinstance(tokens, list) and tokens: # skip invalid hostlines
+ if tokens[0].find(self.HASHED_KEY_MAGIC) == 0:
+ # this is a hashed known host entry
+ try:
+ (kn_salt,kn_host) = tokens[0][len(self.HASHED_KEY_MAGIC):].split("|",2)
+ hash = hmac.new(kn_salt.decode('base64'), digestmod=sha1)
+ hash.update(host)
+ if hash.digest() == kn_host.decode('base64'):
+ return False
+ except:
+ # invalid hashed host key, skip it
+ continue
+ else:
+ # standard host file entry
+ if host in tokens[0]:
return False
- except:
- # invalid hashed host key, skip it
- continue
- else:
- # standard host file entry
- if host in tokens[0]:
- return False
if (hfiles_not_found == len(host_file_list)):
self._display.vvv("EXEC previous known host file not found for {0}".format(host))
@@ -340,8 +338,6 @@ class Connection(ConnectionBase):
ssh_cmd.append("-q")
ssh_cmd += self._common_args
- if self._ipv6:
- ssh_cmd += ['-6']
ssh_cmd.append(self.host)
ssh_cmd.append(cmd)
@@ -375,11 +371,19 @@ class Connection(ConnectionBase):
become_output = ''
become_errput = ''
+ passprompt = False
while True:
self._display.debug('Waiting for Privilege Escalation input')
- if self.check_become_success(become_output) or self.check_password_prompt(become_output):
+
+ if self.check_become_success(become_output + become_errput):
+            self._display.debug('Succeeded!')
+ break
+ elif self.check_password_prompt(become_output) or self.check_password_prompt(become_errput):
+ self._display.debug('Password prompt!')
+ passprompt = True
break
+ self._display.debug('Read next chunks')
rfd, wfd, efd = select.select([p.stdout, p.stderr], [], [p.stdout], self._play_context.timeout)
if not rfd:
# timeout. wrap up process communication
@@ -389,16 +393,20 @@ class Connection(ConnectionBase):
elif p.stderr in rfd:
chunk = p.stderr.read()
become_errput += chunk
+ self._display.debug('stderr chunk is: %s' % chunk)
self.check_incorrect_password(become_errput)
elif p.stdout in rfd:
chunk = p.stdout.read()
become_output += chunk
+ self._display.debug('stdout chunk is: %s' % chunk)
+
if not chunk:
- raise AnsibleError('Connection closed waiting for privilege escalation password prompt: %s ' % become_output)
+ break
+ #raise AnsibleError('Connection closed waiting for privilege escalation password prompt: %s ' % become_output)
- if not self.check_become_success(become_output):
+ if passprompt:
self._display.debug("Sending privilege escalation password.")
stdin.write(self._play_context.become_pass + '\n')
else:
@@ -435,15 +443,19 @@ class Connection(ConnectionBase):
raise AnsibleFileNotFound("file or module does not exist: {0}".format(in_path))
cmd = self._password_cmd()
+ # scp and sftp require square brackets for IPv6 addresses, but
+ # accept them for hostnames and IPv4 addresses too.
+ host = '[%s]' % self.host
+
if C.DEFAULT_SCP_IF_SSH:
cmd.append('scp')
cmd.extend(self._common_args)
- cmd.extend([in_path, '{0}:{1}'.format(self.host, pipes.quote(out_path))])
+ cmd.extend([in_path, '{0}:{1}'.format(host, pipes.quote(out_path))])
indata = None
else:
cmd.append('sftp')
cmd.extend(self._common_args)
- cmd.append(self.host)
+ cmd.append(host)
indata = "put {0} {1}\n".format(pipes.quote(in_path), pipes.quote(out_path))
(p, stdin) = self._run(cmd, indata)
diff --git a/lib/ansible/plugins/connections/winrm.py b/lib/ansible/plugins/connections/winrm.py
index d97db39662..0e19b93ac2 100644
--- a/lib/ansible/plugins/connections/winrm.py
+++ b/lib/ansible/plugins/connections/winrm.py
@@ -45,7 +45,7 @@ from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNo
from ansible.plugins.connections import ConnectionBase
from ansible.plugins import shell_loader
from ansible.utils.path import makedirs_safe
-from ansible.utils.unicode import to_bytes
+from ansible.utils.unicode import to_bytes, to_unicode
class Connection(ConnectionBase):
'''WinRM connections over HTTP/HTTPS.'''
@@ -94,7 +94,7 @@ class Connection(ConnectionBase):
endpoint = parse.urlunsplit((scheme, netloc, '/wsman', '', ''))
- self._display.debug('WINRM CONNECT: transport=%s endpoint=%s' % (transport, endpoint), host=self._play_context.remote_addr)
+ self._display.vvvvv('WINRM CONNECT: transport=%s endpoint=%s' % (transport, endpoint), host=self._play_context.remote_addr)
protocol = Protocol(
endpoint,
transport=transport,
@@ -117,30 +117,30 @@ class Connection(ConnectionBase):
raise AnsibleError("the username/password specified for this server was incorrect")
elif code == 411:
return protocol
- self._display.debug('WINRM CONNECTION ERROR: %s' % err_msg, host=self._play_context.remote_addr)
+ self._display.vvvvv('WINRM CONNECTION ERROR: %s' % err_msg, host=self._play_context.remote_addr)
continue
if exc:
raise AnsibleError(str(exc))
def _winrm_exec(self, command, args=(), from_exec=False):
if from_exec:
- self._display.debug("WINRM EXEC %r %r" % (command, args), host=self._play_context.remote_addr)
+ self._display.vvvvv("WINRM EXEC %r %r" % (command, args), host=self._play_context.remote_addr)
else:
- self._display.debugv("WINRM EXEC %r %r" % (command, args), host=self._play_context.remote_addr)
+ self._display.vvvvvv("WINRM EXEC %r %r" % (command, args), host=self._play_context.remote_addr)
if not self.protocol:
self.protocol = self._winrm_connect()
if not self.shell_id:
- self.shell_id = self.protocol.open_shell()
+ self.shell_id = self.protocol.open_shell(codepage=65001) # UTF-8
command_id = None
try:
- command_id = self.protocol.run_command(self.shell_id, command, args)
+ command_id = self.protocol.run_command(self.shell_id, to_bytes(command), map(to_bytes, args))
response = Response(self.protocol.get_command_output(self.shell_id, command_id))
if from_exec:
- self._display.debug('WINRM RESULT %r' % response, host=self._play_context.remote_addr)
+ self._display.vvvvv('WINRM RESULT %r' % to_unicode(response), host=self._play_context.remote_addr)
else:
- self._display.debugv('WINRM RESULT %r' % response, host=self._play_context.remote_addr)
- self._display.debugv('WINRM STDOUT %s' % response.std_out, host=self._play_context.remote_addr)
- self._display.debugv('WINRM STDERR %s' % response.std_err, host=self._play_context.remote_addr)
+ self._display.vvvvv('WINRM RESULT %r' % to_unicode(response), host=self._play_context.remote_addr)
+ self._display.vvvvvv('WINRM STDOUT %s' % to_unicode(response.std_out), host=self._play_context.remote_addr)
+ self._display.vvvvvv('WINRM STDERR %s' % to_unicode(response.std_err), host=self._play_context.remote_addr)
return response
finally:
if command_id:
@@ -153,34 +153,42 @@ class Connection(ConnectionBase):
def exec_command(self, cmd, tmp_path, in_data=None, sudoable=True):
super(Connection, self).exec_command(cmd, tmp_path, in_data=in_data, sudoable=sudoable)
-
- cmd = to_bytes(cmd)
- cmd_parts = shlex.split(cmd, posix=False)
+ cmd_parts = shlex.split(to_bytes(cmd), posix=False)
+ cmd_parts = map(to_unicode, cmd_parts)
+ script = None
+ cmd_ext = cmd_parts and self._shell._unquote(cmd_parts[0]).lower()[-4:] or ''
+ # Support running .ps1 files (via script/raw).
+ if cmd_ext == '.ps1':
+ script = ' '.join(['&'] + cmd_parts)
+ # Support running .bat/.cmd files; change back to the default system encoding instead of UTF-8.
+ elif cmd_ext in ('.bat', '.cmd'):
+ script = ' '.join(['[System.Console]::OutputEncoding = [System.Text.Encoding]::Default;', '&'] + cmd_parts)
+ # Encode the command if not already encoded; supports running simple PowerShell commands via raw.
+ elif '-EncodedCommand' not in cmd_parts:
+ script = ' '.join(cmd_parts)
+ if script:
+ cmd_parts = self._shell._encode_script(script, as_list=True)
if '-EncodedCommand' in cmd_parts:
encoded_cmd = cmd_parts[cmd_parts.index('-EncodedCommand') + 1]
- decoded_cmd = base64.b64decode(encoded_cmd)
+ decoded_cmd = to_unicode(base64.b64decode(encoded_cmd))
self._display.vvv("EXEC %s" % decoded_cmd, host=self._play_context.remote_addr)
else:
self._display.vvv("EXEC %s" % cmd, host=self._play_context.remote_addr)
- # For script/raw support.
- if cmd_parts and cmd_parts[0].lower().endswith('.ps1'):
- script = self._shell._build_file_cmd(cmd_parts, quote_args=False)
- cmd_parts = self._shell._encode_script(script, as_list=True)
try:
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:], from_exec=True)
except Exception as e:
traceback.print_exc()
raise AnsibleError("failed to exec cmd %s" % cmd)
- result.std_out = to_bytes(result.std_out)
- result.std_err = to_bytes(result.std_err)
+ result.std_out = to_unicode(result.std_out)
+ result.std_err = to_unicode(result.std_err)
return (result.status_code, '', result.std_out, result.std_err)
def put_file(self, in_path, out_path):
super(Connection, self).put_file(in_path, out_path)
-
- self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._play_context.remote_addr)
+ out_path = self._shell._unquote(out_path)
+ self._display.vvv('PUT "%s" TO "%s"' % (in_path, out_path), host=self._play_context.remote_addr)
if not os.path.exists(in_path):
- raise AnsibleFileNotFound("file or module does not exist: %s" % in_path)
+ raise AnsibleFileNotFound('file or module does not exist: "%s"' % in_path)
with open(in_path) as in_file:
in_size = os.path.getsize(in_path)
script_template = '''
@@ -206,20 +214,20 @@ class Connection(ConnectionBase):
out_path = out_path + '.ps1'
b64_data = base64.b64encode(out_data)
script = script_template % (self._shell._escape(out_path), offset, b64_data, in_size)
- self._display.debug("WINRM PUT %s to %s (offset=%d size=%d)" % (in_path, out_path, offset, len(out_data)), host=self._play_context.remote_addr)
+ self._display.vvvvv('WINRM PUT "%s" to "%s" (offset=%d size=%d)' % (in_path, out_path, offset, len(out_data)), host=self._play_context.remote_addr)
cmd_parts = self._shell._encode_script(script, as_list=True)
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:])
if result.status_code != 0:
- raise IOError(result.std_err.encode('utf-8'))
+ raise IOError(to_unicode(result.std_err))
except Exception:
traceback.print_exc()
- raise AnsibleError("failed to transfer file to %s" % out_path)
+ raise AnsibleError('failed to transfer file to "%s"' % out_path)
def fetch_file(self, in_path, out_path):
super(Connection, self).fetch_file(in_path, out_path)
-
+ in_path = self._shell._unquote(in_path)
out_path = out_path.replace('\\', '/')
- self._display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self._play_context.remote_addr)
+ self._display.vvv('FETCH "%s" TO "%s"' % (in_path, out_path), host=self._play_context.remote_addr)
buffer_size = 2**19 # 0.5MB chunks
makedirs_safe(os.path.dirname(out_path))
out_file = None
@@ -248,11 +256,11 @@ class Connection(ConnectionBase):
Exit 1;
}
''' % dict(buffer_size=buffer_size, path=self._shell._escape(in_path), offset=offset)
- self._display.debug("WINRM FETCH %s to %s (offset=%d)" % (in_path, out_path, offset), host=self._play_context.remote_addr)
+ self._display.vvvvv('WINRM FETCH "%s" to "%s" (offset=%d)' % (in_path, out_path, offset), host=self._play_context.remote_addr)
cmd_parts = self._shell._encode_script(script, as_list=True)
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:])
if result.status_code != 0:
- raise IOError(result.std_err.encode('utf-8'))
+ raise IOError(to_unicode(result.std_err))
if result.std_out.strip() == '[DIR]':
data = None
else:
@@ -272,7 +280,7 @@ class Connection(ConnectionBase):
offset += len(data)
except Exception:
traceback.print_exc()
- raise AnsibleError("failed to transfer file to %s" % out_path)
+ raise AnsibleError('failed to transfer file to "%s"' % out_path)
finally:
if out_file:
out_file.close()
diff --git a/lib/ansible/plugins/filter/core.py b/lib/ansible/plugins/filter/core.py
index b8b506e508..84e055b61f 100644
--- a/lib/ansible/plugins/filter/core.py
+++ b/lib/ansible/plugins/filter/core.py
@@ -21,6 +21,7 @@ import sys
import base64
import json
import os.path
+import ntpath
import types
import pipes
import glob
@@ -85,55 +86,6 @@ def to_nice_json(a, *args, **kw):
return to_json(a, *args, **kw)
return json.dumps(a, indent=4, sort_keys=True, *args, **kw)
-def failed(*a, **kw):
- ''' Test if task result yields failed '''
- item = a[0]
- if type(item) != dict:
- raise errors.AnsibleFilterError("|failed expects a dictionary")
- rc = item.get('rc',0)
- failed = item.get('failed',False)
- if rc != 0 or failed:
- return True
- else:
- return False
-
-def success(*a, **kw):
- ''' Test if task result yields success '''
- return not failed(*a, **kw)
-
-def changed(*a, **kw):
- ''' Test if task result yields changed '''
- item = a[0]
- if type(item) != dict:
- raise errors.AnsibleFilterError("|changed expects a dictionary")
- if not 'changed' in item:
- changed = False
- if ('results' in item # some modules return a 'results' key
- and type(item['results']) == list
- and type(item['results'][0]) == dict):
- for result in item['results']:
- changed = changed or result.get('changed', False)
- else:
- changed = item.get('changed', False)
- return changed
-
-def skipped(*a, **kw):
- ''' Test if task result yields skipped '''
- item = a[0]
- if type(item) != dict:
- raise errors.AnsibleFilterError("|skipped expects a dictionary")
- skipped = item.get('skipped', False)
- return skipped
-
-def mandatory(a):
- ''' Make a variable mandatory '''
- try:
- a
- except NameError:
- raise errors.AnsibleFilterError('Mandatory variable not defined.')
- else:
- return a
-
def bool(a):
''' return a bool for the arg '''
if a is None or type(a) == bool:
@@ -153,27 +105,6 @@ def fileglob(pathname):
''' return list of matched files for glob '''
return glob.glob(pathname)
-def regex(value='', pattern='', ignorecase=False, match_type='search'):
- ''' Expose `re` as a boolean filter using the `search` method by default.
- This is likely only useful for `search` and `match` which already
- have their own filters.
- '''
- if ignorecase:
- flags = re.I
- else:
- flags = 0
- _re = re.compile(pattern, flags=flags)
- _bool = __builtins__.get('bool')
- return _bool(getattr(_re, match_type, 'search')(value))
-
-def match(value, pattern='', ignorecase=False):
- ''' Perform a `re.match` returning a boolean '''
- return regex(value, pattern, ignorecase, 'match')
-
-def search(value, pattern='', ignorecase=False):
- ''' Perform a `re.search` returning a boolean '''
- return regex(value, pattern, ignorecase, 'search')
-
def regex_replace(value='', pattern='', replacement='', ignorecase=False):
''' Perform a `re.sub` returning a string '''
@@ -292,6 +223,14 @@ def get_encrypted_password(password, hashtype='sha512', salt=None):
def to_uuid(string):
return str(uuid.uuid5(UUID_NAMESPACE_ANSIBLE, str(string)))
+def mandatory(a):
+ from jinja2.runtime import Undefined
+
+ ''' Make a variable mandatory '''
+ if isinstance(a, Undefined):
+ raise errors.AnsibleFilterError('Mandatory variable not defined.')
+ return a
+
class FilterModule(object):
''' Ansible core jinja2 filters '''
@@ -321,19 +260,9 @@ class FilterModule(object):
'realpath': partial(unicode_wrap, os.path.realpath),
'relpath': partial(unicode_wrap, os.path.relpath),
'splitext': partial(unicode_wrap, os.path.splitext),
-
- # failure testing
- 'failed' : failed,
- 'success' : success,
-
- # changed testing
- 'changed' : changed,
-
- # skip testing
- 'skipped' : skipped,
-
- # variable existence
- 'mandatory': mandatory,
+ 'win_basename': partial(unicode_wrap, ntpath.basename),
+ 'win_dirname': partial(unicode_wrap, ntpath.dirname),
+ 'win_splitdrive': partial(unicode_wrap, ntpath.splitdrive),
# value as boolean
'bool': bool,
@@ -356,9 +285,6 @@ class FilterModule(object):
'fileglob': fileglob,
# regex
- 'match': match,
- 'search': search,
- 'regex': regex,
'regex_replace': regex_replace,
'regex_escape': regex_escape,
@@ -372,4 +298,6 @@ class FilterModule(object):
# random stuff
'random': rand,
'shuffle': randomize_list,
+ # undefined
+ 'mandatory': mandatory,
}
diff --git a/lib/ansible/plugins/inventory/README.md b/lib/ansible/plugins/inventory/README.md
new file mode 100644
index 0000000000..1988f9adc6
--- /dev/null
+++ b/lib/ansible/plugins/inventory/README.md
@@ -0,0 +1 @@
+These are not currently in use, but this is what the future of inventory will become after 2.0
diff --git a/lib/ansible/plugins/inventory/directory.py b/lib/ansible/plugins/inventory/directory.py
index a75ad44ea6..2a2a3c3193 100644
--- a/lib/ansible/plugins/inventory/directory.py
+++ b/lib/ansible/plugins/inventory/directory.py
@@ -22,11 +22,14 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
+from ansible import constants as C
from . aggregate import InventoryAggregateParser
class InventoryDirectoryParser(InventoryAggregateParser):
+ CONDITION="is_dir(%s)"
+
def __init__(self, inven_directory):
directory = inven_directory
names = os.listdir(inven_directory)
@@ -35,7 +38,7 @@ class InventoryDirectoryParser(InventoryAggregateParser):
# Clean up the list of filenames
for filename in names:
# Skip files that end with certain extensions or characters
- if any(filename.endswith(ext) for ext in ("~", ".orig", ".bak", ".ini", ".retry", ".pyc", ".pyo")):
+ if any(filename.endswith(ext) for ext in C.DEFAULT_INVENTORY_IGNORE):
continue
# Skip hidden files
if filename.startswith('.') and not filename.startswith('.{0}'.format(os.path.sep)):
diff --git a/lib/ansible/plugins/inventory/ini.py b/lib/ansible/plugins/inventory/ini.py
index e185c1a785..bc6a2bc489 100644
--- a/lib/ansible/plugins/inventory/ini.py
+++ b/lib/ansible/plugins/inventory/ini.py
@@ -23,10 +23,13 @@ __metaclass__ = type
import os
+from ansible import constants as C
from . import InventoryParser
class InventoryIniParser(InventoryAggregateParser):
+ CONDITION="is_file(%s)"
+
def __init__(self, inven_directory):
directory = inven_directory
names = os.listdir(inven_directory)
@@ -35,7 +38,7 @@ class InventoryIniParser(InventoryAggregateParser):
# Clean up the list of filenames
for filename in names:
# Skip files that end with certain extensions or characters
- if any(filename.endswith(ext) for ext in ("~", ".orig", ".bak", ".ini", ".retry", ".pyc", ".pyo")):
+ if any(filename.endswith(ext) for ext in C.DEFAULT_INVENTORY_IGNORE):
continue
# Skip hidden files
if filename.startswith('.') and not filename.startswith('.{0}'.format(os.path.sep)):
@@ -50,11 +53,3 @@ class InventoryIniParser(InventoryAggregateParser):
def parse(self):
return super(InventoryDirectoryParser, self).parse()
-
- def _before_comment(self, msg):
- ''' what's the part of a string before a comment? '''
- msg = msg.replace("\#","**NOT_A_COMMENT**")
- msg = msg.split("#")[0]
- msg = msg.replace("**NOT_A_COMMENT**","#")
- return msg
-
diff --git a/lib/ansible/plugins/inventory/script.py b/lib/ansible/plugins/inventory/script.py
new file mode 100644
index 0000000000..1346bf3562
--- /dev/null
+++ b/lib/ansible/plugins/inventory/script.py
@@ -0,0 +1,31 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+#############################################
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+
+from ansible import constants as C
+from . import InventoryParser
+
+class InventoryScriptParser(InventoryParser):
+
+ CONDITION="is_file(%s) and is_executable(%s)"
diff --git a/lib/ansible/plugins/lookup/__init__.py b/lib/ansible/plugins/lookup/__init__.py
index 9ce4add701..5abca22ac7 100644
--- a/lib/ansible/plugins/lookup/__init__.py
+++ b/lib/ansible/plugins/lookup/__init__.py
@@ -28,8 +28,9 @@ except ImportError:
__all__ = ['LookupBase']
class LookupBase:
- def __init__(self, loader=None, **kwargs):
+ def __init__(self, loader=None, templar=None, **kwargs):
self._loader = loader
+ self._templar = templar
self._display = display
def get_basedir(self, variables):
diff --git a/lib/ansible/plugins/lookup/cartesian.py b/lib/ansible/plugins/lookup/cartesian.py
index 7d8e08cb94..ce31ef9700 100644
--- a/lib/ansible/plugins/lookup/cartesian.py
+++ b/lib/ansible/plugins/lookup/cartesian.py
@@ -29,16 +29,22 @@ class LookupModule(LookupBase):
[1, 2, 3], [a, b] -> [1, a], [1, b], [2, a], [2, b], [3, a], [3, b]
"""
- def __lookup_variabless(self, terms, variables):
+ def _lookup_variables(self, terms):
+ """
+ Turn this:
+ terms == ["1,2,3", "a,b"]
+ into this:
+ terms == [[1,2,3], [a, b]]
+ """
results = []
for x in terms:
- intermediate = listify_lookup_plugin_terms(x, variables, loader=self._loader)
+ intermediate = listify_lookup_plugin_terms(x, templar=self._templar, loader=self._loader)
results.append(intermediate)
return results
def run(self, terms, variables=None, **kwargs):
- terms = self.__lookup_variabless(terms, variables)
+ terms = self._lookup_variables(terms)
my_list = terms[:]
if len(my_list) == 0:
diff --git a/lib/ansible/plugins/lookup/consul_kv.py b/lib/ansible/plugins/lookup/consul_kv.py
new file mode 100755
index 0000000000..5da1a5bef0
--- /dev/null
+++ b/lib/ansible/plugins/lookup/consul_kv.py
@@ -0,0 +1,133 @@
+# (c) 2015, Steve Gargan <steve.gargan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+'''
+Lookup plugin to grab metadata from a consul key value store.
+============================================================
+
+Plugin will lookup metadata for a playbook from the key value store in a
+consul cluster. Values can be easily set in the kv store with simple rest
+commands e.g.
+
+curl -X PUT -d 'some-value' http://localhost:8500/v1/kv/ansible/somedata
+
+this can then be looked up in a playbook as follows
+
+- debug: msg='key contains {{item}}'
+ with_consul_kv:
+ - 'key/to/retrieve'
+
+
+Parameters can be provided after the key to be more specific about what to retrieve e.g.
+
+- debug: msg='key contains {{item}}'
+ with_consul_kv:
+  - 'key/to/retrieve recurse=true token=E6C060A9-26FB-407A-B83E-12DDAFCB4D98'
+
+recurse: if true, will retrieve all the values that have the given key as prefix
+index: if the key has a value with the specified index then this is returned
+ allowing access to historical values.
+token: acl token to allow access to restricted values.
+
+By default this will lookup keys via the consul agent running on http://localhost:8500
+this can be changed by setting the env variable 'ANSIBLE_CONSUL_URL' to point to the url
+of the kv store you'd like to use.
+
+'''
+
+######################################################################
+
+import os
+import sys
+from urlparse import urlparse
+from ansible.errors import AnsibleError
+from ansible.plugins.lookup import LookupBase
+
+try:
+ import json
+except ImportError:
+ import simplejson as json
+
+try:
+ import consul
+ HAS_CONSUL = True
+except ImportError, e:
+ HAS_CONSUL = False
+
+
class LookupModule(LookupBase):
    """Look up values in a Consul key/value store.

    The agent queried defaults to http://localhost:8500; this can be
    overridden with the ANSIBLE_CONSUL_URL environment variable.
    """

    def __init__(self, loader=None, templar=None, **kwargs):
        # NOTE: super() must name *this* class. The original called
        # super(LookupBase, self), which resolves past LookupBase in the MRO
        # and silently skips LookupBase.__init__, leaving _loader/_templar/
        # _display unset.
        super(LookupModule, self).__init__(loader=loader, templar=templar, **kwargs)

        # Single env lookup with a default instead of getenv()-then-reread.
        self.agent_url = os.getenv('ANSIBLE_CONSUL_URL', 'http://localhost:8500')

    def run(self, terms, variables=None, **kwargs):
        """Fetch one value per term (or a subtree of values with recurse=true).

        :raises AnsibleError: if python-consul is missing or a lookup fails.
        """
        if not HAS_CONSUL:
            raise AnsibleError('python-consul is required for consul_kv lookup. see http://python-consul.readthedocs.org/en/latest/#installation')

        u = urlparse(self.agent_url)
        consul_api = consul.Consul(host=u.hostname, port=u.port)

        values = []
        for term in terms:
            params = self.parse_params(term)
            try:
                results = consul_api.kv.get(params['key'],
                                            token=params['token'],
                                            index=params['index'],
                                            recurse=params['recurse'])
                if results[1]:
                    # responds with a single result map or a list of them
                    if isinstance(results[1], list):
                        for r in results[1]:
                            values.append(r['Value'])
                    else:
                        values.append(results[1]['Value'])
            except Exception as e:
                raise AnsibleError(
                    "Error locating '%s' in kv store. Error was %s" % (term, e))

        return values

    def parse_params(self, term):
        """Split a lookup term into its key plus optional name=value options."""
        params = term.split(' ')

        paramvals = {
            'key': params[0],
            'token': None,
            'recurse': False,
            'index': None
        }

        # parameters specified?
        try:
            for param in params[1:]:
                if param and len(param) > 0:
                    name, value = param.split('=')
                    # was "% not a valid ..." -- a broken format string that
                    # itself raised ValueError instead of reporting the name
                    assert name in paramvals, "%s not a valid consul lookup parameter" % name
                    paramvals[name] = value
        except (ValueError, AssertionError) as e:
            raise AnsibleError(e)

        return paramvals
diff --git a/lib/ansible/plugins/lookup/credstash.py b/lib/ansible/plugins/lookup/credstash.py
new file mode 100644
index 0000000000..9d548baea6
--- /dev/null
+++ b/lib/ansible/plugins/lookup/credstash.py
@@ -0,0 +1,48 @@
+# (c) 2015, Ensighten <infra@ensighten.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.errors import AnsibleError
+from ansible.plugins.lookup import LookupBase
+
+CREDSTASH_INSTALLED = False
+
+try:
+ import credstash
+ CREDSTASH_INSTALLED = True
+except ImportError:
+ CREDSTASH_INSTALLED = False
+
+
class LookupModule(LookupBase):
    def run(self, terms, variables, **kwargs):
        """Resolve each term as a credstash secret name and return its value.

        Extra kwargs are forwarded to credstash.getSecret() unchanged.

        :raises AnsibleError: if credstash is not installed, a key is missing,
            or the fetch fails for any other reason.
        """
        if not CREDSTASH_INSTALLED:
            raise AnsibleError('The credstash lookup plugin requires credstash to be installed.')

        ret = []
        for term in terms:
            try:
                val = credstash.getSecret(term, **kwargs)
            except credstash.ItemNotFound:
                raise AnsibleError('Key {0} not found'.format(term))
            except Exception as e:
                # str(e) instead of the deprecated, Python-2-only e.message
                raise AnsibleError('Encountered exception while fetching {0}: {1}'.format(term, e))
            ret.append(val)

        return ret
diff --git a/lib/ansible/plugins/lookup/csvfile.py b/lib/ansible/plugins/lookup/csvfile.py
index 1a27788722..478f063a12 100644
--- a/lib/ansible/plugins/lookup/csvfile.py
+++ b/lib/ansible/plugins/lookup/csvfile.py
@@ -42,9 +42,6 @@ class LookupModule(LookupBase):
def run(self, terms, variables=None, **kwargs):
- if isinstance(terms, basestring):
- terms = [ terms ]
-
basedir = self.get_basedir(variables)
ret = []
diff --git a/lib/ansible/plugins/lookup/dig.py b/lib/ansible/plugins/lookup/dig.py
new file mode 100644
index 0000000000..acd73ddc19
--- /dev/null
+++ b/lib/ansible/plugins/lookup/dig.py
@@ -0,0 +1,211 @@
+# (c) 2015, Jan-Piet Mens <jpmens(at)gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.errors import AnsibleError
+from ansible.plugins.lookup import LookupBase
+import socket
+
+try:
+ import dns.resolver
+ import dns.reversename
+ from dns.rdatatype import *
+ from dns.exception import DNSException
+ HAVE_DNS = True
+except ImportError:
+ HAVE_DNS = False
+
def make_rdata_dict(rdata):
    ''' While the 'dig' lookup plugin supports anything which dnspython supports
        out of the box, the following supported_types list describes which
        DNS query types we can convert to a dict.

        Note: adding support for RRSIG is hard work. :)
    '''
    supported_types = {
        A           : ['address'],
        AAAA        : ['address'],
        CNAME       : ['target'],
        DNAME       : ['target'],
        DLV         : ['algorithm', 'digest_type', 'key_tag', 'digest'],
        DNSKEY      : ['flags', 'algorithm', 'protocol', 'key'],
        DS          : ['algorithm', 'digest_type', 'key_tag', 'digest'],
        HINFO       : ['cpu', 'os'],
        LOC         : ['latitude', 'longitude', 'altitude', 'size', 'horizontal_precision', 'vertical_precision'],
        MX          : ['preference', 'exchange'],
        NAPTR       : ['order', 'preference', 'flags', 'service', 'regexp', 'replacement'],
        NS          : ['target'],
        NSEC3PARAM  : ['algorithm', 'flags', 'iterations', 'salt'],
        PTR         : ['target'],
        RP          : ['mbox', 'txt'],
        # RRSIG     : ['algorithm', 'labels', 'original_ttl', 'expiration', 'inception', 'signature'],
        SOA         : ['mname', 'rname', 'serial', 'refresh', 'retry', 'expire', 'minimum'],
        SPF         : ['strings'],
        SRV         : ['priority', 'weight', 'port', 'target'],
        SSHFP       : ['algorithm', 'fp_type', 'fingerprint'],
        TLSA        : ['usage', 'selector', 'mtype', 'cert'],
        TXT         : ['strings'],
    }

    rd = {}

    # Unsupported record types simply yield an empty dict.
    if rdata.rdtype in supported_types:
        fields = supported_types[rdata.rdtype]
        for f in fields:
            # getattr() is the idiomatic spelling of rdata.__getattribute__(f)
            val = getattr(rdata, f)

            if isinstance(val, dns.name.Name):
                val = dns.name.Name.to_text(val)

            # Binary fields need converting to their printable representations.
            if rdata.rdtype == DLV and f == 'digest':
                val = dns.rdata._hexify(rdata.digest).replace(' ', '')
            if rdata.rdtype == DS and f == 'digest':
                val = dns.rdata._hexify(rdata.digest).replace(' ', '')
            if rdata.rdtype == DNSKEY and f == 'key':
                val = dns.rdata._base64ify(rdata.key).replace(' ', '')
            if rdata.rdtype == NSEC3PARAM and f == 'salt':
                val = dns.rdata._hexify(rdata.salt).replace(' ', '')
            if rdata.rdtype == SSHFP and f == 'fingerprint':
                val = dns.rdata._hexify(rdata.fingerprint).replace(' ', '')
            if rdata.rdtype == TLSA and f == 'cert':
                val = dns.rdata._hexify(rdata.cert).replace(' ', '')

            rd[f] = val

    return rd
+
+# ==============================================================
+# dig: Lookup DNS records
+#
+# --------------------------------------------------------------
+
class LookupModule(LookupBase):

    def run(self, terms, variables=None, **kwargs):

        '''
        terms contains a string with things to `dig' for. We support the
        following formats:
            example.com                   # A record
            example.com qtype=A           # same
            example.com/TXT               # specific qtype
            example.com qtype=txt         # same
            192.168.1.2/PTR               # reverse PTR
              ^^ shortcut for 2.1.168.192.in-addr.arpa/PTR
            example.net/AAAA @nameserver  # query specified server
               ^^^ can be comma-sep list of names/addresses

            ... flat=0                    # returns a dict; default is 1 == string
        '''

        if not HAVE_DNS:
            raise AnsibleError("Can't LOOKUP(dig): module dns.resolver is not installed")

        # Create Resolver object so that we can set NS if necessary
        myres = dns.resolver.Resolver()
        edns_size = 4096
        myres.use_edns(0, ednsflags=dns.flags.DO, payload=edns_size)

        domain = None
        qtype = 'A'
        flat = True

        for t in terms:
            if t.startswith('@'):  # e.g. "@10.0.1.2,192.168.1.1" is ok.
                nsset = t[1:].split(',')
                nameservers = []
                for ns in nsset:
                    # Check if we have a valid IP address. If so, use that, otherwise
                    # try to resolve name to address using system's resolver. If that
                    # fails we bail out.
                    try:
                        socket.inet_aton(ns)
                        nameservers.append(ns)
                    except socket.error:
                        try:
                            nsaddr = dns.resolver.query(ns)[0].address
                            nameservers.append(nsaddr)
                        except Exception as e:
                            # single formatted message -- the 2nd positional arg
                            # of AnsibleError is not part of the message
                            raise AnsibleError("dns lookup NS: %s" % e)
                myres.nameservers = nameservers
                continue

            if '=' in t:
                # maxsplit=1: the original bare split() inside try/except-pass
                # left opt/arg unbound (NameError) when the value contained '='
                opt, arg = t.split('=', 1)

                if opt == 'qtype':
                    qtype = arg.upper()
                elif opt == 'flat':
                    flat = int(arg)

                continue

            if '/' in t:
                try:
                    domain, qtype = t.split('/')
                except ValueError:
                    # more than one '/': treat the whole term as the domain
                    domain = t
            else:
                domain = t

        ret = []

        if qtype.upper() == 'PTR':
            try:
                n = dns.reversename.from_address(domain)
                domain = n.to_text()
            except dns.exception.SyntaxError:
                # not an IP address: query the name as given
                pass
            except Exception as e:
                raise AnsibleError("dns.reversename unhandled exception: %s" % e)

        try:
            answers = myres.query(domain, qtype)
            for rdata in answers:
                s = rdata.to_text()
                if qtype.upper() == 'TXT':
                    s = s[1:-1]  # Strip outside quotes on TXT rdata

                if flat:
                    ret.append(s)
                else:
                    try:
                        rd = make_rdata_dict(rdata)
                        rd['owner'] = answers.canonical_name.to_text()
                        rd['type'] = dns.rdatatype.to_text(rdata.rdtype)
                        rd['ttl'] = answers.rrset.ttl

                        ret.append(rd)
                    except Exception as e:
                        ret.append(str(e))

        except dns.resolver.NXDOMAIN:
            ret.append('NXDOMAIN')
        except dns.resolver.NoAnswer:
            ret.append("")
        except dns.resolver.Timeout:
            ret.append('')
        except dns.exception.DNSException as e:
            raise AnsibleError("dns.resolver unhandled exception: %s" % e)

        return ret
diff --git a/lib/ansible/plugins/lookup/dnstxt.py b/lib/ansible/plugins/lookup/dnstxt.py
index e9dd27bfb6..59d3820f71 100644
--- a/lib/ansible/plugins/lookup/dnstxt.py
+++ b/lib/ansible/plugins/lookup/dnstxt.py
@@ -44,9 +44,6 @@ class LookupModule(LookupBase):
if HAVE_DNS == False:
raise AnsibleError("Can't LOOKUP(dnstxt): module dns.resolver is not installed")
- if isinstance(terms, basestring):
- terms = [ terms ]
-
ret = []
for term in terms:
domain = term.split()[0]
diff --git a/lib/ansible/plugins/lookup/env.py b/lib/ansible/plugins/lookup/env.py
index 55847dd777..0314863f6a 100644
--- a/lib/ansible/plugins/lookup/env.py
+++ b/lib/ansible/plugins/lookup/env.py
@@ -25,9 +25,6 @@ class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
- if isinstance(terms, basestring):
- terms = [ terms ]
-
ret = []
for term in terms:
var = term.split()[0]
diff --git a/lib/ansible/plugins/lookup/etcd.py b/lib/ansible/plugins/lookup/etcd.py
index 46a81e4d6b..25b5c049e2 100644
--- a/lib/ansible/plugins/lookup/etcd.py
+++ b/lib/ansible/plugins/lookup/etcd.py
@@ -66,9 +66,6 @@ class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
- if isinstance(terms, basestring):
- terms = [ terms ]
-
validate_certs = kwargs.get('validate_certs', True)
etcd = Etcd(validate_certs=validate_certs)
diff --git a/lib/ansible/plugins/lookup/file.py b/lib/ansible/plugins/lookup/file.py
index 2498f90c9c..cbfb8f31b4 100644
--- a/lib/ansible/plugins/lookup/file.py
+++ b/lib/ansible/plugins/lookup/file.py
@@ -27,9 +27,6 @@ class LookupModule(LookupBase):
def run(self, terms, variables=None, **kwargs):
- if not isinstance(terms, list):
- terms = [ terms ]
-
ret = []
basedir = self.get_basedir(variables)
diff --git a/lib/ansible/plugins/lookup/first_found.py b/lib/ansible/plugins/lookup/first_found.py
index e9fe9a676a..6e0aaee117 100644
--- a/lib/ansible/plugins/lookup/first_found.py
+++ b/lib/ansible/plugins/lookup/first_found.py
@@ -125,7 +125,6 @@ from jinja2.exceptions import UndefinedError
from ansible.errors import AnsibleLookupError, AnsibleUndefinedVariable
from ansible.plugins.lookup import LookupBase
-from ansible.template import Templar
from ansible.utils.boolean import boolean
class LookupModule(LookupBase):
@@ -174,11 +173,10 @@ class LookupModule(LookupBase):
else:
total_search = terms
- templar = Templar(loader=self._loader, variables=variables)
roledir = variables.get('roledir')
for fn in total_search:
try:
- fn = templar.template(fn)
+ fn = self._templar.template(fn)
except (AnsibleUndefinedVariable, UndefinedError) as e:
continue
diff --git a/lib/ansible/plugins/lookup/flattened.py b/lib/ansible/plugins/lookup/flattened.py
index f0a8adaf5e..b9e1009c26 100644
--- a/lib/ansible/plugins/lookup/flattened.py
+++ b/lib/ansible/plugins/lookup/flattened.py
@@ -46,7 +46,7 @@ class LookupModule(LookupBase):
if isinstance(term, basestring):
# convert a variable to a list
- term2 = listify_lookup_plugin_terms(term, variables, loader=self._loader)
+ term2 = listify_lookup_plugin_terms(term, templar=self._templar, loader=self._loader)
# but avoid converting a plain string to a list of one string
if term2 != [ term ]:
term = term2
@@ -63,6 +63,7 @@ class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
+ ### FIXME: Is this needed now that listify is run on all lookup plugin terms?
if not isinstance(terms, list):
raise AnsibleError("with_flattened expects a list")
diff --git a/lib/ansible/plugins/lookup/hashi_vault.py b/lib/ansible/plugins/lookup/hashi_vault.py
new file mode 100644
index 0000000000..b1e8adfe0f
--- /dev/null
+++ b/lib/ansible/plugins/lookup/hashi_vault.py
@@ -0,0 +1,90 @@
+# (c) 2015, Jonathan Davila <jdavila(at)ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+# USAGE: {{ lookup('hashi_vault', 'secret=secret/hello token=c975b780-d1be-8016-866b-01d0f9b688a5 url=http://myvault:8200')}}
+#
+# You can skip setting the url if you set the VAULT_ADDR environment variable
+# or if you want it to default to localhost:8200
+#
+# NOTE: Due to a current limitation in the HVAC library there won't
+# necessarily be an error if a bad endpoint is specified.
+#
+# Requires hvac library. Install with pip.
+#
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+
+from ansible.errors import *
+from ansible.plugins.lookup import LookupBase
+
+
+ANSIBLE_HASHI_VAULT_ADDR = 'http://127.0.0.1:8200'
+
+if os.getenv('VAULT_ADDR') is not None:
+ ANSIBLE_HASHI_VAULT_ADDR = os.environ['VAULT_ADDR']
+
class HashiVault:
    """Thin wrapper around an authenticated hvac client for a single secret."""

    def __init__(self, **kwargs):
        try:
            import hvac
        except ImportError:
            # original built the exception but never raised it, so a missing
            # hvac surfaced later as a confusing NameError instead
            raise AnsibleError("Please pip install hvac to use this module")

        # The usage header says url may be omitted (VAULT_ADDR / localhost
        # default), so fall back to the module-level default instead of
        # requiring the key.
        self.url = kwargs.pop('url', ANSIBLE_HASHI_VAULT_ADDR)
        self.secret = kwargs.pop('secret')
        self.token = kwargs.pop('token')

        self.client = hvac.Client(url=self.url, token=self.token)

        if not self.client.is_authenticated():
            raise AnsibleError("Invalid Hashicorp Vault Token Specified")

    def get(self):
        """Read the configured secret and return its 'value' field.

        :raises AnsibleError: if the secret path does not exist.
        """
        data = self.client.read(self.secret)
        if data is None:
            raise AnsibleError("The secret %s doesn't seem to exist" % self.secret)
        return data['data']['value']
+
+
+
class LookupModule(LookupBase):

    def run(self, terms, variables, **kwargs):
        """Look up a secret in Hashicorp Vault.

        terms[0] is a 'secret=... token=... url=...' option string (see the
        usage comment at the top of this file).

        NOTE(review): the connection is built from terms[0] only, and every
        term appends the same secret value -- looks intentional given the
        documented single-term usage, but confirm if multi-term support is
        ever needed.
        """
        vault_dict = {}
        for param in terms[0].split(' '):
            # maxsplit=1 so values containing '=' (e.g. tokens) don't crash
            name, value = param.split('=', 1)
            vault_dict[name] = value

        vault_conn = HashiVault(**vault_dict)

        # one result per term (dropped the unused per-term 'key' local)
        return [vault_conn.get() for _ in terms]
diff --git a/lib/ansible/plugins/lookup/indexed_items.py b/lib/ansible/plugins/lookup/indexed_items.py
index 9e242ac6bf..4721918f30 100644
--- a/lib/ansible/plugins/lookup/indexed_items.py
+++ b/lib/ansible/plugins/lookup/indexed_items.py
@@ -27,6 +27,7 @@ class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
+ ### FIXME: Is this needed now that listify is run on all lookup plugin terms?
if not isinstance(terms, list):
raise AnsibleError("with_indexed_items expects a list")
diff --git a/lib/ansible/plugins/lookup/ini.py b/lib/ansible/plugins/lookup/ini.py
new file mode 100644
index 0000000000..7ea8f92aaf
--- /dev/null
+++ b/lib/ansible/plugins/lookup/ini.py
@@ -0,0 +1,93 @@
+# (c) 2015, Yannig Perre <yannig.perre(at)gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import StringIO
+import os
+import codecs
+import ConfigParser
+import re
+
+from ansible.errors import *
+from ansible.plugins.lookup import LookupBase
+
class LookupModule(LookupBase):

    def read_properties(self, filename, key, dflt, is_regexp):
        """Read a java-properties file by wrapping it in a dummy ini section."""
        config = StringIO.StringIO()
        # close the source file promptly instead of leaking the handle
        with open(filename) as f:
            config.write('[java_properties]\n' + f.read())
        config.seek(0, os.SEEK_SET)
        self.cp.readfp(config)
        return self.get_value(key, 'java_properties', dflt, is_regexp)

    def read_ini(self, filename, key, section, dflt, is_regexp):
        """Read ``key`` from ``section`` of an ini file."""
        with open(filename) as f:
            self.cp.readfp(f)
        return self.get_value(key, section, dflt, is_regexp)

    def get_value(self, key, section, dflt, is_regexp):
        # Retrieve all values from a section using a regexp
        if is_regexp:
            return [v for k, v in self.cp.items(section) if re.match(key, k)]
        # Retrieve a single value, falling back to the supplied default
        try:
            return self.cp.get(section, key)
        except ConfigParser.NoOptionError:
            return dflt

    def run(self, terms, variables=None, **kwargs):

        basedir = self.get_basedir(variables)
        self.basedir = basedir
        self.cp = ConfigParser.ConfigParser()

        ret = []
        for term in terms:
            params = term.split()
            key = params[0]

            paramvals = {
                'file': 'ansible.ini',
                're': False,
                'default': None,
                'section': "global",
                'type': "ini",
            }

            # parameters specified?
            try:
                for param in params[1:]:
                    name, value = param.split('=')
                    assert name in paramvals, "%s is not a valid option" % name
                    paramvals[name] = value
            except (ValueError, AssertionError) as e:
                # was 'errors.AnsibleError' -- 'errors' is not a defined name
                # under 'from ansible.errors import *', so bad parameters
                # raised NameError instead of a useful message
                raise AnsibleError(e)

            path = self._loader.path_dwim_relative(basedir, 'files', paramvals['file'])
            if paramvals['type'] == "properties":
                var = self.read_properties(path, key, paramvals['default'], paramvals['re'])
            else:
                var = self.read_ini(path, key, paramvals['section'], paramvals['default'], paramvals['re'])
            if var is not None:
                if isinstance(var, list):
                    ret.extend(var)
                else:
                    ret.append(var)
        return ret
diff --git a/lib/ansible/plugins/lookup/inventory_hostnames.py b/lib/ansible/plugins/lookup/inventory_hostnames.py
index d09dec0c7b..046e148259 100644
--- a/lib/ansible/plugins/lookup/inventory_hostnames.py
+++ b/lib/ansible/plugins/lookup/inventory_hostnames.py
@@ -25,6 +25,7 @@ from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, inject=None, **kwargs):
+ ### FIXME: Is this needed now that listify is run on all lookup plugin terms?
if not isinstance(terms, list):
raise AnsibleError("with_inventory_hostnames expects a list")
diff --git a/lib/ansible/plugins/lookup/items.py b/lib/ansible/plugins/lookup/items.py
index 65ff66d854..43bb77e144 100644
--- a/lib/ansible/plugins/lookup/items.py
+++ b/lib/ansible/plugins/lookup/items.py
@@ -23,8 +23,5 @@ class LookupModule(LookupBase):
def run(self, terms, **kwargs):
- if not isinstance(terms, list):
- terms = [ terms ]
-
return self._flatten(terms)
diff --git a/lib/ansible/plugins/lookup/list.py b/lib/ansible/plugins/lookup/list.py
new file mode 100644
index 0000000000..cd92718b38
--- /dev/null
+++ b/lib/ansible/plugins/lookup/list.py
@@ -0,0 +1,25 @@
+# (c) 2012-15, Ansible, Inc
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division)
+__metaclass__ = type
+
+from ansible.plugins.lookup import LookupBase
+
class LookupModule(LookupBase):

    def run(self, terms, **kwargs):
        """Identity lookup: hand the already-listified terms straight back."""
        return terms
diff --git a/lib/ansible/plugins/lookup/nested.py b/lib/ansible/plugins/lookup/nested.py
index 52f4bed1d5..ff865c28ee 100644
--- a/lib/ansible/plugins/lookup/nested.py
+++ b/lib/ansible/plugins/lookup/nested.py
@@ -17,22 +17,29 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ansible.errors import AnsibleError
+from jinja2.exceptions import UndefinedError
+
+from ansible.errors import AnsibleError, AnsibleUndefinedVariable
from ansible.plugins.lookup import LookupBase
from ansible.utils.listify import listify_lookup_plugin_terms
class LookupModule(LookupBase):
- def __lookup_variabless(self, terms, variables):
+ def _lookup_variables(self, terms, variables):
+ foo = variables.copy()
+ foo.pop('vars')
results = []
for x in terms:
- intermediate = listify_lookup_plugin_terms(x, variables, loader=self._loader)
+ try:
+ intermediate = listify_lookup_plugin_terms(x, templar=self._templar, loader=self._loader, fail_on_undefined=True)
+ except UndefinedError, e:
+ raise AnsibleUndefinedVariable("One of the nested variables was undefined. The error was: %s" % e)
results.append(intermediate)
return results
def run(self, terms, variables=None, **kwargs):
- terms = self.__lookup_variabless(terms, variables)
+ terms = self._lookup_variables(terms, variables)
my_list = terms[:]
my_list.reverse()
diff --git a/lib/ansible/plugins/lookup/password.py b/lib/ansible/plugins/lookup/password.py
index 47ec786429..3c80e6811f 100644
--- a/lib/ansible/plugins/lookup/password.py
+++ b/lib/ansible/plugins/lookup/password.py
@@ -59,9 +59,6 @@ class LookupModule(LookupBase):
ret = []
- if not isinstance(terms, list):
- terms = [ terms ]
-
for term in terms:
             # you can't have escaped spaces in your pathname
params = term.split()
diff --git a/lib/ansible/plugins/lookup/pipe.py b/lib/ansible/plugins/lookup/pipe.py
index d9f74708b2..71b0ed9777 100644
--- a/lib/ansible/plugins/lookup/pipe.py
+++ b/lib/ansible/plugins/lookup/pipe.py
@@ -26,9 +26,6 @@ class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
- if isinstance(terms, basestring):
- terms = [ terms ]
-
ret = []
for term in terms:
'''
diff --git a/lib/ansible/plugins/lookup/redis_kv.py b/lib/ansible/plugins/lookup/redis_kv.py
index e499e83f93..982cceebef 100644
--- a/lib/ansible/plugins/lookup/redis_kv.py
+++ b/lib/ansible/plugins/lookup/redis_kv.py
@@ -43,9 +43,6 @@ class LookupModule(LookupBase):
if not HAVE_REDIS:
raise AnsibleError("Can't LOOKUP(redis_kv): module redis is not installed")
- if not isinstance(terms, list):
- terms = [ terms ]
-
ret = []
for term in terms:
(url,key) = term.split(',')
diff --git a/lib/ansible/plugins/lookup/sequence.py b/lib/ansible/plugins/lookup/sequence.py
index 1e66626b68..a6d133b3f9 100644
--- a/lib/ansible/plugins/lookup/sequence.py
+++ b/lib/ansible/plugins/lookup/sequence.py
@@ -22,7 +22,6 @@ from re import compile as re_compile, IGNORECASE
from ansible.errors import *
from ansible.parsing.splitter import parse_kv
from ansible.plugins.lookup import LookupBase
-from ansible.template import Templar
# shortcut format
NUM = "(0?x?[0-9a-f]+)"
@@ -143,13 +142,9 @@ class LookupModule(LookupBase):
def sanity_check(self):
if self.count is None and self.end is None:
- raise AnsibleError(
- "must specify count or end in with_sequence"
- )
+ raise AnsibleError( "must specify count or end in with_sequence")
elif self.count is not None and self.end is not None:
- raise AnsibleError(
- "can't specify both count and end in with_sequence"
- )
+ raise AnsibleError( "can't specify both count and end in with_sequence")
elif self.count is not None:
# convert count to end
if self.count != 0:
@@ -167,7 +162,7 @@ class LookupModule(LookupBase):
raise AnsibleError("bad formatting string: %s" % self.format)
def generate_sequence(self):
- if self.stride > 0:
+ if self.stride >= 0:
adjust = 1
else:
adjust = -1
@@ -185,16 +180,9 @@ class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
results = []
- if isinstance(terms, basestring):
- terms = [ terms ]
-
- templar = Templar(loader=self._loader, variables=variables)
-
for term in terms:
try:
self.reset() # clear out things for this iteration
-
- term = templar.template(term)
try:
if not self.parse_simple_args(term):
self.parse_kv_args(parse_kv(term))
diff --git a/lib/ansible/plugins/lookup/shelvefile.py b/lib/ansible/plugins/lookup/shelvefile.py
new file mode 100644
index 0000000000..89e393694b
--- /dev/null
+++ b/lib/ansible/plugins/lookup/shelvefile.py
@@ -0,0 +1,83 @@
+# (c) 2015, Alejandro Guirao <lekumberri@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import shelve
+import os
+
+from ansible.errors import AnsibleError
+from ansible.plugins.lookup import LookupBase
+
class LookupModule(LookupBase):

    def read_shelve(self, shelve_filename, key):
        """
        Read the value of "key" from a shelve file (None if absent).
        """
        d = shelve.open(shelve_filename)
        try:
            # close the shelve even if the read raises
            return d.get(key, None)
        finally:
            d.close()

    def run(self, terms, variables=None, **kwargs):

        if not isinstance(terms, list):
            terms = [terms]

        ret = []

        for term in terms:
            playbook_path = None
            relative_path = None

            paramvals = {"file": None, "key": None}
            params = term.split()

            try:
                for param in params:
                    name, value = param.split('=')
                    assert name in paramvals, "%s is not a valid parameter" % name
                    paramvals[name] = value

            except (ValueError, AssertionError) as e:
                # In case "file" or "key" are not present
                raise AnsibleError(e)

            # renamed from 'file': don't shadow the builtin
            shelve_file = paramvals['file']
            key = paramvals['key']
            basedir_path = self._loader.path_dwim(shelve_file)

            # Search also in the role/files directory and in the playbook directory
            if 'role_path' in variables:
                relative_path = self._loader.path_dwim_relative(variables['role_path'], 'files', shelve_file)
            if 'playbook_dir' in variables:
                playbook_path = self._loader.path_dwim_relative(variables['playbook_dir'], 'files', shelve_file)

            for path in (basedir_path, relative_path, playbook_path):
                if path and os.path.exists(path):
                    res = self.read_shelve(path, key)
                    if res is None:
                        raise AnsibleError("Key %s not found in shelve file %s" % (key, shelve_file))
                    # Convert the value read to string
                    ret.append(str(res))
                    break
            else:
                raise AnsibleError("Could not locate shelve file in lookup: %s" % shelve_file)

        return ret
diff --git a/lib/ansible/plugins/lookup/subelements.py b/lib/ansible/plugins/lookup/subelements.py
index d8c2b1086e..5ac384f6f1 100644
--- a/lib/ansible/plugins/lookup/subelements.py
+++ b/lib/ansible/plugins/lookup/subelements.py
@@ -33,8 +33,8 @@ class LookupModule(LookupBase):
raise AnsibleError(
"subelements lookup expects a list of two or three items, "
+ msg)
- terms = listify_lookup_plugin_terms(terms, variables, loader=self._loader)
- terms[0] = listify_lookup_plugin_terms(terms[0], variables, loader=self._loader)
+
+ terms[0] = listify_lookup_plugin_terms(terms[0], templar=self._templar, loader=self._loader)
# check lookup terms - check number of terms
if not isinstance(terms, list) or not 2 <= len(terms) <= 3:
diff --git a/lib/ansible/plugins/lookup/template.py b/lib/ansible/plugins/lookup/template.py
index e40247bbc3..16b86c2de5 100644
--- a/lib/ansible/plugins/lookup/template.py
+++ b/lib/ansible/plugins/lookup/template.py
@@ -19,23 +19,18 @@ __metaclass__ = type
import os
+from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
-from ansible.template import Templar
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
- if not isinstance(terms, list):
- terms = [ terms ]
-
basedir = self.get_basedir(variables)
ret = []
- templar = Templar(loader=self._loader, variables=variables)
-
for term in terms:
self._display.debug("File lookup term: %s" % term)
@@ -44,7 +39,13 @@ class LookupModule(LookupBase):
if lookupfile and os.path.exists(lookupfile):
with open(lookupfile, 'r') as f:
template_data = f.read()
- res = templar.template(template_data, preserve_trailing_newlines=True)
+
+ self._templar.environment.searchpath = [self._loader._basedir, os.path.dirname(lookupfile)]
+ if 'role_path' in variables:
+ self._templar.environment.searchpath.insert(1, C.DEFAULT_ROLES_PATH)
+ self._templar.environment.searchpath.insert(1, variables['role_path'])
+
+ res = self._templar.template(template_data, preserve_trailing_newlines=True)
ret.append(res)
else:
raise AnsibleError("the template file %s could not be found for the lookup" % term)
diff --git a/lib/ansible/plugins/lookup/together.py b/lib/ansible/plugins/lookup/together.py
index 2f53121cc8..ac94a414f2 100644
--- a/lib/ansible/plugins/lookup/together.py
+++ b/lib/ansible/plugins/lookup/together.py
@@ -31,16 +31,16 @@ class LookupModule(LookupBase):
[1, 2], [3] -> [1, 3], [2, None]
"""
- def __lookup_variabless(self, terms, variables):
+ def _lookup_variables(self, terms):
results = []
for x in terms:
- intermediate = listify_lookup_plugin_terms(x, variables, loader=self._loader)
+ intermediate = listify_lookup_plugin_terms(x, templar=self._templar, loader=self._loader)
results.append(intermediate)
return results
def run(self, terms, variables=None, **kwargs):
- terms = self.__lookup_variabless(terms, variables)
+ terms = self._lookup_variables(terms)
my_list = terms[:]
if len(my_list) == 0:
diff --git a/lib/ansible/plugins/lookup/url.py b/lib/ansible/plugins/lookup/url.py
index 216b07d1f8..a893493db4 100644
--- a/lib/ansible/plugins/lookup/url.py
+++ b/lib/ansible/plugins/lookup/url.py
@@ -29,9 +29,6 @@ class LookupModule(LookupBase):
def run(self, terms, variables=None, **kwargs):
- if isinstance(terms, basestring):
- terms = [ terms ]
-
validate_certs = kwargs.get('validate_certs', True)
ret = []
diff --git a/lib/ansible/plugins/shell/powershell.py b/lib/ansible/plugins/shell/powershell.py
index 3377d5786f..0e16d34e16 100644
--- a/lib/ansible/plugins/shell/powershell.py
+++ b/lib/ansible/plugins/shell/powershell.py
@@ -24,7 +24,9 @@ import random
import shlex
import time
-_common_args = ['PowerShell', '-NoProfile', '-NonInteractive']
+from ansible.utils.unicode import to_bytes, to_unicode
+
+_common_args = ['PowerShell', '-NoProfile', '-NonInteractive', '-ExecutionPolicy', 'Unrestricted']
# Primarily for testing, allow explicitly specifying PowerShell version via
# an environment variable.
@@ -38,24 +40,32 @@ class ShellModule(object):
return ''
def join_path(self, *args):
- return os.path.join(*args).replace('/', '\\')
+ parts = []
+ for arg in args:
+ arg = self._unquote(arg).replace('/', '\\')
+ parts.extend([a for a in arg.split('\\') if a])
+ path = '\\'.join(parts)
+ if path.startswith('~'):
+ return path
+ return '"%s"' % path
def path_has_trailing_slash(self, path):
# Allow Windows paths to be specified using either slash.
+ path = self._unquote(path)
return path.endswith('/') or path.endswith('\\')
def chmod(self, mode, path):
return ''
def remove(self, path, recurse=False):
- path = self._escape(path)
+ path = self._escape(self._unquote(path))
if recurse:
return self._encode_script('''Remove-Item "%s" -Force -Recurse;''' % path)
else:
return self._encode_script('''Remove-Item "%s" -Force;''' % path)
def mkdtemp(self, basefile, system=False, mode=None):
- basefile = self._escape(basefile)
+ basefile = self._escape(self._unquote(basefile))
# FIXME: Support system temp path!
return self._encode_script('''(New-Item -Type Directory -Path $env:temp -Name "%s").FullName | Write-Host -Separator '';''' % basefile)
@@ -63,16 +73,17 @@ class ShellModule(object):
# PowerShell only supports "~" (not "~username"). Resolve-Path ~ does
# not seem to work remotely, though by default we are always starting
# in the user's home directory.
+ user_home_path = self._unquote(user_home_path)
if user_home_path == '~':
script = 'Write-Host (Get-Location).Path'
elif user_home_path.startswith('~\\'):
- script = 'Write-Host ((Get-Location).Path + "%s")' % _escape(user_home_path[1:])
+ script = 'Write-Host ((Get-Location).Path + "%s")' % self._escape(user_home_path[1:])
else:
- script = 'Write-Host "%s"' % _escape(user_home_path)
+ script = 'Write-Host "%s"' % self._escape(user_home_path)
return self._encode_script(script)
def checksum(self, path, *args, **kwargs):
- path = self._escape(path)
+ path = self._escape(self._unquote(path))
script = '''
If (Test-Path -PathType Leaf "%(path)s")
{
@@ -93,16 +104,37 @@ class ShellModule(object):
return self._encode_script(script)
def build_module_command(self, env_string, shebang, cmd, rm_tmp=None):
- cmd = cmd.encode('utf-8')
- cmd_parts = shlex.split(cmd, posix=False)
- if not cmd_parts[0].lower().endswith('.ps1'):
- cmd_parts[0] = '%s.ps1' % cmd_parts[0]
- script = self._build_file_cmd(cmd_parts)
+ cmd_parts = shlex.split(to_bytes(cmd), posix=False)
+ cmd_parts = map(to_unicode, cmd_parts)
+ if shebang and shebang.lower() == '#!powershell':
+ if not self._unquote(cmd_parts[0]).lower().endswith('.ps1'):
+ cmd_parts[0] = '"%s.ps1"' % self._unquote(cmd_parts[0])
+ cmd_parts.insert(0, '&')
+ elif shebang and shebang.startswith('#!'):
+ cmd_parts.insert(0, shebang[2:])
+ catch = '''
+            $_obj = @{ failed = $true; msg = $_ }
+ echo $_obj | ConvertTo-Json -Compress -Depth 99
+ Exit 1
+ '''
+ script = 'Try { %s }\nCatch { %s }' % (' '.join(cmd_parts), 'throw')
if rm_tmp:
- rm_tmp = self._escape(rm_tmp)
- script = '%s; Remove-Item "%s" -Force -Recurse;' % (script, rm_tmp)
+ rm_tmp = self._escape(self._unquote(rm_tmp))
+ rm_cmd = 'Remove-Item "%s" -Force -Recurse -ErrorAction SilentlyContinue' % rm_tmp
+ script = '%s\nFinally { %s }' % (script, rm_cmd)
return self._encode_script(script)
+ def _unquote(self, value):
+ '''Remove any matching quotes that wrap the given value.'''
+ value = to_unicode(value or '')
+ m = re.match(r'^\s*?\'(.*?)\'\s*?$', value)
+ if m:
+ return m.group(1)
+ m = re.match(r'^\s*?"(.*?)"\s*?$', value)
+ if m:
+ return m.group(1)
+ return value
+
def _escape(self, value, include_vars=False):
'''Return value escaped for use in PowerShell command.'''
# http://www.techotopia.com/index.php/Windows_PowerShell_1.0_String_Quoting_and_Escape_Sequences
@@ -119,14 +151,10 @@ class ShellModule(object):
def _encode_script(self, script, as_list=False):
'''Convert a PowerShell script to a single base64-encoded command.'''
+ script = to_unicode(script)
script = '\n'.join([x.strip() for x in script.splitlines() if x.strip()])
encoded_script = base64.b64encode(script.encode('utf-16-le'))
cmd_parts = _common_args + ['-EncodedCommand', encoded_script]
if as_list:
return cmd_parts
return ' '.join(cmd_parts)
-
- def _build_file_cmd(self, cmd_parts):
- '''Build command line to run a file, given list of file name plus args.'''
- return ' '.join(_common_args + ['-ExecutionPolicy', 'Unrestricted', '-File'] + ['"%s"' % x for x in cmd_parts])
-
diff --git a/lib/ansible/plugins/shell/sh.py b/lib/ansible/plugins/shell/sh.py
index 70ec91d6e5..1464fd09fa 100644
--- a/lib/ansible/plugins/shell/sh.py
+++ b/lib/ansible/plugins/shell/sh.py
@@ -65,9 +65,14 @@ class ShellModule(object):
if system and (basetmp.startswith('$HOME') or basetmp.startswith('~/')):
basetmp = self.join_path('/tmp', basefile)
cmd = 'mkdir -p "%s"' % basetmp
- if mode:
- cmd += ' && chmod %s "%s"' % (mode, basetmp)
cmd += ' && echo "%s"' % basetmp
+
+ # change the umask in a subshell to achieve the desired mode
+ # also for directories created with `mkdir -p`
+ if mode:
+ tmp_umask = 0777 & ~mode
+ cmd = '(umask %o && %s)' % (tmp_umask, cmd)
+
return cmd
def expand_user(self, user_home_path):
diff --git a/lib/ansible/plugins/strategies/__init__.py b/lib/ansible/plugins/strategies/__init__.py
index 83ddd1d2c3..f6bb3b03aa 100644
--- a/lib/ansible/plugins/strategies/__init__.py
+++ b/lib/ansible/plugins/strategies/__init__.py
@@ -205,14 +205,8 @@ class StrategyBase:
# lookup the role in the ROLE_CACHE to make sure we're dealing
# with the correct object and mark it as executed
for (entry, role_obj) in iterator._play.ROLE_CACHE[task_result._task._role._role_name].iteritems():
- params = task_result._task._role._role_params
- if task_result._task._role.tags is not None:
- params['tags'] = task_result._task._role.tags
- if task_result._task._role.when is not None:
- params['when'] = task_result._task._role.when
- hashed_entry = hash_params(params)
- if entry == hashed_entry:
- role_obj._had_task_run = True
+ if role_obj._uuid == task_result._task._role._uuid:
+ role_obj._had_task_run[host.name] = True
ret_results.append(task_result)
@@ -412,7 +406,9 @@ class StrategyBase:
# set the vars for this task from those specified as params to the include
for b in block_list:
- b.vars = included_file._args.copy()
+ temp_vars = b._task_include.vars.copy()
+ temp_vars.update(included_file._args.copy())
+ b._task_include.vars = temp_vars
return block_list
@@ -510,3 +506,21 @@ class StrategyBase:
self._display.banner(msg)
return ret
+
+ def _execute_meta(self, task, play_context, iterator):
+
+ # meta tasks store their args in the _raw_params field of args,
+ # since they do not use k=v pairs, so get that
+ meta_action = task.args.get('_raw_params')
+
+ if meta_action == 'noop':
+ # FIXME: issue a callback for the noop here?
+ pass
+ elif meta_action == 'flush_handlers':
+ self.run_handlers(iterator, play_context)
+ elif meta_action == 'refresh_inventory':
+ self._inventory.refresh_inventory()
+ #elif meta_action == 'reset_connection':
+ # connection_info.connection.close()
+ else:
+ raise AnsibleError("invalid meta action requested: %s" % meta_action, obj=task._ds)
diff --git a/lib/ansible/plugins/strategies/free.py b/lib/ansible/plugins/strategies/free.py
index 683a96ba7e..5bc0d8db36 100644
--- a/lib/ansible/plugins/strategies/free.py
+++ b/lib/ansible/plugins/strategies/free.py
@@ -21,6 +21,8 @@ __metaclass__ = type
import time
+from ansible.errors import *
+from ansible.playbook.included_file import IncludedFile
from ansible.plugins.strategies import StrategyBase
try:
@@ -53,7 +55,7 @@ class StrategyModule(StrategyBase):
work_to_do = True
while work_to_do and not self._tqm._terminated:
- hosts_left = self.get_hosts_remaining(iterator._play)
+ hosts_left = self._inventory.get_hosts(iterator._play.hosts)
if len(hosts_left) == 0:
self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
result = False
@@ -95,7 +97,7 @@ class StrategyModule(StrategyBase):
# check to see if this task should be skipped, due to it being a member of a
# role which has already run (and whether that role allows duplicate execution)
- if task._role and task._role.has_run():
+ if task._role and task._role.has_run(host):
# If there is no metadata, the default behavior is to not allow duplicates,
# if there is metadata, check to see if the allow_duplicates flag was set to true
if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
@@ -140,6 +142,31 @@ class StrategyModule(StrategyBase):
results = self._process_pending_results(iterator)
host_results.extend(results)
+ try:
+ included_files = IncludedFile.process_include_results(host_results, self._tqm, iterator=iterator, loader=self._loader, variable_manager=self._variable_manager)
+ except AnsibleError, e:
+ return False
+
+ if len(included_files) > 0:
+ for included_file in included_files:
+ # included hosts get the task list while those excluded get an equal-length
+ # list of noop tasks, to make sure that they continue running in lock-step
+ try:
+ new_blocks = self._load_included_file(included_file, iterator=iterator)
+ except AnsibleError, e:
+ for host in included_file._hosts:
+ iterator.mark_host_failed(host)
+ self._display.warning(str(e))
+ continue
+
+ for host in hosts_left:
+ if host in included_file._hosts:
+ task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=included_file._task)
+ final_blocks = []
+ for new_block in new_blocks:
+ final_blocks.append(new_block.filter_tagged_tasks(play_context, task_vars))
+ iterator.add_tasks(host, final_blocks)
+
# pause briefly so we don't spin lock
time.sleep(0.05)
@@ -149,10 +176,9 @@ class StrategyModule(StrategyBase):
except Exception as e:
# FIXME: ctrl+c can cause some failures here, so catch them
# with the appropriate error type
- print("wtf: %s" % e)
pass
# run the base class run() method, which executes the cleanup function
# and runs any outstanding handlers which have been triggered
- super(StrategyModule, self).run(iterator, play_context)
+ return super(StrategyModule, self).run(iterator, play_context, result)
diff --git a/lib/ansible/plugins/strategies/linear.py b/lib/ansible/plugins/strategies/linear.py
index 6de217f3f1..527843c692 100644
--- a/lib/ansible/plugins/strategies/linear.py
+++ b/lib/ansible/plugins/strategies/linear.py
@@ -58,6 +58,9 @@ class StrategyModule(StrategyBase):
continue
(s, t) = v
+ if t is None:
+ continue
+
if s.cur_block < lowest_cur_block and s.run_state != PlayIterator.ITERATING_COMPLETE:
lowest_cur_block = s.cur_block
@@ -85,10 +88,12 @@ class StrategyModule(StrategyBase):
if host_state_task is None:
continue
(s, t) = host_state_task
+ if t is None:
+ continue
if s.run_state == cur_state and s.cur_block == cur_block:
new_t = iterator.get_next_task_for_host(host)
#if new_t != t:
- # raise AnsibleError("iterator error, wtf?")
+ # raise AnsibleError("iterator error, wtf?") FIXME
rvals.append((host, t))
else:
rvals.append((host, noop_task))
@@ -170,7 +175,7 @@ class StrategyModule(StrategyBase):
# check to see if this task should be skipped, due to it being a member of a
# role which has already run (and whether that role allows duplicate execution)
- if task._role and task._role.has_run():
+ if task._role and task._role.has_run(host):
# If there is no metadata, the default behavior is to not allow duplicates,
# if there is metadata, check to see if the allow_duplicates flag was set to true
if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
@@ -178,24 +183,15 @@ class StrategyModule(StrategyBase):
continue
if task.action == 'meta':
- # meta tasks store their args in the _raw_params field of args,
- # since they do not use k=v pairs, so get that
- meta_action = task.args.get('_raw_params')
- if meta_action == 'noop':
- # FIXME: issue a callback for the noop here?
- continue
- elif meta_action == 'flush_handlers':
- self.run_handlers(iterator, play_context)
- else:
- raise AnsibleError("invalid meta action requested: %s" % meta_action, obj=task._ds)
+ self._execute_meta(task, play_context, iterator)
else:
# handle step if needed, skip meta actions as they are used internally
if self._step and choose_step:
if self._take_step(task):
choose_step = False
else:
- break
skip_rest = True
+ break
self._display.debug("getting variables")
task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=task)
diff --git a/lib/ansible/plugins/test/__init__.py b/lib/ansible/plugins/test/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/ansible/plugins/test/__init__.py
diff --git a/lib/ansible/plugins/test/core.py b/lib/ansible/plugins/test/core.py
new file mode 100644
index 0000000000..daf2240211
--- /dev/null
+++ b/lib/ansible/plugins/test/core.py
@@ -0,0 +1,101 @@
+# (c) 2012, Jeroen Hoekx <jeroen@hoekx.be>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+import re
+from ansible import errors
+
+def failed(*a, **kw):
+ ''' Test if task result yields failed '''
+ item = a[0]
+ if type(item) != dict:
+ raise errors.AnsibleFilterError("|failed expects a dictionary")
+ rc = item.get('rc',0)
+ failed = item.get('failed',False)
+ if rc != 0 or failed:
+ return True
+ else:
+ return False
+
+def success(*a, **kw):
+ ''' Test if task result yields success '''
+ return not failed(*a, **kw)
+
+def changed(*a, **kw):
+ ''' Test if task result yields changed '''
+ item = a[0]
+ if type(item) != dict:
+ raise errors.AnsibleFilterError("|changed expects a dictionary")
+ if not 'changed' in item:
+ changed = False
+ if ('results' in item # some modules return a 'results' key
+ and type(item['results']) == list
+ and type(item['results'][0]) == dict):
+ for result in item['results']:
+ changed = changed or result.get('changed', False)
+ else:
+ changed = item.get('changed', False)
+ return changed
+
+def skipped(*a, **kw):
+ ''' Test if task result yields skipped '''
+ item = a[0]
+ if type(item) != dict:
+ raise errors.AnsibleFilterError("|skipped expects a dictionary")
+ skipped = item.get('skipped', False)
+ return skipped
+
+def regex(value='', pattern='', ignorecase=False, match_type='search'):
+ ''' Expose `re` as a boolean filter using the `search` method by default.
+ This is likely only useful for `search` and `match` which already
+ have their own filters.
+ '''
+ if ignorecase:
+ flags = re.I
+ else:
+ flags = 0
+ _re = re.compile(pattern, flags=flags)
+    _bool = bool
+ return _bool(getattr(_re, match_type, 'search')(value))
+
+def match(value, pattern='', ignorecase=False):
+ ''' Perform a `re.match` returning a boolean '''
+ return regex(value, pattern, ignorecase, 'match')
+
+def search(value, pattern='', ignorecase=False):
+ ''' Perform a `re.search` returning a boolean '''
+ return regex(value, pattern, ignorecase, 'search')
+
+class TestModule(object):
+ ''' Ansible core jinja2 tests '''
+
+ def tests(self):
+ return {
+ # failure testing
+ 'failed' : failed,
+ 'success' : success,
+
+ # changed testing
+ 'changed' : changed,
+
+ # skip testing
+ 'skipped' : skipped,
+
+ # regex
+ 'match': match,
+ 'search': search,
+ 'regex': regex,
+ }
diff --git a/lib/ansible/plugins/test/files.py b/lib/ansible/plugins/test/files.py
new file mode 100644
index 0000000000..0c46b53160
--- /dev/null
+++ b/lib/ansible/plugins/test/files.py
@@ -0,0 +1,37 @@
+# (c) 2015, Ansible, Inc
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+from os.path import isdir, isfile, isabs, exists, lexists, islink, samefile, ismount
+from ansible import errors
+
+class TestModule(object):
+ ''' Ansible file jinja2 tests '''
+
+ def tests(self):
+ return {
+ # file testing
+ 'is_dir' : isdir,
+ 'is_file' : isfile,
+ 'is_link' : islink,
+ 'exists' : exists,
+ 'link_exists' : lexists,
+
+ # path testing
+ 'is_abs' : isabs,
+ 'is_same_file' : samefile,
+ 'is_mount' : ismount,
+ }
diff --git a/lib/ansible/template/__init__.py b/lib/ansible/template/__init__.py
index 6158611a30..1a1465139a 100644
--- a/lib/ansible/template/__init__.py
+++ b/lib/ansible/template/__init__.py
@@ -30,13 +30,14 @@ from jinja2.runtime import StrictUndefined
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFilterError, AnsibleUndefinedVariable
-from ansible.plugins import _basedirs, filter_loader, lookup_loader
+from ansible.plugins import _basedirs, filter_loader, lookup_loader, test_loader
from ansible.template.safe_eval import safe_eval
from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars
from ansible.utils.debug import debug
from numbers import Number
+from types import NoneType
__all__ = ['Templar']
@@ -47,7 +48,6 @@ __all__ = ['Templar']
NON_TEMPLATED_TYPES = ( bool, Number )
JINJA2_OVERRIDE = '#jinja2:'
-JINJA2_ALLOWED_OVERRIDES = frozenset(['trim_blocks', 'lstrip_blocks', 'newline_sequence', 'keep_trailing_newline'])
class Templar:
'''
@@ -57,6 +57,7 @@ class Templar:
def __init__(self, loader, shared_loader_obj=None, variables=dict()):
self._loader = loader
self._filters = None
+ self._tests = None
self._available_variables = variables
if loader:
@@ -65,8 +66,6 @@ class Templar:
self._basedir = './'
if shared_loader_obj:
- global _basedirs
- _basedirs = shared_loader_obj.basedirs[:]
self._filter_loader = getattr(shared_loader_obj, 'filter_loader')
self._lookup_loader = getattr(shared_loader_obj, 'lookup_loader')
else:
@@ -84,7 +83,7 @@ class Templar:
undefined=StrictUndefined,
extensions=self._get_extensions(),
finalize=self._finalize,
- loader=FileSystemLoader('.'),
+ loader=FileSystemLoader(self._basedir),
)
self.environment.template_class = AnsibleJ2Template
@@ -118,11 +117,28 @@ class Templar:
self._filters = dict()
for fp in plugins:
self._filters.update(fp.filters())
+ self._filters.update(self._get_tests())
return self._filters.copy()
+ def _get_tests(self):
+ '''
+ Returns tests plugins, after loading and caching them if need be
+ '''
+
+ if self._tests is not None:
+ return self._tests.copy()
+
+ plugins = [x for x in test_loader.all()]
+
+ self._tests = dict()
+ for fp in plugins:
+ self._tests.update(fp.tests())
+
+ return self._tests.copy()
+
def _get_extensions(self):
- '''
+ '''
Return jinja2 extensions to load.
If some extensions are set via jinja_extensions in ansible.cfg, we try
@@ -172,6 +188,8 @@ class Templar:
resolved_val = self._available_variables[var_name]
if isinstance(resolved_val, NON_TEMPLATED_TYPES):
return resolved_val
+ elif isinstance(resolved_val, NoneType):
+ return C.DEFAULT_NULL_REPRESENTATION
result = self._do_template(variable, preserve_trailing_newlines=preserve_trailing_newlines, fail_on_undefined=fail_on_undefined, overrides=overrides)
@@ -233,14 +251,16 @@ class Templar:
return thing if thing is not None else ''
def _lookup(self, name, *args, **kwargs):
- instance = self._lookup_loader.get(name.lower(), loader=self._loader)
+ instance = self._lookup_loader.get(name.lower(), loader=self._loader, templar=self)
if instance is not None:
+ from ansible.utils.listify import listify_lookup_plugin_terms
+ loop_terms = listify_lookup_plugin_terms(terms=args, templar=self, loader=self._loader, fail_on_undefined=True, convert_bare=False)
# safely catch run failures per #5059
try:
- ran = instance.run(*args, variables=self._available_variables, **kwargs)
- except (AnsibleUndefinedVariable, UndefinedError):
- raise
+ ran = instance.run(loop_terms, variables=self._available_variables, **kwargs)
+ except (AnsibleUndefinedVariable, UndefinedError) as e:
+ raise AnsibleUndefinedVariable(e)
except Exception, e:
if self._fail_on_lookup_errors:
raise
@@ -261,7 +281,6 @@ class Templar:
if overrides is None:
myenv = self.environment.overlay()
else:
- overrides = JINJA2_ALLOWED_OVERRIDES.intersection(set(overrides))
myenv = self.environment.overlay(overrides)
# Get jinja env overrides from template
@@ -272,11 +291,11 @@ class Templar:
for pair in line.split(','):
(key,val) = pair.split(':')
key = key.strip()
- if key in JINJA2_ALLOWED_OVERRIDES:
- setattr(myenv, key, ast.literal_eval(val.strip()))
+ setattr(myenv, key, ast.literal_eval(val.strip()))
#FIXME: add tests
myenv.filters.update(self._get_filters())
+ myenv.tests.update(self._get_tests())
try:
t = myenv.from_string(data)
@@ -321,7 +340,7 @@ class Templar:
return res
except (UndefinedError, AnsibleUndefinedVariable), e:
if fail_on_undefined:
- raise
+ raise AnsibleUndefinedVariable(e)
else:
#TODO: return warning about undefined var
return data
diff --git a/lib/ansible/template/safe_eval.py b/lib/ansible/template/safe_eval.py
index 2689949504..5e2d1e1fe3 100644
--- a/lib/ansible/template/safe_eval.py
+++ b/lib/ansible/template/safe_eval.py
@@ -23,7 +23,7 @@ import sys
from six.moves import builtins
from ansible import constants as C
-from ansible.plugins import filter_loader
+from ansible.plugins import filter_loader, test_loader
def safe_eval(expr, locals={}, include_exceptions=False):
'''
@@ -77,7 +77,11 @@ def safe_eval(expr, locals={}, include_exceptions=False):
for filter in filter_loader.all():
filter_list.extend(filter.filters().keys())
- CALL_WHITELIST = C.DEFAULT_CALLABLE_WHITELIST + filter_list
+ test_list = []
+ for test in test_loader.all():
+ test_list.extend(test.tests().keys())
+
+ CALL_WHITELIST = C.DEFAULT_CALLABLE_WHITELIST + filter_list + test_list
class CleansingNodeVisitor(ast.NodeVisitor):
def generic_visit(self, node, inside_call=False):
diff --git a/lib/ansible/template/vars.py b/lib/ansible/template/vars.py
index 96051f4574..8f9af9506b 100644
--- a/lib/ansible/template/vars.py
+++ b/lib/ansible/template/vars.py
@@ -63,9 +63,6 @@ class AnsibleJ2Vars:
return False
def __getitem__(self, varname):
- # FIXME: are we still going to need HostVars?
- #from ansible.runner import HostVars
-
if varname not in self._templar._available_variables:
if varname in self._locals:
return self._locals[varname]
diff --git a/lib/ansible/utils/display.py b/lib/ansible/utils/display.py
index fa16b7af05..380f5b3327 100644
--- a/lib/ansible/utils/display.py
+++ b/lib/ansible/utils/display.py
@@ -19,6 +19,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import fcntl
import textwrap
import os
import random
@@ -27,6 +28,8 @@ import sys
import time
import logging
import getpass
+from struct import unpack, pack
+from termios import TIOCGWINSZ
from multiprocessing import Lock
from ansible import constants as C
@@ -57,6 +60,7 @@ class Display:
def __init__(self, verbosity=0):
+ self.columns = None
self.verbosity = verbosity
# list of all deprecation messages to prevent duplicate display
@@ -68,6 +72,7 @@ class Display:
self.noncow = os.getenv("ANSIBLE_COW_SELECTION",None)
self.set_cowsay_info()
+ self._set_column_width()
def set_cowsay_info(self):
@@ -161,16 +166,16 @@ class Display:
else:
raise AnsibleError("[DEPRECATED]: %s. Please update your playbooks." % msg)
- wrapped = textwrap.wrap(new_msg, 79)
+ wrapped = textwrap.wrap(new_msg, self.columns, replace_whitespace=False, drop_whitespace=False)
new_msg = "\n".join(wrapped) + "\n"
if new_msg not in self._deprecations:
- self.display(new_msg, color='purple', stderr=True)
+ self.display(new_msg.strip(), color='purple', stderr=True)
self._deprecations[new_msg] = 1
def warning(self, msg):
new_msg = "\n[WARNING]: %s" % msg
- wrapped = textwrap.wrap(new_msg, 79)
+ wrapped = textwrap.wrap(new_msg, self.columns)
new_msg = "\n".join(wrapped) + "\n"
if new_msg not in self._warns:
self.display(new_msg, color='bright purple', stderr=True)
@@ -194,7 +199,7 @@ class Display:
#FIXME: make this dynamic on tty size (look and ansible-doc)
msg = msg.strip()
- star_len = (80 - len(msg))
+ star_len = (79 - len(msg))
if star_len < 0:
star_len = 3
stars = "*" * star_len
@@ -217,7 +222,7 @@ class Display:
def error(self, msg, wrap_text=True):
if wrap_text:
new_msg = "\n[ERROR]: %s" % msg
- wrapped = textwrap.wrap(new_msg, 79)
+ wrapped = textwrap.wrap(new_msg, self.columns)
new_msg = "\n".join(wrapped) + "\n"
else:
new_msg = msg
@@ -239,3 +244,11 @@ class Display:
msg = to_bytes(msg)
return msg
+
+ def _set_column_width(self):
+ if os.isatty(0):
+ tty_size = unpack('HHHH', fcntl.ioctl(0, TIOCGWINSZ, pack('HHHH', 0, 0, 0, 0)))[1]
+ else:
+ tty_size = 0
+ self.columns = max(79, tty_size)
+
diff --git a/lib/ansible/utils/listify.py b/lib/ansible/utils/listify.py
index 7bcf9ce802..1be5f6da6e 100644
--- a/lib/ansible/utils/listify.py
+++ b/lib/ansible/utils/listify.py
@@ -26,19 +26,16 @@ from ansible.template.safe_eval import safe_eval
__all__ = ['listify_lookup_plugin_terms']
#FIXME: probably just move this into lookup plugin base class
-def listify_lookup_plugin_terms(terms, variables, loader):
+def listify_lookup_plugin_terms(terms, templar, loader, fail_on_undefined=False, convert_bare=True):
if isinstance(terms, basestring):
stripped = terms.strip()
- templar = Templar(loader=loader, variables=variables)
-
#FIXME: warn/deprecation on bare vars in with_ so we can eventually remove fail on undefined override
- terms = templar.template(terms, convert_bare=True, fail_on_undefined=False)
-
- #TODO: check if this is needed as template should also return correct type already
- terms = safe_eval(terms)
+ terms = templar.template(terms, convert_bare=convert_bare, fail_on_undefined=fail_on_undefined)
+ else:
+ terms = templar.template(terms, fail_on_undefined=fail_on_undefined)
- if isinstance(terms, basestring) or not isinstance(terms, Iterable):
- terms = [ terms ]
+ if isinstance(terms, basestring) or not isinstance(terms, Iterable):
+ terms = [ terms ]
return terms
diff --git a/lib/ansible/vars/__init__.py b/lib/ansible/vars/__init__.py
index 2a5be96690..86992f4d7e 100644
--- a/lib/ansible/vars/__init__.py
+++ b/lib/ansible/vars/__init__.py
@@ -190,7 +190,7 @@ class VariableManager:
if play:
all_vars = self._combine_vars(all_vars, play.get_vars())
-
+
for vars_file_item in play.get_vars_files():
try:
# create a set of temporary vars here, which incorporate the
@@ -215,11 +215,6 @@ class VariableManager:
break
else:
raise AnsibleError("vars file %s was not found" % vars_file_item)
- except AnsibleError, e:
- # FIXME: get_vars should probably be taking a flag to determine
- # whether or not vars files errors should be fatal at this
- # stage, or just base it on whether a host was specified?
- pass
except UndefinedError, e:
continue
@@ -258,9 +253,9 @@ class VariableManager:
all_vars['inventory_dir'] = self._inventory.basedir()
if play:
# add the list of hosts in the play, as adjusted for limit/filters
- # FIXME: play_hosts should be deprecated in favor of ansible_play_hosts,
- # however this would take work in the templating engine, so for now
- # we'll add both so we can give users something transitional to use
+ # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
+ # however this would take work in the templating engine, so for now
+ # we'll add both so we can give users something transitional to use
host_list = [x.name for x in self._inventory.get_hosts()]
all_vars['play_hosts'] = host_list
all_vars['ansible_play_hosts'] = host_list
@@ -275,7 +270,7 @@ class VariableManager:
copied_vars = all_vars.copy()
if 'hostvars' in copied_vars:
del copied_vars['hostvars']
- all_vars['vars'] = all_vars.copy()
+ all_vars['vars'] = copied_vars
#CACHED_VARS[cache_entry] = all_vars
diff --git a/lib/ansible/vars/hostvars.py b/lib/ansible/vars/hostvars.py
index 29d1e1aa80..af3d086ae8 100644
--- a/lib/ansible/vars/hostvars.py
+++ b/lib/ansible/vars/hostvars.py
@@ -19,13 +19,16 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import collections
+
from jinja2 import Undefined as j2undefined
from ansible.template import Templar
__all__ = ['HostVars']
-class HostVars(dict):
+# Note -- this is a Mapping, not a MutableMapping
+class HostVars(collections.Mapping):
''' A special view of vars_cache that adds values from the inventory when needed. '''
def __init__(self, vars_manager, play, inventory, loader):
@@ -36,7 +39,7 @@ class HostVars(dict):
self._lookup = {}
def __getitem__(self, host_name):
-
+
if host_name not in self._lookup:
host = self._inventory.get_host(host_name)
if not host:
@@ -46,3 +49,20 @@ class HostVars(dict):
self._lookup[host_name] = templar.template(result, fail_on_undefined=False)
return self._lookup[host_name]
+ def __contains__(self, host_name):
+ item = self.get(host_name)
+ if item and item is not j2undefined:
+ return True
+ return False
+
+ def __iter__(self):
+ raise NotImplementedError('HostVars does not support iteration as hosts are discovered on an as needed basis.')
+
+ def __len__(self):
+ raise NotImplementedError('HostVars does not support len. hosts entries are discovered dynamically as needed')
+
+ def __getstate__(self):
+ return self._lookup
+
+ def __setstate__(self, data):
+ self._lookup = data
diff --git a/test/integration/Makefile b/test/integration/Makefile
index 04953ee23c..50a56502f9 100644
--- a/test/integration/Makefile
+++ b/test/integration/Makefile
@@ -21,7 +21,7 @@ VAULT_PASSWORD_FILE = vault-password
CONSUL_RUNNING := $(shell python consul_running.py)
-all: parsing test_var_precedence unicode test_templating_settings non_destructive destructive includes check_mode test_hash test_handlers test_group_by test_vault test_tags
+all: parsing test_var_precedence unicode test_templating_settings environment non_destructive destructive includes check_mode test_hash test_handlers test_group_by test_vault test_tags test_lookup_paths
parsing:
ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario5
@@ -38,6 +38,9 @@ unicode:
test_templating_settings:
ansible-playbook test_templating_settings.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
+environment:
+ ansible-playbook test_environment.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) $(TEST_FLAGS)
+
non_destructive:
ansible-playbook non_destructive.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
@@ -73,7 +76,7 @@ test_hash:
ANSIBLE_HASH_BEHAVIOUR=merge ansible-playbook test_hash.yml -i $(INVENTORY) $(CREDENTIALS_ARG) -v -e '{"test_hash":{"extra_args":"this is an extra arg"}}'
test_var_precedence:
- ansible-playbook test_var_precedence.yml -i $(INVENTORY) $(CREDENTIALS_ARG) -v -e 'extra_var=extra_var' -e 'extra_var_override=extra_var_override'
+ ansible-playbook test_var_precedence.yml -i $(INVENTORY) $(CREDENTIALS_ARG) $(TEST_FLAGS) -v -e 'extra_var=extra_var' -e 'extra_var_override=extra_var_override'
test_vault:
ansible-playbook test_vault.yml -i $(INVENTORY) $(CREDENTIALS_ARG) -v $(TEST_FLAGS) --vault-password-file $(VAULT_PASSWORD_FILE) --list-tasks
@@ -184,3 +187,6 @@ test_galaxy_yaml:
RC=$$? ; \
rm -rf $$mytmpdir ; \
exit $$RC
+
+test_lookup_paths:
+ ansible-playbook lookup_paths/play.yml -i $(INVENTORY) -v $(TEST_FLAGS)
diff --git a/test/integration/cloudstack.yml b/test/integration/cloudstack.yml
index 0102689ed0..6951fd2856 100644
--- a/test/integration/cloudstack.yml
+++ b/test/integration/cloudstack.yml
@@ -4,6 +4,8 @@
gather_facts: no
tags:
- cloudstack
+ vars:
+ cs_resource_prefix: "cs{{ resource_prefix | default('') }}"
roles:
- { role: test_cs_sshkeypair, tags: test_cs_sshkeypair }
- { role: test_cs_affinitygroup, tags: test_cs_affinitygroup }
diff --git a/test/integration/integration_config.yml b/test/integration/integration_config.yml
index bf5d6db3de..34a7cbf73d 100644
--- a/test/integration/integration_config.yml
+++ b/test/integration/integration_config.yml
@@ -1,5 +1,5 @@
---
-win_output_dir: 'C:/temp/'
+win_output_dir: 'C:\ansible_testing'
output_dir: ~/ansible_testing
non_root_test_user: ansible
pip_test_package: epdb
diff --git a/test/integration/lookup.ini b/test/integration/lookup.ini
new file mode 100644
index 0000000000..16500fd899
--- /dev/null
+++ b/test/integration/lookup.ini
@@ -0,0 +1,24 @@
+[global]
+# A comment
+value1=Text associated with value1 and global section
+value2=Same for value2 and global section
+value.dot=Properties with dot
+field.with.space = another space
+
+[section1]
+value1=section1/value1
+value2=section1/value2
+
+[value_section]
+value1=1
+value2=2
+value3=3
+other1=4
+other2=5
+
+[other_section]
+value1=1
+value2=2
+value3=3
+other1=4
+other2=5
diff --git a/test/integration/lookup.properties b/test/integration/lookup.properties
new file mode 100644
index 0000000000..f388d8cfbf
--- /dev/null
+++ b/test/integration/lookup.properties
@@ -0,0 +1,5 @@
+# A comment
+value1=Text associated with value1
+value2=Same for value2
+value.dot=Properties with dot
+field.with.space = another space
diff --git a/test/integration/lookup_paths/play.yml b/test/integration/lookup_paths/play.yml
new file mode 100644
index 0000000000..26a9ce10b9
--- /dev/null
+++ b/test/integration/lookup_paths/play.yml
@@ -0,0 +1,64 @@
+- name: setup state
+ hosts: testhost
+ connection: local
+ gather_facts: false
+ tasks:
+ - file: path={{playbook_dir}}/files state=directory
+ - file: path={{playbook_dir}}/roles/showfile/files state=directory
+ - copy: dest={{playbook_dir}}/roles/showfile/files/testfile content='in role files'
+ - copy: dest={{playbook_dir}}/roles/showfile/tasks/testfile content='in role tasks'
+ - copy: dest={{playbook_dir}}/roles/showfile/testfile content='in role'
+ - copy: dest={{playbook_dir}}/files/testfile content='in files'
+ - copy: dest={{playbook_dir}}/testfile content='in local'
+ - set_fact: role_out="in role files" play_out="in files" stage='setup'
+
+- include: testplay.yml
+
+- name: remove from role/files
+ hosts: testhost
+ connection: local
+ gather_facts: false
+ tasks:
+ - file: path={{playbook_dir}}/roles/showfile/files/testfile state=absent
+ - set_fact: role_out="in role tasks" play_out="in files" stage='remove 1'
+
+- include: testplay.yml
+
+- name: remove from role/tasks
+ hosts: testhost
+ connection: local
+ gather_facts: false
+ tasks:
+ - file: path={{playbook_dir}}/roles/showfile/tasks/testfile state=absent
+ - set_fact: role_out="in files" play_out="in files" stage='remote 2'
+
+- include: testplay.yml
+
+- name: remove from role dir
+ hosts: testhost
+ connection: local
+ gather_facts: false
+ tasks:
+ - file: path={{playbook_dir}}/roles/showfile/testfile state=absent
+ - set_fact: role_out="in files" play_out="in files" stage='remove 3'
+
+- include: testplay.yml
+
+- name: remove from play/files
+ hosts: testhost
+ connection: local
+ gather_facts: false
+ tasks:
+ - file: path={{playbook_dir}}/files/testfile state=absent
+ - set_fact: role_out="in local" play_out="in local" stage='remove 4'
+
+- include: testplay.yml
+
+- name: cleanup
+ hosts: testhost
+ connection: local
+ gather_facts: false
+ tasks:
+ - file: path={{playbook_dir}}/testfile state=absent
+ - file: path={{playbook_dir}}/files state=absent
+ - file: path={{playbook_dir}}/roles/showfile/files state=absent
diff --git a/test/integration/lookup_paths/roles/showfile/tasks/main.yml b/test/integration/lookup_paths/roles/showfile/tasks/main.yml
new file mode 100644
index 0000000000..1b3805798d
--- /dev/null
+++ b/test/integration/lookup_paths/roles/showfile/tasks/main.yml
@@ -0,0 +1,2 @@
+- name: relative to role
+ set_fact: role_result="{{lookup('file', 'testfile')}}"
diff --git a/test/integration/lookup_paths/testplay.yml b/test/integration/lookup_paths/testplay.yml
new file mode 100644
index 0000000000..8bf4be2c08
--- /dev/null
+++ b/test/integration/lookup_paths/testplay.yml
@@ -0,0 +1,19 @@
+- name: test initial state
+ hosts: testhost
+ connection: local
+ gather_facts: false
+ roles:
+ - showfile
+ tasks:
+ - name: from play
+ set_fact: play_result="{{lookup('file', 'testfile')}}"
+
+ - name: output output {{stage}}
+ debug: msg="play> {{play_out}}, role> {{role_out}}"
+
+ - name: verify that result match expected
+ assert:
+ that:
+ - 'play_result == play_out'
+ - 'role_result == role_out'
+
diff --git a/test/integration/roles/test_cs_affinitygroup/tasks/main.yml b/test/integration/roles/test_cs_affinitygroup/tasks/main.yml
index 7ebab20bad..b066bf4901 100644
--- a/test/integration/roles/test_cs_affinitygroup/tasks/main.yml
+++ b/test/integration/roles/test_cs_affinitygroup/tasks/main.yml
@@ -15,7 +15,7 @@
assert:
that:
- ag|failed
- - ag.msg == "missing required arguments: name"
+ - "ag.msg == 'missing required arguments: name'"
- name: test present affinity group
cs_affinitygroup: name={{ cs_resource_prefix }}_ag
diff --git a/test/integration/roles/test_cs_instancegroup/tasks/main.yml b/test/integration/roles/test_cs_instancegroup/tasks/main.yml
index e3a726bf6f..d0f3c2258c 100644
--- a/test/integration/roles/test_cs_instancegroup/tasks/main.yml
+++ b/test/integration/roles/test_cs_instancegroup/tasks/main.yml
@@ -15,7 +15,7 @@
assert:
that:
- ig|failed
- - ig.msg == "missing required arguments: name"
+ - "ig.msg == 'missing required arguments: name'"
- name: test present instance group
cs_instancegroup: name={{ cs_resource_prefix }}_ig
diff --git a/test/integration/roles/test_cs_securitygroup/tasks/main.yml b/test/integration/roles/test_cs_securitygroup/tasks/main.yml
index d22871739e..de62ba150f 100644
--- a/test/integration/roles/test_cs_securitygroup/tasks/main.yml
+++ b/test/integration/roles/test_cs_securitygroup/tasks/main.yml
@@ -15,7 +15,7 @@
assert:
that:
- sg|failed
- - sg.msg == "missing required arguments: name"
+ - "sg.msg == 'missing required arguments: name'"
- name: test present security group
cs_securitygroup: name={{ cs_resource_prefix }}_sg
diff --git a/test/integration/roles/test_cs_sshkeypair/tasks/main.yml b/test/integration/roles/test_cs_sshkeypair/tasks/main.yml
index 35023b38aa..2f10e0da92 100644
--- a/test/integration/roles/test_cs_sshkeypair/tasks/main.yml
+++ b/test/integration/roles/test_cs_sshkeypair/tasks/main.yml
@@ -10,7 +10,7 @@
assert:
that:
- sshkey|failed
- - sshkey.msg == "missing required arguments: name"
+ - "sshkey.msg == 'missing required arguments: name'"
- name: test ssh key creation
cs_sshkeypair: name={{ cs_resource_prefix }}-sshkey
diff --git a/test/integration/roles/test_failed_when/tasks/main.yml b/test/integration/roles/test_failed_when/tasks/main.yml
index a69cef74cf..4a5617e142 100644
--- a/test/integration/roles/test_failed_when/tasks/main.yml
+++ b/test/integration/roles/test_failed_when/tasks/main.yml
@@ -54,7 +54,7 @@
- assert:
that:
- - "'failed' not in result"
+ - "'failed' in result and result.failed"
- name: command rc 1 failed_when_result False
shell: exit 1
diff --git a/test/integration/roles/test_file/tasks/main.yml b/test/integration/roles/test_file/tasks/main.yml
index f59e487b06..518f91bf74 100644
--- a/test/integration/roles/test_file/tasks/main.yml
+++ b/test/integration/roles/test_file/tasks/main.yml
@@ -137,7 +137,7 @@
- name: decide to include or not include selinux tests
include: selinux_tests.yml
- when: selinux_installed.stdout != "" and selinux_enabled.stdout != "Disabled"
+ when: selinux_installed is defined and selinux_installed.stdout != "" and selinux_enabled.stdout != "Disabled"
- name: remote directory foobar
file: path={{output_dir}}/foobar state=absent
diff --git a/test/integration/roles/test_filters/files/9851.txt b/test/integration/roles/test_filters/files/9851.txt
new file mode 100644
index 0000000000..70b12793e1
--- /dev/null
+++ b/test/integration/roles/test_filters/files/9851.txt
@@ -0,0 +1,3 @@
+ [{
+ "k": "Quotes \"'\n"
+}]
diff --git a/test/integration/roles/test_filters/tasks/main.yml b/test/integration/roles/test_filters/tasks/main.yml
index e0a2281501..43f02870fd 100644
--- a/test/integration/roles/test_filters/tasks/main.yml
+++ b/test/integration/roles/test_filters/tasks/main.yml
@@ -25,6 +25,25 @@
- name: Verify that we workaround a py26 json bug
template: src=py26json.j2 dest={{output_dir}}/py26json.templated mode=0644
+- name: 9851 - Verify that we don't trigger https://github.com/ansible/ansible/issues/9851
+ copy:
+ content: " [{{item|to_nice_json}}]"
+ dest: "{{output_dir}}/9851.out"
+ with_items:
+ - {"k": "Quotes \"'\n"}
+
+- name: 9851 - copy known good output into place
+ copy: src=9851.txt dest={{output_dir}}/9851.txt
+
+- name: 9851 - Compare generated json to known good
+ shell: diff {{output_dir}}/9851.out {{output_dir}}/9851.txt
+ register: 9851_diff_result
+
+- name: 9851 - verify generated file matches known good
+ assert:
+ that:
+ - '9851_diff_result.stdout == ""'
+
- name: fill in a basic template
template: src=foo.j2 dest={{output_dir}}/foo.templated mode=0644
register: template_result
diff --git a/test/integration/roles/test_iterators/tasks/main.yml b/test/integration/roles/test_iterators/tasks/main.yml
index b324da7932..3f8d8b2848 100644
--- a/test/integration/roles/test_iterators/tasks/main.yml
+++ b/test/integration/roles/test_iterators/tasks/main.yml
@@ -81,6 +81,11 @@
with_sequence: count=0
register: count_of_zero
+- name: test with_sequence does 1 when start == end
+ debug: msg='should run once'
+ with_sequence: start=1 end=1
+ register: start_equal_end
+
- name: test with_sequence count 1
set_fact: "{{ 'x' + item }}={{ item }}"
with_sequence: count=1
@@ -88,6 +93,7 @@
- assert:
that:
+ - not start_equal_end| skipped
- count_of_zero | skipped
- not count_of_one | skipped
@@ -97,7 +103,7 @@
set_fact: "random={{ item }}"
with_random_choice:
- "foo"
- - "bar"
+ - "bar"
- name: verify with_random_choice
assert:
diff --git a/test/integration/roles/test_template/files/foo.txt b/test/integration/roles/test_template/files/foo.txt
index edd704da04..58af3be81b 100644
--- a/test/integration/roles/test_template/files/foo.txt
+++ b/test/integration/roles/test_template/files/foo.txt
@@ -3,6 +3,7 @@ templated_var_loaded
{
"bool": true,
"multi_part": "1Foo",
+ "null_type": null,
"number": 5,
"string_num": "5"
}
diff --git a/test/integration/roles/test_template/vars/main.yml b/test/integration/roles/test_template/vars/main.yml
index b79f95e6cf..16776cb7e8 100644
--- a/test/integration/roles/test_template/vars/main.yml
+++ b/test/integration/roles/test_template/vars/main.yml
@@ -5,10 +5,12 @@ string_num: "5"
bool_var: true
part_1: 1
part_2: "Foo"
+null_type: !!null
templated_dict:
number: "{{ number_var }}"
string_num: "{{ string_num }}"
+ null_type: "{{ null_type }}"
bool: "{{ bool_var }}"
multi_part: "{{ part_1 }}{{ part_2 }}"
diff --git a/test/integration/roles/test_win_feature/tasks/main.yml b/test/integration/roles/test_win_feature/tasks/main.yml
index a49622c232..4b31f8b358 100644
--- a/test/integration/roles/test_win_feature/tasks/main.yml
+++ b/test/integration/roles/test_win_feature/tasks/main.yml
@@ -17,10 +17,16 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+- name: check whether servermanager module is available (windows 2008 r2 or later)
+ raw: PowerShell -Command Import-Module ServerManager
+ register: win_feature_has_servermanager
+ ignore_errors: true
+
- name: start with feature absent
win_feature:
name: "{{ test_win_feature_name }}"
state: absent
+ when: win_feature_has_servermanager|success
- name: install feature
win_feature:
@@ -30,6 +36,7 @@
include_sub_features: yes
include_management_tools: yes
register: win_feature_install_result
+ when: win_feature_has_servermanager|success
- name: check result of installing feature
assert:
@@ -45,6 +52,7 @@
- "win_feature_install_result.feature_result[0].restart_needed is defined"
- "win_feature_install_result.feature_result[0].skip_reason"
- "win_feature_install_result.feature_result[0].success is defined"
+ when: win_feature_has_servermanager|success
- name: install feature again
win_feature:
@@ -54,6 +62,7 @@
include_sub_features: yes
include_management_tools: yes
register: win_feature_install_again_result
+ when: win_feature_has_servermanager|success
- name: check result of installing feature again
assert:
@@ -63,12 +72,14 @@
- "win_feature_install_again_result.exitcode == 'NoChangeNeeded'"
- "not win_feature_install_again_result.restart_needed"
- "win_feature_install_again_result.feature_result == []"
+ when: win_feature_has_servermanager|success
- name: remove feature
win_feature:
name: "{{ test_win_feature_name }}"
state: absent
register: win_feature_remove_result
+ when: win_feature_has_servermanager|success
- name: check result of removing feature
assert:
@@ -84,12 +95,14 @@
- "win_feature_remove_result.feature_result[0].restart_needed is defined"
- "win_feature_remove_result.feature_result[0].skip_reason"
- "win_feature_remove_result.feature_result[0].success is defined"
+ when: win_feature_has_servermanager|success
- name: remove feature again
win_feature:
name: "{{ test_win_feature_name }}"
state: absent
register: win_feature_remove_again_result
+ when: win_feature_has_servermanager|success
- name: check result of removing feature again
assert:
@@ -99,6 +112,7 @@
- "win_feature_remove_again_result.exitcode == 'NoChangeNeeded'"
- "not win_feature_remove_again_result.restart_needed"
- "win_feature_remove_again_result.feature_result == []"
+ when: win_feature_has_servermanager|success
- name: try to install an invalid feature name
win_feature:
@@ -106,6 +120,7 @@
state: present
register: win_feature_install_invalid_result
ignore_errors: true
+ when: win_feature_has_servermanager|success
- name: check result of installing invalid feature name
assert:
@@ -114,6 +129,7 @@
- "not win_feature_install_invalid_result|changed"
- "win_feature_install_invalid_result.msg"
- "win_feature_install_invalid_result.exitcode == 'InvalidArgs'"
+ when: win_feature_has_servermanager|success
- name: try to remove an invalid feature name
win_feature:
@@ -121,6 +137,7 @@
state: absent
register: win_feature_remove_invalid_result
ignore_errors: true
+ when: win_feature_has_servermanager|success
- name: check result of removing invalid feature name
assert:
@@ -129,3 +146,4 @@
- "not win_feature_remove_invalid_result|changed"
- "win_feature_remove_invalid_result.msg"
- "win_feature_remove_invalid_result.exitcode == 'InvalidArgs'"
+ when: win_feature_has_servermanager|success
diff --git a/test/integration/roles/test_win_fetch/tasks/main.yml b/test/integration/roles/test_win_fetch/tasks/main.yml
index 8c0f5aa21f..f8b1865744 100644
--- a/test/integration/roles/test_win_fetch/tasks/main.yml
+++ b/test/integration/roles/test_win_fetch/tasks/main.yml
@@ -73,16 +73,14 @@
- "fetch_flat_stat.stat.isreg"
- "fetch_flat_stat.stat.md5 == fetch_flat.md5sum"
-- name: fetch a small file to flat directory (without trailing slash)
- fetch: src="C:/Windows/win.ini" dest="{{ output_dir }}" flat=yes
- register: fetch_flat_dir
- ignore_errors: true
+#- name: fetch a small file to flat directory (without trailing slash)
+# fetch: src="C:/Windows/win.ini" dest="{{ output_dir }}" flat=yes
+# register: fetch_flat_dir
-- name: check fetch flat to directory result
- assert:
- that:
- - "fetch_flat_dir|failed"
- - "fetch_flat_dir.msg"
+#- name: check fetch flat to directory result
+# assert:
+# that:
+# - "not fetch_flat_dir|changed"
- name: fetch a large binary file
fetch: src="C:/Windows/explorer.exe" dest={{ output_dir }}
@@ -114,7 +112,7 @@
- "not fetch_large_again.changed"
- name: fetch a small file using backslashes in src path
- fetch: src="C:\Windows\system.ini" dest={{ output_dir }}
+ fetch: src="C:\\Windows\\system.ini" dest={{ output_dir }}
register: fetch_small_bs
- name: check fetch small result with backslashes
@@ -157,7 +155,7 @@
- "not fetch_missing|changed"
- name: attempt to fetch a directory
- fetch: src="C:\Windows" dest={{ output_dir }}
+ fetch: src="C:\\Windows" dest={{ output_dir }}
register: fetch_dir
ignore_errors: true
diff --git a/test/integration/roles/test_win_file/tasks/main.yml b/test/integration/roles/test_win_file/tasks/main.yml
index 35ecfb6387..f823a16ff8 100644
--- a/test/integration/roles/test_win_file/tasks/main.yml
+++ b/test/integration/roles/test_win_file/tasks/main.yml
@@ -32,7 +32,7 @@
# - "file_result.state == 'file'"
- name: verify that we are checking an absent file
- win_file: path={{win_output_dir}}\bar.txt state=absent
+ win_file: path={{win_output_dir}}/bar.txt state=absent
register: file2_result
- name: verify that the file was marked as changed
@@ -42,7 +42,7 @@
# - "file2_result.state == 'absent'"
- name: verify we can touch a file
- win_file: path={{win_output_dir}}\baz.txt state=touch
+ win_file: path={{win_output_dir}}/baz.txt state=touch
register: file3_result
- name: verify that the file was marked as changed
@@ -85,8 +85,8 @@
# - "chown_result.failed == True"
# - "file_exists_result.stat.exists == False"
#
-- name: clean up
- win_file: path=/tmp/worldwritable state=absent
+#- name: clean up
+# win_file: path=/tmp/worldwritable state=absent
#- name: create soft link to file
# win_file: src={{output_file}} dest={{win_output_dir}}/soft.txt state=link
@@ -107,7 +107,7 @@
# - "file6_result.changed == true"
#
- name: create a directory
- win_file: path={{win_output_dir}}\foobar state=directory
+ win_file: path={{win_output_dir}}/foobar state=directory
register: file7_result
- debug: var=file7_result
@@ -134,22 +134,22 @@
# when: selinux_installed.stdout != "" and selinux_enabled.stdout != "Disabled"
- name: remote directory foobar
- win_file: path={{win_output_dir}}\foobar state=absent
+ win_file: path={{win_output_dir}}/foobar state=absent
- name: remove file foo.txt
- win_file: path={{win_output_dir}}\foo.txt state=absent
+ win_file: path={{win_output_dir}}/foo.txt state=absent
- name: remove file bar.txt
- win_file: path={{win_output_dir}}\foo.txt state=absent
+ win_file: path={{win_output_dir}}/foo.txt state=absent
- name: remove file baz.txt
- win_file: path={{win_output_dir}}\foo.txt state=absent
+ win_file: path={{win_output_dir}}/foo.txt state=absent
- name: win copy directory structure over
win_copy: src=foobar dest={{win_output_dir}}
- name: remove directory foobar
- win_file: path={{win_output_dir}}\foobar state=absent
+ win_file: path={{win_output_dir}}/foobar state=absent
register: file14_result
- debug: var=file14_result
diff --git a/test/integration/roles/test_win_msi/tasks/main.yml b/test/integration/roles/test_win_msi/tasks/main.yml
index d0d7034d78..85c9957a1d 100644
--- a/test/integration/roles/test_win_msi/tasks/main.yml
+++ b/test/integration/roles/test_win_msi/tasks/main.yml
@@ -17,7 +17,7 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
- name: use win_get_url module to download msi
- win_get_url: url=http://downloads.sourceforge.net/project/sevenzip/7-Zip/9.22/7z922-x64.msi dest='C:\7z922-x64.msi'
+ win_get_url: url=http://downloads.sourceforge.net/project/sevenzip/7-Zip/9.22/7z922-x64.msi dest='C:\\7z922-x64.msi'
register: win_get_url_result
- name: install 7zip msi
diff --git a/test/integration/roles/test_win_ping/tasks/main.yml b/test/integration/roles/test_win_ping/tasks/main.yml
index 8bcbe910c4..f17a4a9227 100644
--- a/test/integration/roles/test_win_ping/tasks/main.yml
+++ b/test/integration/roles/test_win_ping/tasks/main.yml
@@ -28,7 +28,7 @@
- "win_ping_result.ping == 'pong'"
- name: test win_ping with data
- win_ping: data=blah
+ win_ping: data=☠
register: win_ping_with_data_result
- name: check win_ping result with data
@@ -36,21 +36,11 @@
that:
- "not win_ping_with_data_result|failed"
- "not win_ping_with_data_result|changed"
- - "win_ping_with_data_result.ping == 'blah'"
+ - "win_ping_with_data_result.ping == '☠'"
-#- name: test local ping (should use default ping)
-# local_action: ping
-# register: local_ping_result
-
-#- name: check local ping result
-# assert:
-# that:
-# - "not local_ping_result|failed"
-# - "not local_ping_result|changed"
-# - "local_ping_result.ping == 'pong'"
-
-- name: test win_ping.ps1 with data
- win_ping.ps1: data=bleep
+- name: test win_ping.ps1 with data as complex args
+ win_ping.ps1:
+ data: bleep
register: win_ping_ps1_result
- name: check win_ping.ps1 result with data
@@ -60,13 +50,32 @@
- "not win_ping_ps1_result|changed"
- "win_ping_ps1_result.ping == 'bleep'"
-#- name: test win_ping with invalid args
-# win_ping: arg=invalid
-# register: win_ping_ps1_invalid_args_result
-
-#- name: check that win_ping.ps1 with invalid args fails
-# assert:
-# that:
-# - "win_ping_ps1_invalid_args_result|failed"
-# - "win_ping_ps1_invalid_args_result.msg"
+- name: test win_ping with extra args to verify that v2 module replacer escaping works as expected
+ win_ping:
+ data: bloop
+ a_null: null
+ a_boolean: true
+ another_boolean: false
+ a_number: 299792458
+ another_number: 22.7
+ yet_another_number: 6.022e23
+ a_string: |
+ it's magic
+ "@'
+ '@"
+ an_array:
+ - first
+ - 2
+ - 3.0
+ an_object:
+ - the_thing: the_value
+ - the_other_thing: 0
+ - the_list_of_things: [1, 2, 3, 5]
+ register: win_ping_extra_args_result
+- name: check that win_ping with extra args succeeds and ignores everything except data
+ assert:
+ that:
+ - "not win_ping_extra_args_result|failed"
+ - "not win_ping_extra_args_result|changed"
+ - "win_ping_extra_args_result.ping == 'bloop'"
diff --git a/test/integration/roles/test_win_raw/tasks/main.yml b/test/integration/roles/test_win_raw/tasks/main.yml
index c51ba4b2cc..8a5412c381 100644
--- a/test/integration/roles/test_win_raw/tasks/main.yml
+++ b/test/integration/roles/test_win_raw/tasks/main.yml
@@ -72,7 +72,7 @@
- "not unknown_result|changed"
- name: run a command that takes longer than 60 seconds
- raw: PowerShell -Command Start-Sleep -s 75
+ raw: Start-Sleep -s 75
register: sleep_command
- name: assert that the sleep command ran
@@ -83,3 +83,12 @@
- "not sleep_command.stderr"
- "not sleep_command|failed"
- "not sleep_command|changed"
+
+- name: run a raw command with key=value arguments
+ raw: echo wwe=raw
+ register: raw_result
+
+- name: make sure raw is really raw and not removing key=value arguments
+ assert:
+ that:
+ - "raw_result.stdout_lines[0] == 'wwe=raw'"
diff --git a/test/integration/roles/test_win_script/defaults/main.yml b/test/integration/roles/test_win_script/defaults/main.yml
index a2c6475e75..90b756af0a 100644
--- a/test/integration/roles/test_win_script/defaults/main.yml
+++ b/test/integration/roles/test_win_script/defaults/main.yml
@@ -3,3 +3,4 @@
# Parameters to pass to test scripts.
test_win_script_value: VaLuE
test_win_script_splat: "@{This='THIS'; That='THAT'; Other='OTHER'}"
+test_win_script_filename: "C:/Users/{{ansible_ssh_user}}/testing_win_script.txt"
diff --git a/test/integration/roles/test_win_script/files/test_script.cmd b/test/integration/roles/test_win_script/files/test_script.cmd
new file mode 100644
index 0000000000..0e36312d0f
--- /dev/null
+++ b/test/integration/roles/test_win_script/files/test_script.cmd
@@ -0,0 +1,2 @@
+@ECHO OFF
+ECHO We can even run a batch file with cmd extension!
diff --git a/test/integration/roles/test_win_script/files/test_script_creates_file.ps1 b/test/integration/roles/test_win_script/files/test_script_creates_file.ps1
new file mode 100644
index 0000000000..47f85a2d49
--- /dev/null
+++ b/test/integration/roles/test_win_script/files/test_script_creates_file.ps1
@@ -0,0 +1,3 @@
+# Test script to create a file.
+
+echo $null > $args[0]
diff --git a/test/integration/roles/test_win_script/files/test_script_removes_file.ps1 b/test/integration/roles/test_win_script/files/test_script_removes_file.ps1
new file mode 100644
index 0000000000..f0549a5b3b
--- /dev/null
+++ b/test/integration/roles/test_win_script/files/test_script_removes_file.ps1
@@ -0,0 +1,3 @@
+# Test script to remove a file.
+
+Remove-Item $args[0] -Force
diff --git a/test/integration/roles/test_win_script/tasks/main.yml b/test/integration/roles/test_win_script/tasks/main.yml
index e1e5f25611..313569face 100644
--- a/test/integration/roles/test_win_script/tasks/main.yml
+++ b/test/integration/roles/test_win_script/tasks/main.yml
@@ -30,24 +30,24 @@
- "not test_script_result|failed"
- "test_script_result|changed"
-- name: run test script that takes arguments
- script: test_script_with_args.ps1 /this /that /other
+- name: run test script that takes arguments including a unicode char
+ script: test_script_with_args.ps1 /this /that /Ӧther
register: test_script_with_args_result
-- name: check that script ran and received arguments
+- name: check that script ran and received arguments and returned unicode
assert:
that:
- "test_script_with_args_result.rc == 0"
- "test_script_with_args_result.stdout"
- "test_script_with_args_result.stdout_lines[0] == '/this'"
- "test_script_with_args_result.stdout_lines[1] == '/that'"
- - "test_script_with_args_result.stdout_lines[2] == '/other'"
+ - "test_script_with_args_result.stdout_lines[2] == '/Ӧther'"
- "not test_script_with_args_result.stderr"
- "not test_script_with_args_result|failed"
- "test_script_with_args_result|changed"
- name: run test script that takes parameters passed via splatting
- script: test_script_with_splatting.ps1 "@{ This = 'this'; That = '{{ test_win_script_value }}'; Other = 'other'}"
+ script: test_script_with_splatting.ps1 @{ This = 'this'; That = '{{ test_win_script_value }}'; Other = 'other'}
register: test_script_with_splatting_result
- name: check that script ran and received parameters via splatting
@@ -63,7 +63,7 @@
- "test_script_with_splatting_result|changed"
- name: run test script that takes splatted parameters from a variable
- script: test_script_with_splatting.ps1 {{ test_win_script_splat|quote }}
+ script: test_script_with_splatting.ps1 {{ test_win_script_splat }}
register: test_script_with_splatting2_result
- name: check that script ran and received parameters via splatting from a variable
@@ -92,6 +92,58 @@
- "test_script_with_errors_result|failed"
- "test_script_with_errors_result|changed"
+- name: cleanup test file if it exists
+ raw: Remove-Item "{{test_win_script_filename}}" -Force
+ ignore_errors: true
+
+- name: run test script that creates a file
+ script: test_script_creates_file.ps1 "{{test_win_script_filename}}" creates="{{test_win_script_filename}}"
+ register: test_script_creates_file_result
+
+- name: check that script ran and indicated a change
+ assert:
+ that:
+ - "test_script_creates_file_result.rc == 0"
+ - "not test_script_creates_file_result.stdout"
+ - "not test_script_creates_file_result.stderr"
+ - "not test_script_creates_file_result|failed"
+ - "test_script_creates_file_result|changed"
+
+- name: run test script that creates a file again
+ script: test_script_creates_file.ps1 "{{test_win_script_filename}}" creates="{{test_win_script_filename}}"
+ register: test_script_creates_file_again_result
+
+- name: check that the script did not run since the remote file exists
+ assert:
+ that:
+ - "not test_script_creates_file_again_result|failed"
+ - "not test_script_creates_file_again_result|changed"
+ - "test_script_creates_file_again_result|skipped"
+
+- name: run test script that removes a file
+ script: test_script_removes_file.ps1 "{{test_win_script_filename}}" removes="{{test_win_script_filename}}"
+ register: test_script_removes_file_result
+
+- name: check that the script ran since the remote file exists
+ assert:
+ that:
+ - "test_script_removes_file_result.rc == 0"
+ - "not test_script_removes_file_result.stdout"
+ - "not test_script_removes_file_result.stderr"
+ - "not test_script_removes_file_result|failed"
+ - "test_script_removes_file_result|changed"
+
+- name: run test script that removes a file again
+ script: test_script_removes_file.ps1 "{{test_win_script_filename}}" removes="{{test_win_script_filename}}"
+ register: test_script_removes_file_again_result
+
+- name: check that the script did not run since the remote file does not exist
+ assert:
+ that:
+ - "not test_script_removes_file_again_result|failed"
+ - "not test_script_removes_file_again_result|changed"
+ - "test_script_removes_file_again_result|skipped"
+
- name: run simple batch file
script: test_script.bat
register: test_batch_result
@@ -105,3 +157,17 @@
- "not test_batch_result.stderr"
- "not test_batch_result|failed"
- "test_batch_result|changed"
+
+- name: run simple batch file with .cmd extension
+ script: test_script.cmd
+ register: test_cmd_result
+
+- name: check that batch file with .cmd extension ran
+ assert:
+ that:
+ - "test_cmd_result.rc == 0"
+ - "test_cmd_result.stdout"
+ - "'cmd extension' in test_cmd_result.stdout"
+ - "not test_cmd_result.stderr"
+ - "not test_cmd_result|failed"
+ - "test_cmd_result|changed"
diff --git a/test/integration/roles/test_win_setup/tasks/main.yml b/test/integration/roles/test_win_setup/tasks/main.yml
index c2f4728b21..fb13da1542 100644
--- a/test/integration/roles/test_win_setup/tasks/main.yml
+++ b/test/integration/roles/test_win_setup/tasks/main.yml
@@ -20,7 +20,7 @@
action: setup
register: setup_result
-- name: check setup result
+- name: check windows setup result
assert:
that:
- "not setup_result|failed"
@@ -38,6 +38,8 @@
- "setup_result.ansible_facts.ansible_interfaces[0]"
- "setup_result.ansible_facts.ansible_interfaces[0].interface_name"
- "setup_result.ansible_facts.ansible_interfaces[0].interface_index"
+ - "setup_result.ansible_facts.ansible_architecture"
+ - "setup_result.ansible_facts.ansible_os_name"
- "setup_result.ansible_facts.ansible_powershell_version"
- name: check setup result only when using https
diff --git a/test/integration/roles/test_win_stat/tasks/main.yml b/test/integration/roles/test_win_stat/tasks/main.yml
index 5069f51a80..5197c27fef 100644
--- a/test/integration/roles/test_win_stat/tasks/main.yml
+++ b/test/integration/roles/test_win_stat/tasks/main.yml
@@ -27,6 +27,12 @@
- "not win_stat_file.stat.isdir"
- "win_stat_file.stat.size > 0"
- "win_stat_file.stat.md5"
+ - "win_stat_file.stat.extension"
+ - "win_stat_file.stat.attributes"
+ - "win_stat_file.stat.owner"
+ - "win_stat_file.stat.creationtime"
+ - "win_stat_file.stat.lastaccesstime"
+ - "win_stat_file.stat.lastwritetime"
- "not win_stat_file|failed"
- "not win_stat_file|changed"
@@ -34,13 +40,19 @@
win_stat: path="C:\Windows\win.ini" get_md5=no
register: win_stat_file_no_md5
-- name: check win_stat file result without md
+- name: check win_stat file result without md5
assert:
that:
- "win_stat_file_no_md5.stat.exists"
- "not win_stat_file_no_md5.stat.isdir"
- "win_stat_file_no_md5.stat.size > 0"
- "not win_stat_file_no_md5.stat.md5|default('')"
+ - "win_stat_file_no_md5.stat.extension"
+ - "win_stat_file_no_md5.stat.attributes"
+ - "win_stat_file_no_md5.stat.owner"
+ - "win_stat_file_no_md5.stat.creationtime"
+ - "win_stat_file_no_md5.stat.lastaccesstime"
+ - "win_stat_file_no_md5.stat.lastwritetime"
- "not win_stat_file_no_md5|failed"
- "not win_stat_file_no_md5|changed"
@@ -53,6 +65,12 @@
that:
- "win_stat_dir.stat.exists"
- "win_stat_dir.stat.isdir"
+ - "win_stat_dir.stat.extension == ''"
+ - "win_stat_dir.stat.attributes"
+ - "win_stat_dir.stat.owner"
+ - "win_stat_dir.stat.creationtime"
+ - "win_stat_dir.stat.lastaccesstime"
+ - "win_stat_dir.stat.lastwritetime"
- "not win_stat_dir|failed"
- "not win_stat_dir|changed"
diff --git a/test/integration/roles/test_win_template/tasks/main.yml b/test/integration/roles/test_win_template/tasks/main.yml
index 9c2ea920ff..c276b8d323 100644
--- a/test/integration/roles/test_win_template/tasks/main.yml
+++ b/test/integration/roles/test_win_template/tasks/main.yml
@@ -39,13 +39,24 @@
that:
- "template_result.changed == true"
+- name: fill in a basic template again
+ win_template:
+ src: foo.j2
+ dest: "{{win_output_dir}}/foo.templated"
+ register: template_result2
+
+- name: verify that the template was not changed
+ assert:
+ that:
+ - "not template_result2|changed"
+
# VERIFY CONTENTS
- name: copy known good into place
- win_copy: src=foo.txt dest={{win_output_dir}}\foo.txt
+ win_copy: src=foo.txt dest={{win_output_dir}}\\foo.txt
- name: compare templated file to known good
- raw: fc.exe {{win_output_dir}}\foo.templated {{win_output_dir}}\foo.txt
+ raw: fc.exe {{win_output_dir}}\\foo.templated {{win_output_dir}}\\foo.txt
register: diff_result
- debug: var=diff_result
diff --git a/test/integration/roles/test_win_user/tasks/main.yml b/test/integration/roles/test_win_user/tasks/main.yml
index 0e22e332ae..0316afb61b 100644
--- a/test/integration/roles/test_win_user/tasks/main.yml
+++ b/test/integration/roles/test_win_user/tasks/main.yml
@@ -51,7 +51,7 @@
- "win_user_missing_query_result.state == 'absent'"
- name: test create user
- win_user: name="{{ test_win_user_name }}" password="{{ test_win_user_password }}" groups="Guests"
+ win_user: name="{{ test_win_user_name }}" password="{{ test_win_user_password }}" fullname="Test User" description="Test user account" groups="Guests"
register: win_user_create_result
- name: check user creation result
@@ -59,7 +59,8 @@
that:
- "win_user_create_result|changed"
- "win_user_create_result.name == '{{ test_win_user_name }}'"
- - "win_user_create_result.fullname == '{{ test_win_user_name }}'"
+ - "win_user_create_result.fullname == 'Test User'"
+ - "win_user_create_result.description == 'Test user account'"
- "win_user_create_result.path"
- "win_user_create_result.state == 'present'"
diff --git a/test/integration/test_environment.yml b/test/integration/test_environment.yml
new file mode 100644
index 0000000000..560661628e
--- /dev/null
+++ b/test/integration/test_environment.yml
@@ -0,0 +1,52 @@
+- hosts: testhost
+ vars:
+ - test1:
+ key1: val1
+ roles:
+ - { role: prepare_tests }
+ tasks:
+ - name: check that envvar does not exist
+ shell: echo $key1
+ register: test_env
+
+ - assert:
+ that:
+ - '"val1" not in test_env.stdout'
+
+ - name: check that envvar does exist
+ shell: echo $key1
+ environment: "{{test1}}"
+ register: test_env2
+
+ - assert:
+ that:
+ - '"val1" in test_env2.stdout'
+
+- hosts: testhost
+ tasks:
+ vars:
+ - test1:
+ key1: val1
+ - test2:
+ key1: not1
+ other1: val2
+ environment: "{{test1}}"
+ tasks:
+ - name: check that play envvar does exist
+ shell: echo $key1
+ register: test_env
+
+ - assert:
+ that:
+ - '"val1" in test_env.stdout'
+
+ - name: check that task envvar does exist
+ shell: echo $key1; echo $other1
+ register: test_env2
+ environment: "{{test2}}"
+
+ - assert:
+ that:
+ - '"val1" not in test_env2.stdout'
+ - '"not1" in test_env2.stdout'
+ - '"val2" in test_env2.stdout'
diff --git a/test/integration/test_lookup_properties.yml b/test/integration/test_lookup_properties.yml
new file mode 100644
index 0000000000..4d22ce642c
--- /dev/null
+++ b/test/integration/test_lookup_properties.yml
@@ -0,0 +1,40 @@
+---
+- name: "Lookup test"
+ hosts: "localhost"
+# connection: local
+ tasks:
+ - name: "read properties value"
+ set_fact:
+ test1: "{{lookup('ini', 'value1 type=properties file=lookup.properties')}}"
+ test2: "{{lookup('ini', 'value2 type=properties file=lookup.properties')}}"
+ test_dot: "{{lookup('ini', 'value.dot type=properties file=lookup.properties')}}"
+ field_with_space: "{{lookup('ini', 'field.with.space type=properties file=lookup.properties')}}"
+ - debug: var={{item}}
+ with_items: [ 'test1', 'test2', 'test_dot', 'field_with_space' ]
+ - name: "read ini value"
+ set_fact:
+ value1_global: "{{lookup('ini', 'value1 section=global file=lookup.ini')}}"
+ value2_global: "{{lookup('ini', 'value2 section=global file=lookup.ini')}}"
+ value1_section1: "{{lookup('ini', 'value1 section=section1 file=lookup.ini')}}"
+ - debug: var={{item}}
+ with_items: [ 'value1_global', 'value2_global', 'value1_section1' ]
+ - name: "read ini value with section and regexp"
+ set_fact:
+ value_section: "{{lookup('ini', 'value[1-2] section=value_section file=lookup.ini re=true')}}"
+ other_section: "{{lookup('ini', 'other[1-2] section=other_section file=lookup.ini re=true')}}"
+ - debug: var={{item}}
+ with_items: [ 'value_section', 'other_section' ]
+ - name: "Reading unknown value"
+ set_fact:
+ unknown: "{{lookup('ini', 'value2 default=unknown section=section1 file=lookup.ini')}}"
+ - debug: var=unknown
+ - name: "Looping over section section1"
+ debug: msg="{{item}}"
+ with_ini: value[1-2] section=section1 file=lookup.ini re=true
+ - name: "Looping over section value_section"
+ debug: msg="{{item}}"
+ with_ini: value[1-2] section=value_section file=lookup.ini re=true
+ - debug: msg="{{item}}"
+ with_ini: value[1-2] section=section1 file=lookup.ini re=true
+ register: _
+ - debug: var=_
diff --git a/test/units/executor/test_task_executor.py b/test/units/executor/test_task_executor.py
index 54febca4b7..0300b7ad07 100644
--- a/test/units/executor/test_task_executor.py
+++ b/test/units/executor/test_task_executor.py
@@ -25,7 +25,7 @@ from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.executor.task_executor import TaskExecutor
from ansible.playbook.play_context import PlayContext
-from ansible.plugins import action_loader
+from ansible.plugins import action_loader, lookup_loader
from units.mock.loader import DictDataLoader
@@ -107,6 +107,7 @@ class TestTaskExecutor(unittest.TestCase):
mock_play_context = MagicMock()
mock_shared_loader = MagicMock()
+ mock_shared_loader.lookup_loader = lookup_loader
new_stdin = None
job_vars = dict()
diff --git a/test/units/mock/loader.py b/test/units/mock/loader.py
index f44df2efdb..88f3970913 100644
--- a/test/units/mock/loader.py
+++ b/test/units/mock/loader.py
@@ -29,11 +29,11 @@ class DictDataLoader(DataLoader):
def __init__(self, file_mapping=dict()):
assert type(file_mapping) == dict
+ super(DictDataLoader, self).__init__()
+
self._file_mapping = file_mapping
self._build_known_directories()
- super(DictDataLoader, self).__init__()
-
def load_from_file(self, path):
if path in self._file_mapping:
return self.load(self._file_mapping[path], path)
@@ -73,7 +73,7 @@ class DictDataLoader(DataLoader):
rebuild_dirs = False
if path not in self._file_mapping:
rebuild_dirs = True
-
+
self._file_mapping[path] = content
if rebuild_dirs:
diff --git a/test/units/parsing/test_mod_args.py b/test/units/parsing/test_mod_args.py
index 187edfa03c..bce31d6f1f 100644
--- a/test/units/parsing/test_mod_args.py
+++ b/test/units/parsing/test_mod_args.py
@@ -109,11 +109,11 @@ class TestModArgsDwim(unittest.TestCase):
def test_local_action_string(self):
m = ModuleArgsParser(dict(local_action='copy src=a dest=b'))
- mod, args, to = m.parse()
- self._debug(mod, args, to)
+ mod, args, connection = m.parse()
+ self._debug(mod, args, connection)
self.assertEqual(mod, 'copy')
self.assertEqual(args, dict(src='a', dest='b'))
- self.assertIs(to, 'localhost')
+ self.assertIs(connection, 'local')
def test_multiple_actions(self):
m = ModuleArgsParser(dict(action='shell echo hi', local_action='shell echo hi'))
diff --git a/test/units/playbook/test_play_context.py b/test/units/playbook/test_play_context.py
index 1eade6f540..175267ef6b 100644
--- a/test/units/playbook/test_play_context.py
+++ b/test/units/playbook/test_play_context.py
@@ -44,7 +44,7 @@ class TestPlayContext(unittest.TestCase):
connect_opts = True,
subset_opts = True,
check_opts = True,
- diff_opts = True,
+ inventory_opts = True,
)
def tearDown(self):
@@ -93,14 +93,13 @@ class TestPlayContext(unittest.TestCase):
mock_task.become_pass = 'mocktaskpass'
mock_task.no_log = False
- mock_host = MagicMock()
- mock_host.get_vars.return_value = dict(
+ all_vars = dict(
ansible_connection = 'mock_inventory',
ansible_ssh_port = 4321,
)
play_context = PlayContext(play=mock_play, options=options)
- play_context = play_context.set_task_and_host_override(task=mock_task, host=mock_host)
+ play_context = play_context.set_task_and_variable_override(task=mock_task, variables=all_vars)
self.assertEqual(play_context.connection, 'mock_inventory')
self.assertEqual(play_context.remote_user, 'mocktask')
self.assertEqual(play_context.port, 4321)
@@ -117,13 +116,15 @@ class TestPlayContext(unittest.TestCase):
default_cmd = "/bin/foo"
default_exe = "/bin/bash"
sudo_exe = C.DEFAULT_SUDO_EXE
- sudo_flags = C.DEFAULT_SUDO_FLAGS
+ sudo_flags = C.DEFAULT_SUDO_FLAGS + " -n "
su_exe = C.DEFAULT_SU_EXE
su_flags = C.DEFAULT_SU_FLAGS
pbrun_exe = 'pbrun'
pbrun_flags = ''
pfexec_exe = 'pfexec'
pfexec_flags = ''
+ doas_exe = 'doas'
+ doas_flags = ' -n -u foo '
cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
self.assertEqual(cmd, default_cmd)
@@ -133,7 +134,7 @@ class TestPlayContext(unittest.TestCase):
play_context.become_method = 'sudo'
cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
- self.assertEqual(cmd, """%s -c '%s -k && %s %s -S -p "%s" -u %s %s -c '"'"'echo %s; %s'"'"''""" % (default_exe, sudo_exe, sudo_exe, sudo_flags, play_context.prompt, play_context.become_user, default_exe, play_context.success_key, default_cmd))
+ self.assertEqual(cmd, """%s -c '%s %s -S -p "%s" -u %s %s -c '"'"'echo %s; %s'"'"''""" % (default_exe, sudo_exe, sudo_flags, play_context.prompt, play_context.become_user, default_exe, play_context.success_key, default_cmd))
play_context.become_method = 'su'
cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
@@ -147,6 +148,10 @@ class TestPlayContext(unittest.TestCase):
cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
self.assertEqual(cmd, """%s -c '%s %s "'"'"'echo %s; %s'"'"'"'""" % (default_exe, pfexec_exe, pfexec_flags, play_context.success_key, default_cmd))
+ play_context.become_method = 'doas'
+ cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
+ self.assertEqual(cmd, """%s -c '%s %s echo %s && %s %s env ANSIBLE=true %s'""" % (default_exe, doas_exe, doas_flags, play_context.success_key, doas_exe, doas_flags, default_cmd))
+
play_context.become_method = 'bad'
self.assertRaises(AnsibleError, play_context.make_become_cmd, cmd=default_cmd, executable="/bin/bash")
diff --git a/test/units/playbook/test_role.py b/test/units/playbook/test_role.py
index 208fe9aeda..e0764a9b5b 100644
--- a/test/units/playbook/test_role.py
+++ b/test/units/playbook/test_role.py
@@ -49,7 +49,7 @@ class TestRole(unittest.TestCase):
mock_play = MagicMock()
mock_play.ROLE_CACHE = {}
- i = RoleInclude.load('foo_tasks', loader=fake_loader)
+ i = RoleInclude.load('foo_tasks', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
self.assertEqual(str(r), 'foo_tasks')
@@ -68,7 +68,7 @@ class TestRole(unittest.TestCase):
mock_play = MagicMock()
mock_play.ROLE_CACHE = {}
- i = RoleInclude.load('foo_handlers', loader=fake_loader)
+ i = RoleInclude.load('foo_handlers', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
self.assertEqual(len(r._handler_blocks), 1)
@@ -88,7 +88,7 @@ class TestRole(unittest.TestCase):
mock_play = MagicMock()
mock_play.ROLE_CACHE = {}
- i = RoleInclude.load('foo_vars', loader=fake_loader)
+ i = RoleInclude.load('foo_vars', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
self.assertEqual(r._default_vars, dict(foo='bar'))
@@ -134,7 +134,7 @@ class TestRole(unittest.TestCase):
mock_play = MagicMock()
mock_play.ROLE_CACHE = {}
- i = RoleInclude.load('foo_metadata', loader=fake_loader)
+ i = RoleInclude.load('foo_metadata', play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
role_deps = r.get_direct_dependencies()
@@ -152,13 +152,13 @@ class TestRole(unittest.TestCase):
self.assertEqual(all_deps[1].get_name(), 'baz_metadata')
self.assertEqual(all_deps[2].get_name(), 'bar_metadata')
- i = RoleInclude.load('bad1_metadata', loader=fake_loader)
+ i = RoleInclude.load('bad1_metadata', play=mock_play, loader=fake_loader)
self.assertRaises(AnsibleParserError, Role.load, i, play=mock_play)
- i = RoleInclude.load('bad2_metadata', loader=fake_loader)
+ i = RoleInclude.load('bad2_metadata', play=mock_play, loader=fake_loader)
self.assertRaises(AnsibleParserError, Role.load, i, play=mock_play)
- i = RoleInclude.load('recursive1_metadata', loader=fake_loader)
+ i = RoleInclude.load('recursive1_metadata', play=mock_play, loader=fake_loader)
self.assertRaises(AnsibleError, Role.load, i, play=mock_play)
def test_load_role_complex(self):
@@ -175,7 +175,7 @@ class TestRole(unittest.TestCase):
mock_play = MagicMock()
mock_play.ROLE_CACHE = {}
- i = RoleInclude.load(dict(role='foo_complex'), loader=fake_loader)
+ i = RoleInclude.load(dict(role='foo_complex'), play=mock_play, loader=fake_loader)
r = Role.load(i, play=mock_play)
self.assertEqual(r.get_name(), "foo_complex")
diff --git a/test/units/template/test_templar.py b/test/units/template/test_templar.py
index 6d2301fb9f..e4627c2c61 100644
--- a/test/units/template/test_templar.py
+++ b/test/units/template/test_templar.py
@@ -19,8 +19,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from jinja2.exceptions import UndefinedError
-
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
@@ -75,8 +73,8 @@ class TestTemplar(unittest.TestCase):
#self.assertEqual(templar.template("{{lookup('file', '/path/to/my_file.txt')}}"), "foo")
# force errors
- self.assertRaises(UndefinedError, templar.template, "{{bad_var}}")
- self.assertRaises(UndefinedError, templar.template, "{{lookup('file', bad_var)}}")
+ self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{bad_var}}")
+ self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{lookup('file', bad_var)}}")
self.assertRaises(AnsibleError, templar.template, "{{lookup('bad_lookup')}}")
self.assertRaises(AnsibleError, templar.template, "{{recursive}}")
self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{foo-bar}}")
diff --git a/tox.ini b/tox.ini
index dd94e045ab..ad3d37b521 100644
--- a/tox.ini
+++ b/tox.ini
@@ -8,16 +8,16 @@ whitelist_externals = make
[testenv:py26]
commands =
+ python --version
python -m compileall -fq -x 'test|samples|contrib/inventory/vagrant.py' .
- python2.4 -m compileall -fq -x 'module_utils/(a10|rax|openstack|ec2|gce).py' lib/ansible/module_utils
make tests
deps = -r{toxinidir}/test-requirements.txt
whitelist_externals =
make
- python2.4
[testenv:py27]
commands =
+ python --version
python -m compileall -fq -x 'test|samples' .
make tests
deps = -r{toxinidir}/test-requirements.txt
@@ -25,6 +25,7 @@ whitelist_externals = make
[testenv:py34]
commands =
+ python --version
python -m compileall -fq -x 'lib/ansible/module_utils' lib
make tests
deps = -r{toxinidir}/test-requirements.txt
diff --git a/v1/README.md b/v1/README.md
index 98ae99854d..011851da06 100644
--- a/v1/README.md
+++ b/v1/README.md
@@ -3,7 +3,8 @@ Using this code should be equivalent of checking out the v1_last tag, which was
The stable-1.9 is the maintenance branch for the 1.9.x code, which might continue to diverge from the v1/ tree as bugs get fixed.
DO NOT:
- * use this code as reference
+
+ * use this code as reference
* make PRs against this code
* expect this code to be shipped with the 2.0 version of ansible
diff --git a/v1/ansible/utils/template.py b/v1/ansible/utils/template.py
index fb35924ce1..368b2067c3 100644
--- a/v1/ansible/utils/template.py
+++ b/v1/ansible/utils/template.py
@@ -32,6 +32,7 @@ import pwd
import ast
import traceback
from numbers import Number
+from types import NoneType
from ansible.utils.string_functions import count_newlines_from_end
from ansible.utils import to_bytes, to_unicode
@@ -343,7 +344,7 @@ def template_from_string(basedir, data, vars, fail_on_undefined=False):
var_name = only_one.group(1)
if var_name in vars:
resolved_val = vars[var_name]
- if isinstance(resolved_val, (bool, Number)):
+ if isinstance(resolved_val, (bool, Number, NoneType)):
return resolved_val
def my_finalize(thing):