status
stringclasses 1
value | repo_name
stringclasses 31
values | repo_url
stringclasses 31
values | issue_id
int64 1
104k
| title
stringlengths 4
369
| body
stringlengths 0
254k
⌀ | issue_url
stringlengths 37
56
| pull_url
stringlengths 37
54
| before_fix_sha
stringlengths 40
40
| after_fix_sha
stringlengths 40
40
| report_datetime
timestamp[us, tz=UTC] | language
stringclasses 5
values | commit_datetime
timestamp[us, tz=UTC] | updated_file
stringlengths 4
188
| file_content
stringlengths 0
5.12M
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,948 |
pause task with minutes/seconds can continue with 'enter'
|
### Summary
When this playbook is run:
```yaml
---
- hosts: all
tasks:
- pause:
minutes: 2
```
Then in this scene:
```bash
TASK [pause] ************
Pausing for 120 seconds
(ctrl+C then 'C' = continue early, ctrl+C then 'A' = abort)
```
Before ansible 2.9.15, must use 'ctrl+C' then 'C' or 'A' to continue.
But in ansible 2.9.16 to 2.10.7, this scene can use 'enter' to continue.
### Issue Type
Bug Report
### Component Name
pause
### Ansible Version
2.9.16 to 2.10.7
### Configuration
none
### OS / Environment
debian 9
### Steps to Reproduce
```yaml
---
- hosts: all
tasks:
- pause:
minutes: 2
```
### Expected Results
Must use 'ctrl+C' then 'C' or 'A' to continue. Do not response 'enter' to continue.
### Actual Results
Press 'enter' this task to continue.
|
https://github.com/ansible/ansible/issues/73948
|
https://github.com/ansible/ansible/pull/74030
|
a84c1a5669716f6c597a656c1fc02d42f60248ee
|
1527078a8f4fb80cb3f2c48c00b3e683086332eb
| 2021-03-18T07:35:44Z |
python
| 2021-03-29T19:39:42Z |
changelogs/fragments/73948-pause-no-enter-with-timeout.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,948 |
pause task with minutes/seconds can continue with 'enter'
|
### Summary
When this playbook is run:
```yaml
---
- hosts: all
tasks:
- pause:
minutes: 2
```
Then in this scene:
```bash
TASK [pause] ************
Pausing for 120 seconds
(ctrl+C then 'C' = continue early, ctrl+C then 'A' = abort)
```
Before ansible 2.9.15, must use 'ctrl+C' then 'C' or 'A' to continue.
But in ansible 2.9.16 to 2.10.7, this scene can use 'enter' to continue.
### Issue Type
Bug Report
### Component Name
pause
### Ansible Version
2.9.16 to 2.10.7
### Configuration
none
### OS / Environment
debian 9
### Steps to Reproduce
```yaml
---
- hosts: all
tasks:
- pause:
minutes: 2
```
### Expected Results
Must use 'ctrl+C' then 'C' or 'A' to continue. Do not response 'enter' to continue.
### Actual Results
Press 'enter' this task to continue.
|
https://github.com/ansible/ansible/issues/73948
|
https://github.com/ansible/ansible/pull/74030
|
a84c1a5669716f6c597a656c1fc02d42f60248ee
|
1527078a8f4fb80cb3f2c48c00b3e683086332eb
| 2021-03-18T07:35:44Z |
python
| 2021-03-29T19:39:42Z |
lib/ansible/plugins/action/pause.py
|
# Copyright 2012, Tim Bielawa <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import signal
import sys
import termios
import time
import tty
from os import (
getpgrp,
isatty,
tcgetpgrp,
)
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_text, to_native
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.six import PY3
from ansible.plugins.action import ActionBase
from ansible.utils.display import Display
display = Display()

# Probe for curses/terminfo support.  ``setupterm`` can fail in several
# ways (no controlling terminal, bad/unset $TERM, stdout not backed by a
# real stream), so failure simply disables terminfo-based redraws.
try:
    import curses
    import io

    # Nest the try except since curses.error is not available if curses did not import
    try:
        curses.setupterm()
        HAS_CURSES = True
    except (curses.error, TypeError, io.UnsupportedOperation):
        HAS_CURSES = False
except ImportError:
    HAS_CURSES = False

# Control sequences used to redraw the prompt line: taken from terminfo
# ('cr' = carriage return, 'el' = clear to end of line) when available,
# otherwise ANSI/VT100 fallbacks.
if HAS_CURSES:
    MOVE_TO_BOL = curses.tigetstr('cr')
    CLEAR_TO_EOL = curses.tigetstr('el')
else:
    MOVE_TO_BOL = b'\r'
    CLEAR_TO_EOL = b'\x1b[K'
class AnsibleTimeoutExceeded(Exception):
    """Raised by the SIGALRM handler once the requested pause duration elapses."""
    pass
def timeout_handler(signum, frame):
    """SIGALRM handler: convert the alarm into an AnsibleTimeoutExceeded."""
    raise AnsibleTimeoutExceeded
def clear_line(stdout):
    """Return the cursor to the start of the line and erase to end of line.

    ``MOVE_TO_BOL`` and ``CLEAR_TO_EOL`` are already complete control
    sequences (terminfo 'cr'/'el' capabilities when curses is available,
    ANSI fallbacks ``b'\\r'`` / ``b'\\x1b[K'`` otherwise), so they must be
    written as-is.  The previous ``b'\\x1b[%s' %`` wrapping prefixed them
    with an extra CSI introducer, producing malformed sequences such as
    ``\\x1b[\\r`` that only render correctly by accident on tolerant
    terminals.
    """
    stdout.write(MOVE_TO_BOL)
    stdout.write(CLEAR_TO_EOL)
def is_interactive(fd=None):
    """Return True only when *fd* is a foreground, interactive terminal.

    A descriptor counts as interactive when it refers to a tty AND the
    process group that owns that terminal is our own process group --
    i.e. the play is not running in the background.
    """
    if fd is None or not isatty(fd):
        return False
    # Foreground check: the terminal's controlling process group must
    # match ours, otherwise reading keystrokes would stop a background job.
    return getpgrp() == tcgetpgrp(fd)
class ActionModule(ActionBase):
    ''' pauses execution for a length or time, or until input is received '''

    BYPASS_HOST_LOOP = True
    _VALID_ARGS = frozenset(('echo', 'minutes', 'prompt', 'seconds'))

    def run(self, tmp=None, task_vars=None):
        ''' run the pause action module

        Pauses either for a fixed duration (``minutes``/``seconds``) or
        until the user presses enter.  During a timed pause the only
        accepted keyboard input is Ctrl+C followed by 'C' (continue
        early) or 'A' (abort); plain enter must NOT end the wait
        (see ansible/ansible#73948).
        '''
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect

        duration_unit = 'minutes'
        prompt = None
        seconds = None
        echo = True
        echo_prompt = ''
        result.update(dict(
            changed=False,
            rc=0,
            stderr='',
            stdout='',
            start=None,
            stop=None,
            delta=None,
            echo=echo
        ))

        # Should keystrokes be echoed to stdout?
        if 'echo' in self._task.args:
            try:
                echo = boolean(self._task.args['echo'])
            except TypeError as e:
                result['failed'] = True
                result['msg'] = to_native(e)
                return result

            # Add a note saying the output is hidden if echo is disabled
            if not echo:
                echo_prompt = ' (output is hidden)'

        # Is 'prompt' a key in 'args'?
        if 'prompt' in self._task.args:
            prompt = "[%s]\n%s%s:" % (self._task.get_name().strip(), self._task.args['prompt'], echo_prompt)
        else:
            # If no custom prompt is specified, set a default prompt
            prompt = "[%s]\n%s%s:" % (self._task.get_name().strip(), 'Press enter to continue, Ctrl+C to interrupt', echo_prompt)

        # Are 'minutes' or 'seconds' keys that exist in 'args'?
        if 'minutes' in self._task.args or 'seconds' in self._task.args:
            try:
                if 'minutes' in self._task.args:
                    # The time() command operates in seconds so we need to
                    # recalculate for minutes=X values.
                    seconds = int(self._task.args['minutes']) * 60
                else:
                    seconds = int(self._task.args['seconds'])
                    duration_unit = 'seconds'
            except ValueError as e:
                result['failed'] = True
                result['msg'] = u"non-integer value given for prompt duration:\n%s" % to_text(e)
                return result

        ########################################################################
        # Begin the hard work!

        start = time.time()
        result['start'] = to_text(datetime.datetime.now())
        result['user_input'] = b''

        stdin_fd = None
        old_settings = None
        try:
            if seconds is not None:
                if seconds < 1:
                    seconds = 1

                # setup the alarm handler
                signal.signal(signal.SIGALRM, timeout_handler)
                signal.alarm(seconds)

                # show the timer and control prompts
                display.display("Pausing for %d seconds%s" % (seconds, echo_prompt))
                display.display("(ctrl+C then 'C' = continue early, ctrl+C then 'A' = abort)\r"),

                # show the prompt specified in the task
                if 'prompt' in self._task.args:
                    display.display(prompt)
            else:
                display.display(prompt)

            # save the attributes on the existing (duped) stdin so
            # that we can restore them later after we set raw mode
            stdin_fd = None
            stdout_fd = None
            try:
                if PY3:
                    stdin = self._connection._new_stdin.buffer
                    stdout = sys.stdout.buffer
                else:
                    stdin = self._connection._new_stdin
                    stdout = sys.stdout
                stdin_fd = stdin.fileno()
                stdout_fd = stdout.fileno()
            except (ValueError, AttributeError):
                # ValueError: someone is using a closed file descriptor as stdin
                # AttributeError: someone is using a null file descriptor as stdin on windoze
                stdin = None

            interactive = is_interactive(stdin_fd)
            if interactive:
                # grab actual Ctrl+C sequence
                try:
                    intr = termios.tcgetattr(stdin_fd)[6][termios.VINTR]
                except Exception:
                    # unsupported/not present, use default
                    intr = b'\x03'  # value for Ctrl+C

                # get backspace sequences
                try:
                    backspace = termios.tcgetattr(stdin_fd)[6][termios.VERASE]
                except Exception:
                    backspace = [b'\x7f', b'\x08']

                old_settings = termios.tcgetattr(stdin_fd)
                tty.setraw(stdin_fd)

                # Only set stdout to raw mode if it is a TTY. This is needed when redirecting
                # stdout to a file since a file cannot be set to raw mode.
                if isatty(stdout_fd):
                    tty.setraw(stdout_fd)

                # Only echo input if no timeout is specified
                if not seconds and echo:
                    new_settings = termios.tcgetattr(stdin_fd)
                    new_settings[3] = new_settings[3] | termios.ECHO
                    termios.tcsetattr(stdin_fd, termios.TCSANOW, new_settings)

                # flush the buffer to make sure no previous key presses
                # are read in below
                termios.tcflush(stdin, termios.TCIFLUSH)

            while True:
                if not interactive:
                    if seconds is None:
                        display.warning("Not waiting for response to prompt as stdin is not interactive")
                    if seconds is not None:
                        # Give the signal handler enough time to timeout
                        time.sleep(seconds + 1)
                    break

                try:
                    key_pressed = stdin.read(1)

                    if key_pressed == intr:  # value for Ctrl+C
                        clear_line(stdout)
                        raise KeyboardInterrupt

                    # FIX for #73948: only treat keyboard input (enter,
                    # backspace, free text) as meaningful when the pause is
                    # NOT timed.  A timed pause may only end via the alarm
                    # timeout or the Ctrl+C -> C/A sequence above.
                    if not seconds:
                        # read key presses and act accordingly
                        if key_pressed in (b'\r', b'\n'):
                            clear_line(stdout)
                            break
                        elif key_pressed in backspace:
                            # delete a character if backspace is pressed
                            result['user_input'] = result['user_input'][:-1]
                            clear_line(stdout)
                            if echo:
                                stdout.write(result['user_input'])
                            stdout.flush()
                        else:
                            result['user_input'] += key_pressed

                except KeyboardInterrupt:
                    # Ctrl+C seen: cancel the pending alarm and ask the user
                    # whether to continue the play or abort it.
                    signal.alarm(0)
                    display.display("Press 'C' to continue the play or 'A' to abort \r"),
                    if self._c_or_a(stdin):
                        clear_line(stdout)
                        break

                    clear_line(stdout)
                    raise AnsibleError('user requested abort!')

        except AnsibleTimeoutExceeded:
            # this is the exception we expect when the alarm signal
            # fires, so we simply ignore it to move into the cleanup
            pass
        finally:
            # cleanup and save some information
            # restore the old settings for the duped stdin stdin_fd
            if not(None in (stdin_fd, old_settings)) and isatty(stdin_fd):
                termios.tcsetattr(stdin_fd, termios.TCSADRAIN, old_settings)

            duration = time.time() - start
            result['stop'] = to_text(datetime.datetime.now())
            result['delta'] = int(duration)

            if duration_unit == 'minutes':
                duration = round(duration / 60.0, 2)
            else:
                duration = round(duration, 2)
            result['stdout'] = "Paused for %s %s" % (duration, duration_unit)

        result['user_input'] = to_text(result['user_input'], errors='surrogate_or_strict')
        return result

    def _c_or_a(self, stdin):
        """Block until the user types 'c' (continue -> True) or 'a' (abort -> False)."""
        while True:
            key_pressed = stdin.read(1)
            if key_pressed.lower() == b'a':
                return False
            elif key_pressed.lower() == b'c':
                return True
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,948 |
pause task with minutes/seconds can continue with 'enter'
|
### Summary
When this playbook is run:
```yaml
---
- hosts: all
tasks:
- pause:
minutes: 2
```
Then in this scene:
```bash
TASK [pause] ************
Pausing for 120 seconds
(ctrl+C then 'C' = continue early, ctrl+C then 'A' = abort)
```
Before ansible 2.9.15, must use 'ctrl+C' then 'C' or 'A' to continue.
But in ansible 2.9.16 to 2.10.7, this scene can use 'enter' to continue.
### Issue Type
Bug Report
### Component Name
pause
### Ansible Version
2.9.16 to 2.10.7
### Configuration
none
### OS / Environment
debian 9
### Steps to Reproduce
```yaml
---
- hosts: all
tasks:
- pause:
minutes: 2
```
### Expected Results
Must use 'ctrl+C' then 'C' or 'A' to continue. Do not response 'enter' to continue.
### Actual Results
Press 'enter' this task to continue.
|
https://github.com/ansible/ansible/issues/73948
|
https://github.com/ansible/ansible/pull/74030
|
a84c1a5669716f6c597a656c1fc02d42f60248ee
|
1527078a8f4fb80cb3f2c48c00b3e683086332eb
| 2021-03-18T07:35:44Z |
python
| 2021-03-29T19:39:42Z |
test/integration/targets/pause/test-pause.py
|
#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import pexpect
import sys
import termios
from ansible.module_utils.six import PY2
args = sys.argv[1:]
env_vars = {
'ANSIBLE_ROLES_PATH': './roles',
'ANSIBLE_NOCOLOR': 'True',
'ANSIBLE_RETRY_FILES_ENABLED': 'False'
}
try:
backspace = termios.tcgetattr(sys.stdin.fileno())[6][termios.VERASE]
except Exception:
backspace = b'\x7f'
if PY2:
log_buffer = sys.stdout
else:
log_buffer = sys.stdout.buffer
os.environ.update(env_vars)
# -- Plain pause -- #
playbook = 'pause-1.yml'
# Case 1 - Contiune with enter
pause_test = pexpect.spawn(
'ansible-playbook',
args=[playbook] + args,
timeout=10,
env=os.environ
)
pause_test.logfile = log_buffer
pause_test.expect(r'Press enter to continue, Ctrl\+C to interrupt:')
pause_test.send('\r')
pause_test.expect('Task after pause')
pause_test.expect(pexpect.EOF)
pause_test.close()
# Case 2 - Continue with C
pause_test = pexpect.spawn(
'ansible-playbook',
args=[playbook] + args,
timeout=10,
env=os.environ
)
pause_test.logfile = log_buffer
pause_test.expect(r'Press enter to continue, Ctrl\+C to interrupt:')
pause_test.send('\x03')
pause_test.expect("Press 'C' to continue the play or 'A' to abort")
pause_test.send('C')
pause_test.expect('Task after pause')
pause_test.expect(pexpect.EOF)
pause_test.close()
# Case 3 - Abort with A
pause_test = pexpect.spawn(
'ansible-playbook',
args=[playbook] + args,
timeout=10,
env=os.environ
)
pause_test.logfile = log_buffer
pause_test.expect(r'Press enter to continue, Ctrl\+C to interrupt:')
pause_test.send('\x03')
pause_test.expect("Press 'C' to continue the play or 'A' to abort")
pause_test.send('A')
pause_test.expect('user requested abort!')
pause_test.expect(pexpect.EOF)
pause_test.close()
# -- Custom Prompt -- #
playbook = 'pause-2.yml'
# Case 1 - Contiune with enter
pause_test = pexpect.spawn(
'ansible-playbook',
args=[playbook] + args,
timeout=10,
env=os.environ
)
pause_test.logfile = log_buffer
pause_test.expect(r'Custom prompt:')
pause_test.send('\r')
pause_test.expect('Task after pause')
pause_test.expect(pexpect.EOF)
pause_test.close()
# Case 2 - Contiune with C
pause_test = pexpect.spawn(
'ansible-playbook',
args=[playbook] + args,
timeout=10,
env=os.environ
)
pause_test.logfile = log_buffer
pause_test.expect(r'Custom prompt:')
pause_test.send('\x03')
pause_test.expect("Press 'C' to continue the play or 'A' to abort")
pause_test.send('C')
pause_test.expect('Task after pause')
pause_test.expect(pexpect.EOF)
pause_test.close()
# Case 3 - Abort with A
pause_test = pexpect.spawn(
'ansible-playbook',
args=[playbook] + args,
timeout=10,
env=os.environ
)
pause_test.logfile = log_buffer
pause_test.expect(r'Custom prompt:')
pause_test.send('\x03')
pause_test.expect("Press 'C' to continue the play or 'A' to abort")
pause_test.send('A')
pause_test.expect('user requested abort!')
pause_test.expect(pexpect.EOF)
pause_test.close()
# -- Pause for N seconds -- #
playbook = 'pause-3.yml'
# Case 1 - Wait for task to continue after timeout
pause_test = pexpect.spawn(
'ansible-playbook',
args=[playbook] + args,
timeout=10,
env=os.environ
)
pause_test.logfile = log_buffer
pause_test.expect(r'Pausing for \d+ seconds')
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
pause_test.expect('Task after pause')
pause_test.expect(pexpect.EOF)
pause_test.close()
# Case 2 - Contiune with Ctrl + C, C
pause_test = pexpect.spawn(
'ansible-playbook',
args=[playbook] + args,
timeout=10,
env=os.environ
)
pause_test.logfile = log_buffer
pause_test.expect(r'Pausing for \d+ seconds')
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
pause_test.send('\x03')
pause_test.send('C')
pause_test.expect('Task after pause')
pause_test.expect(pexpect.EOF)
pause_test.close()
# Case 3 - Abort with Ctrl + C, A
pause_test = pexpect.spawn(
'ansible-playbook',
args=[playbook] + args,
timeout=10,
env=os.environ
)
pause_test.logfile = log_buffer
pause_test.expect(r'Pausing for \d+ seconds')
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
pause_test.send('\x03')
pause_test.send('A')
pause_test.expect('user requested abort!')
pause_test.expect(pexpect.EOF)
pause_test.close()
# -- Pause for N seconds with custom prompt -- #
playbook = 'pause-4.yml'
# Case 1 - Wait for task to continue after timeout
pause_test = pexpect.spawn(
'ansible-playbook',
args=[playbook] + args,
timeout=10,
env=os.environ
)
pause_test.logfile = log_buffer
pause_test.expect(r'Pausing for \d+ seconds')
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
pause_test.expect(r"Waiting for two seconds:")
pause_test.expect('Task after pause')
pause_test.expect(pexpect.EOF)
pause_test.close()
# Case 2 - Contiune with Ctrl + C, C
pause_test = pexpect.spawn(
'ansible-playbook',
args=[playbook] + args,
timeout=10,
env=os.environ
)
pause_test.logfile = log_buffer
pause_test.expect(r'Pausing for \d+ seconds')
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
pause_test.expect(r"Waiting for two seconds:")
pause_test.send('\x03')
pause_test.send('C')
pause_test.expect('Task after pause')
pause_test.expect(pexpect.EOF)
pause_test.close()
# Case 3 - Abort with Ctrl + C, A
pause_test = pexpect.spawn(
'ansible-playbook',
args=[playbook] + args,
timeout=10,
env=os.environ
)
pause_test.logfile = log_buffer
pause_test.expect(r'Pausing for \d+ seconds')
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
pause_test.expect(r"Waiting for two seconds:")
pause_test.send('\x03')
pause_test.send('A')
pause_test.expect('user requested abort!')
pause_test.expect(pexpect.EOF)
pause_test.close()
# -- Enter input and ensure it's captured, echoed, and can be edited -- #
playbook = 'pause-5.yml'
pause_test = pexpect.spawn(
'ansible-playbook',
args=[playbook] + args,
timeout=10,
env=os.environ
)
pause_test.logfile = log_buffer
pause_test.expect(r'Enter some text:')
pause_test.send('hello there')
pause_test.send('\r')
pause_test.expect(r'Enter some text to edit:')
pause_test.send('hello there')
pause_test.send(backspace * 4)
pause_test.send('ommy boy')
pause_test.send('\r')
pause_test.expect(r'Enter some text \(output is hidden\):')
pause_test.send('supersecretpancakes')
pause_test.send('\r')
pause_test.expect(pexpect.EOF)
pause_test.close()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,060 |
Debug of ansible_facts causes injection of unprefixed variables
|
### Summary
When calling `debug` with `var: ansible_facts`, the unprefixed keys of `ansible_facts` are injected as variables and override at least some other variable precedences (host vars at least) within ansibles precedence order.
I believe the issue occurs in [ansible/executor/task_executor.py](https://github.com/ansible/ansible/blob/devel/lib/ansible/executor/task_executor.py#L715) as `ansible_facts` would be in results, but without `ansible_` prefixes in keys at this point. I think this issue may have been previously masked by the behavior of `AnsibleJ2Vars.__getitem__`, which changed in [this commit](https://github.com/ansible/ansible/commit/a2af8432f36ec8cc5368a747f1211d2b9ba01f2e#diff-5671ace2a63ef6117ee1fb489c22b1d1a3a0a0d6352b10a0f18d02ee89faa57aL84).
### Issue Type
Bug Report
### Component Name
- lib/ansible/executor/task_executor
- lib/ansible/template/vars
### Ansible Version
```console (paste below)
$ ansible --version
ansible 2.9.19
config file = omitted
configured module search path = [u'/omitted/library']
ansible python module location = /usr/lib/python2.7/dist-packages/ansible
executable location = /usr/bin/ansible
python version = 2.7.17 (default, Feb 27 2021, 15:10:58) [GCC 7.5.0]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
CACHE_PLUGIN(/omitted/ansible.cfg) = jsonfile
CACHE_PLUGIN_CONNECTION(/omitted/ansible.cfg) = .fact_cache/
CACHE_PLUGIN_TIMEOUT(/omitted/ansible.cfg) = 86400
DEFAULT_FILTER_PLUGIN_PATH(env: ANSIBLE_FILTER_PLUGINS) = [u'/omitted/filter_plugins']
DEFAULT_GATHERING(/omitted/ansible.cfg) = implicit
DEFAULT_HOST_LIST(/omitted/ansible.cfg) = [u'/omitted/hosts']
DEFAULT_LOG_PATH(/omitted/ansible.cfg) = /omitted/ansible.log
DEFAULT_MODULE_PATH(/omitted/ansible.cfg) = [u'/omitted/library']
DEFAULT_REMOTE_USER(/omitted/ansible.cfg) = omitted
DEFAULT_ROLES_PATH(/omitted/ansible.cfg) = [u'/omitted/roles']
DEFAULT_VAULT_IDENTITY_LIST(/omitted/ansible.cfg) = omitted
RETRY_FILES_ENABLED(/omitted/ansible.cfg) = False
```
### OS / Environment
Ubuntu 18.04 LTS (bionic) using the ansible/ansible PPA
### Steps to Reproduce
Remove fact cache of hostname first.
```yaml (paste below)
- hosts: hostname
tasks:
- debug:
var: system_vendor
- debug:
var: interfaces
- debug:
var: ansible_facts
- debug:
var: system_vendor
- debug:
var: interfaces
- fail:
```
### Expected Results
I expected to be able to access the `interfaces` variable provided in my host vars. I did not expect to have a variable `system_vendor` in any scope.
In playbook order, I expected
1. Successful `setup`
1. not defined error
1. my `interfaces` variable
1. contents of `ansible_facts`
1. not defined error
1. my `interfaces` variable
1. fail as requested
### Actual Results
After calling debug with `var: ansible_facts`, non-prefixed keys of `ansible_facts` are injected into variables and appear to have a higher priority than host vars at least.
```console (paste below)
PLAY [groups] **********************************************************************************************************************************************************************
TASK [Gathering Facts] *******************************************************************************************************************************************************************************************************************************************************
ok: [hostname]
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"system_vendor": "VARIABLE IS NOT DEFINED!"
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"interfaces": {
"dict_key1": "val1",
"dict_key2": "val2"
}
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"ansible_facts": {
[... not relevant ...],
"interfaces": [
"lo",
"eth0",
"eth1"
],
[... not relevant ...],
"system_vendor": "VMware, Inc.",
[... not relevant ...]
}
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"system_vendor": "VMware, Inc."
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"interfaces": [
"lo",
"eth0",
"eth1"
]
}
TASK [fail] ******************************************************************************************************************************************************************************************************************************************************************
fatal: [hostname]: FAILED! => {"changed": false, "msg": "Failed as requested from task"}
PLAY RECAP *******************************************************************************************************************************************************************************************************************************************************************
hostname : ok=7 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
|
https://github.com/ansible/ansible/issues/74060
|
https://github.com/ansible/ansible/pull/74067
|
112a7718c6c776820a215562c91b092ed8e96ae1
|
f9f839fa08eee46ad7a86d6cbc7519541a50c7ef
| 2021-03-29T20:00:25Z |
python
| 2021-03-31T13:30:09Z |
changelogs/fragments/debug_dont_set_facts.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,060 |
Debug of ansible_facts causes injection of unprefixed variables
|
### Summary
When calling `debug` with `var: ansible_facts`, the unprefixed keys of `ansible_facts` are injected as variables and override at least some other variable precedences (host vars at least) within ansibles precedence order.
I believe the issue occurs in [ansible/executor/task_executor.py](https://github.com/ansible/ansible/blob/devel/lib/ansible/executor/task_executor.py#L715) as `ansible_facts` would be in results, but without `ansible_` prefixes in keys at this point. I think this issue may have been previously masked by the behavior of `AnsibleJ2Vars.__getitem__`, which changed in [this commit](https://github.com/ansible/ansible/commit/a2af8432f36ec8cc5368a747f1211d2b9ba01f2e#diff-5671ace2a63ef6117ee1fb489c22b1d1a3a0a0d6352b10a0f18d02ee89faa57aL84).
### Issue Type
Bug Report
### Component Name
- lib/ansible/executor/task_executor
- lib/ansible/template/vars
### Ansible Version
```console (paste below)
$ ansible --version
ansible 2.9.19
config file = omitted
configured module search path = [u'/omitted/library']
ansible python module location = /usr/lib/python2.7/dist-packages/ansible
executable location = /usr/bin/ansible
python version = 2.7.17 (default, Feb 27 2021, 15:10:58) [GCC 7.5.0]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
CACHE_PLUGIN(/omitted/ansible.cfg) = jsonfile
CACHE_PLUGIN_CONNECTION(/omitted/ansible.cfg) = .fact_cache/
CACHE_PLUGIN_TIMEOUT(/omitted/ansible.cfg) = 86400
DEFAULT_FILTER_PLUGIN_PATH(env: ANSIBLE_FILTER_PLUGINS) = [u'/omitted/filter_plugins']
DEFAULT_GATHERING(/omitted/ansible.cfg) = implicit
DEFAULT_HOST_LIST(/omitted/ansible.cfg) = [u'/omitted/hosts']
DEFAULT_LOG_PATH(/omitted/ansible.cfg) = /omitted/ansible.log
DEFAULT_MODULE_PATH(/omitted/ansible.cfg) = [u'/omitted/library']
DEFAULT_REMOTE_USER(/omitted/ansible.cfg) = omitted
DEFAULT_ROLES_PATH(/omitted/ansible.cfg) = [u'/omitted/roles']
DEFAULT_VAULT_IDENTITY_LIST(/omitted/ansible.cfg) = omitted
RETRY_FILES_ENABLED(/omitted/ansible.cfg) = False
```
### OS / Environment
Ubuntu 18.04 LTS (bionic) using the ansible/ansible PPA
### Steps to Reproduce
Remove fact cache of hostname first.
```yaml (paste below)
- hosts: hostname
tasks:
- debug:
var: system_vendor
- debug:
var: interfaces
- debug:
var: ansible_facts
- debug:
var: system_vendor
- debug:
var: interfaces
- fail:
```
### Expected Results
I expected to be able to access the `interfaces` variable provided in my host vars. I did not expect to have a variable `system_vendor` in any scope.
In playbook order, I expected
1. Successful `setup`
1. not defined error
1. my `interfaces` variable
1. contents of `ansible_facts`
1. not defined error
1. my `interfaces` variable
1. fail as requested
### Actual Results
After calling debug with `var: ansible_facts`, non-prefixed keys of `ansible_facts` are injected into variables and appear to have a higher priority than host vars at least.
```console (paste below)
PLAY [groups] **********************************************************************************************************************************************************************
TASK [Gathering Facts] *******************************************************************************************************************************************************************************************************************************************************
ok: [hostname]
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"system_vendor": "VARIABLE IS NOT DEFINED!"
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"interfaces": {
"dict_key1": "val1",
"dict_key2": "val2"
}
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"ansible_facts": {
[... not relevant ...],
"interfaces": [
"lo",
"eth0",
"eth1"
],
[... not relevant ...],
"system_vendor": "VMware, Inc.",
[... not relevant ...]
}
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"system_vendor": "VMware, Inc."
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"interfaces": [
"lo",
"eth0",
"eth1"
]
}
TASK [fail] ******************************************************************************************************************************************************************************************************************************************************************
fatal: [hostname]: FAILED! => {"changed": false, "msg": "Failed as requested from task"}
PLAY RECAP *******************************************************************************************************************************************************************************************************************************************************************
hostname : ok=7 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
|
https://github.com/ansible/ansible/issues/74060
|
https://github.com/ansible/ansible/pull/74067
|
112a7718c6c776820a215562c91b092ed8e96ae1
|
f9f839fa08eee46ad7a86d6cbc7519541a50c7ef
| 2021-03-29T20:00:25Z |
python
| 2021-03-31T13:30:09Z |
lib/ansible/executor/task_executor.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import pty
import time
import json
import signal
import subprocess
import sys
import termios
import traceback
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleConnectionFailure, AnsibleActionFail, AnsibleActionSkip
from ansible.executor.task_result import TaskResult
from ansible.executor.module_common import get_action_args_with_defaults
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.six import iteritems, string_types, binary_type
from ansible.module_utils.six.moves import xrange
from ansible.module_utils._text import to_text, to_native
from ansible.module_utils.connection import write_to_file_descriptor
from ansible.playbook.conditional import Conditional
from ansible.playbook.task import Task
from ansible.plugins.loader import become_loader, cliconf_loader, connection_loader, httpapi_loader, netconf_loader, terminal_loader
from ansible.template import Templar
from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.unsafe_proxy import to_unsafe_text, wrap_var
from ansible.vars.clean import namespace_facts, clean_facts
from ansible.utils.display import Display
from ansible.utils.vars import combine_vars, isidentifier
# Module-level display instance used for all debug/warning output below.
display = Display()

# Magic connection variables that are safe to surface in task results:
# anything become- or password-related is filtered out so credentials
# never leak into registered results.
RETURN_VARS = [x for x in C.MAGIC_VARIABLE_MAPPING.items() if 'become' not in x and '_pass' not in x]

# Public API of this module.
__all__ = ['TaskExecutor']
class TaskTimeoutError(BaseException):
    """Raised by the SIGALRM handler when a task exceeds its ``timeout``.

    Deliberately derives from ``BaseException`` (not ``Exception``) so that
    broad ``except Exception`` clauses inside action plugins cannot swallow
    the timeout before the executor's own handler sees it.
    """
def task_timeout(signum, frame):
    """SIGALRM handler installed around task execution.

    Ignores the signal number and frame; its only job is to abort the
    in-flight task by raising :class:`TaskTimeoutError`.
    """
    raise TaskTimeoutError()
def remove_omit(task_args, omit_token):
    '''
    Remove args with a value equal to the ``omit_token`` recursively
    to align with now having suboptions in the argument_spec.

    :param task_args: the task's args; usually a dict, but anything else
        is returned unchanged so callers don't have to pre-check the type.
    :param omit_token: the per-run sentinel value produced by ``default(omit)``.
    :returns: a new dict with omitted keys stripped (the input is not mutated),
        or ``task_args`` itself when it is not a dict.

    NOTE: list elements are cleaned only when they are dicts; a scalar list
    element equal to the omit token is intentionally kept, matching the
    long-standing behavior callers rely on.
    '''
    if not isinstance(task_args, dict):
        return task_args

    new_args = {}
    # dict.items() replaces the legacy six.iteritems() shim; behavior is
    # identical on Python 3 and the tuple unpacking reads more clearly.
    for key, value in task_args.items():
        if value == omit_token:
            # drop the key entirely: the module should see it as unset
            continue
        elif isinstance(value, dict):
            new_args[key] = remove_omit(value, omit_token)
        elif isinstance(value, list):
            new_args[key] = [remove_omit(v, omit_token) for v in value]
        else:
            new_args[key] = value

    return new_args
class TaskExecutor:

    '''
    This is the main worker class for the executor pipeline, which
    handles loading an action plugin to actually dispatch the task to
    a given host. This class roughly corresponds to the old Runner()
    class.

    One instance is created per (host, task) pair; ``run()`` is the
    entry point and everything else is internal plumbing.
    '''
def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj, final_q):
    """Bind the per-(host, task) execution state onto this worker.

    All parameters are stored as-is; no work is done here beyond
    squashing the task (which may rewrite its args in place).
    """
    # what we are running, and where
    self._host = host
    self._task = task
    self._play_context = play_context

    # variables and I/O plumbing supplied by the strategy
    self._job_vars = job_vars
    self._new_stdin = new_stdin
    self._loader = loader
    self._shared_loader_obj = shared_loader_obj
    self._final_q = final_q

    # lazily-populated / error-tracking state
    self._connection = None
    self._loop_eval_error = None

    # must happen after self._task is assigned; may rewrite task args
    self._task.squash()
def run(self):
    '''
    The main executor entrypoint, where we determine if the specified
    task requires looping and either runs the task with self._run_loop()
    or self._execute(). After that, the returned results are parsed and
    returned as a dict.

    Never raises on task failure: AnsibleError and unexpected exceptions
    are converted into a ``failed`` result dict so the strategy layer
    always receives a result. The connection is closed in ``finally``.
    '''
    display.debug("in run() - task %s" % self._task._uuid)

    try:
        try:
            items = self._get_loop_items()
        except AnsibleUndefinedVariable as e:
            # save the error raised here for use later
            # (it is re-raised from _execute() unless the task's conditional
            # skips the task first)
            items = None
            self._loop_eval_error = e

        if items is not None:
            if len(items) > 0:
                item_results = self._run_loop(items)

                # create the overall result item
                res = dict(results=item_results)

                # loop through the item results and set the global changed/failed/skipped result flags based on any item.
                res['skipped'] = True
                for item in item_results:
                    if 'changed' in item and item['changed'] and not res.get('changed'):
                        res['changed'] = True
                    # skipped is only True if every single item skipped
                    if res['skipped'] and ('skipped' not in item or ('skipped' in item and not item['skipped'])):
                        res['skipped'] = False

                    if 'failed' in item and item['failed']:
                        item_ignore = item.pop('_ansible_ignore_errors')
                        if not res.get('failed'):
                            res['failed'] = True
                            res['msg'] = 'One or more items failed'
                            self._task.ignore_errors = item_ignore
                        elif self._task.ignore_errors and not item_ignore:
                            self._task.ignore_errors = item_ignore

                    # ensure to accumulate these
                    for array in ['warnings', 'deprecations']:
                        if array in item and item[array]:
                            if array not in res:
                                res[array] = []
                            if not isinstance(item[array], list):
                                item[array] = [item[array]]
                            res[array] = res[array] + item[array]
                            del item[array]

                if not res.get('failed', False):
                    res['msg'] = 'All items completed'
                    if res['skipped']:
                        res['msg'] = 'All items skipped'
            else:
                # loop evaluated to an empty list: synthesize a skipped result
                res = dict(changed=False, skipped=True, skipped_reason='No items in the list', results=[])
        else:
            # no loop at all; run the task exactly once
            display.debug("calling self._execute()")
            res = self._execute()
            display.debug("_execute() done")

        # make sure changed is set in the result, if it's not present
        if 'changed' not in res:
            res['changed'] = False

        def _clean_res(res, errors='surrogate_or_strict'):
            # Recursively convert bytes in the result to text so the result
            # can be serialized; mutates dicts/lists in place.
            if isinstance(res, binary_type):
                return to_unsafe_text(res, errors=errors)
            elif isinstance(res, dict):
                for k in res:
                    try:
                        res[k] = _clean_res(res[k], errors=errors)
                    except UnicodeError:
                        if k == 'diff':
                            # If this is a diff, substitute a replacement character if the value
                            # is undecodable as utf8. (Fix #21804)
                            display.warning("We were unable to decode all characters in the module return data."
                                            " Replaced some in an effort to return as much as possible")
                            res[k] = _clean_res(res[k], errors='surrogate_then_replace')
                        else:
                            raise
            elif isinstance(res, list):
                for idx, item in enumerate(res):
                    res[idx] = _clean_res(item, errors=errors)
            return res

        display.debug("dumping result to json")
        res = _clean_res(res)
        display.debug("done dumping result, returning")
        return res
    except AnsibleError as e:
        return dict(failed=True, msg=wrap_var(to_text(e, nonstring='simplerepr')), _ansible_no_log=self._play_context.no_log)
    except Exception as e:
        return dict(failed=True, msg='Unexpected failure during module execution.', exception=to_text(traceback.format_exc()),
                    stdout='', _ansible_no_log=self._play_context.no_log)
    finally:
        try:
            self._connection.close()
        except AttributeError:
            # no connection was ever established (self._connection is None)
            pass
        except Exception as e:
            display.debug(u"error closing connection: %s" % to_text(e))
def _get_loop_items(self):
    '''
    Loads a lookup plugin to handle the with_* portion of a task (if specified),
    and returns the items result.

    Returns ``None`` when the task has no loop at all; otherwise returns a
    list of items (possibly empty). Raises AnsibleError for an unknown
    lookup name or for a ``loop:`` that does not template to a list.
    '''

    # get search path for this task to pass to lookup plugins
    self._job_vars['ansible_search_path'] = self._task.get_search_path()

    # ensure basedir is always in (dwim already searches here but we need to display it)
    if self._loader.get_basedir() not in self._job_vars['ansible_search_path']:
        self._job_vars['ansible_search_path'].append(self._loader.get_basedir())

    templar = Templar(loader=self._loader, variables=self._job_vars)
    items = None
    loop_cache = self._job_vars.get('_ansible_loop_cache')
    if loop_cache is not None:
        # _ansible_loop_cache may be set in `get_vars` when calculating `delegate_to`
        # to avoid reprocessing the loop
        items = loop_cache
    elif self._task.loop_with:
        # with_<lookup> style loop
        if self._task.loop_with in self._shared_loader_obj.lookup_loader:
            fail = True
            if self._task.loop_with == 'first_found':
                # first_found loops are special. If the item is undefined then we want to fall through to the next value rather than failing.
                fail = False

            loop_terms = listify_lookup_plugin_terms(terms=self._task.loop, templar=templar, loader=self._loader, fail_on_undefined=fail,
                                                     convert_bare=False)
            if not fail:
                # drop any terms that are still unresolved templates
                loop_terms = [t for t in loop_terms if not templar.is_template(t)]

            # get lookup
            mylookup = self._shared_loader_obj.lookup_loader.get(self._task.loop_with, loader=self._loader, templar=templar)

            # give lookup task 'context' for subdir (mostly needed for first_found)
            # NOTE: relies on the for-loop leaving 'subdir' bound to the last
            # value ('file') when no keyword matches the action name
            for subdir in ['template', 'var', 'file']:  # TODO: move this to constants?
                if subdir in self._task.action:
                    break
            setattr(mylookup, '_subdir', subdir + 's')

            # run lookup
            items = wrap_var(mylookup.run(terms=loop_terms, variables=self._job_vars, wantlist=True))
        else:
            raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % self._task.loop_with)

    elif self._task.loop is not None:
        # plain `loop:` keyword; must template down to a real list
        items = templar.template(self._task.loop)
        if not isinstance(items, list):
            raise AnsibleError(
                "Invalid data passed to 'loop', it requires a list, got this instead: %s."
                " Hint: If you passed a list/dict of just one element,"
                " try adding wantlist=True to your lookup invocation or use q/query instead of lookup." % items
            )

    return items
def _run_loop(self, items):
    '''
    Runs the task with the loop items specified and collates the result
    into an array named 'results' which is inserted into the final result
    along with the item for which the loop ran.

    Each iteration executes against a *copy* of the task/play_context
    (swapped in and back out around _execute) so per-item templating does
    not pollute later iterations.
    '''
    results = []

    # make copies of the job vars and task so we can add the item to
    # the variables and re-validate the task with the item variable
    # task_vars = self._job_vars.copy()
    task_vars = self._job_vars

    loop_var = 'item'
    index_var = None
    label = None
    loop_pause = 0
    extended = False
    templar = Templar(loader=self._loader, variables=self._job_vars)

    # FIXME: move this to the object itself to allow post_validate to take care of templating (loop_control.post_validate)
    if self._task.loop_control:
        loop_var = templar.template(self._task.loop_control.loop_var)
        index_var = templar.template(self._task.loop_control.index_var)
        loop_pause = templar.template(self._task.loop_control.pause)
        extended = templar.template(self._task.loop_control.extended)

        # This may be 'None',so it is templated below after we ensure a value and an item is assigned
        label = self._task.loop_control.label

    # ensure we always have a label
    if label is None:
        label = '{{' + loop_var + '}}'

    if loop_var in task_vars:
        display.warning(u"The loop variable '%s' is already in use. "
                        u"You should set the `loop_var` value in the `loop_control` option for the task"
                        u" to something else to avoid variable collisions and unexpected behavior." % loop_var)

    ran_once = False
    no_log = False
    items_len = len(items)
    for item_index, item in enumerate(items):
        task_vars['ansible_loop_var'] = loop_var

        task_vars[loop_var] = item
        if index_var:
            task_vars['ansible_index_var'] = index_var
            task_vars[index_var] = item_index

        if extended:
            # `loop_control.extended` exposes the full ansible_loop dict
            task_vars['ansible_loop'] = {
                'allitems': items,
                'index': item_index + 1,
                'index0': item_index,
                'first': item_index == 0,
                'last': item_index + 1 == items_len,
                'length': items_len,
                'revindex': items_len - item_index,
                'revindex0': items_len - item_index - 1,
            }
            try:
                task_vars['ansible_loop']['nextitem'] = items[item_index + 1]
            except IndexError:
                # last item has no 'nextitem'
                pass
            if item_index - 1 >= 0:
                task_vars['ansible_loop']['previtem'] = items[item_index - 1]

        # Update template vars to reflect current loop iteration
        templar.available_variables = task_vars

        # pause between loop iterations
        if loop_pause and ran_once:
            try:
                time.sleep(float(loop_pause))
            except ValueError as e:
                raise AnsibleError('Invalid pause value: %s, produced error: %s' % (loop_pause, to_native(e)))
        else:
            ran_once = True

        try:
            tmp_task = self._task.copy(exclude_parent=True, exclude_tasks=True)
            tmp_task._parent = self._task._parent
            tmp_play_context = self._play_context.copy()
        except AnsibleParserError as e:
            results.append(dict(failed=True, msg=to_text(e)))
            continue

        # now we swap the internal task and play context with their copies,
        # execute, and swap them back so we can do the next iteration cleanly
        (self._task, tmp_task) = (tmp_task, self._task)
        (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
        res = self._execute(variables=task_vars)
        task_fields = self._task.dump_attrs()
        (self._task, tmp_task) = (tmp_task, self._task)
        (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)

        # update 'general no_log' based on specific no_log
        # (sticky: once any item is no_log, the whole loop result is)
        no_log = no_log or tmp_task.no_log

        # now update the result with the item info, and append the result
        # to the list of results
        res[loop_var] = item
        res['ansible_loop_var'] = loop_var
        if index_var:
            res[index_var] = item_index
            res['ansible_index_var'] = index_var
        if extended:
            res['ansible_loop'] = task_vars['ansible_loop']

        res['_ansible_item_result'] = True
        res['_ansible_ignore_errors'] = task_fields.get('ignore_errors')

        # gets templated here unlike rest of loop_control fields, depends on loop_var above
        try:
            res['_ansible_item_label'] = templar.template(label, cache=False)
        except AnsibleUndefinedVariable as e:
            res.update({
                'failed': True,
                'msg': 'Failed to template loop_control.label: %s' % to_text(e)
            })

        tr = TaskResult(
            self._host.name,
            self._task._uuid,
            res,
            task_fields=task_fields,
        )
        # fire per-item callbacks as we go so output is streamed
        if tr.is_failed() or tr.is_unreachable():
            self._final_q.send_callback('v2_runner_item_on_failed', tr)
        elif tr.is_skipped():
            self._final_q.send_callback('v2_runner_item_on_skipped', tr)
        else:
            if getattr(self._task, 'diff', False):
                self._final_q.send_callback('v2_on_file_diff', tr)
            self._final_q.send_callback('v2_runner_item_on_ok', tr)

        results.append(res)
        del task_vars[loop_var]

        # clear 'connection related' plugin variables for next iteration
        if self._connection:
            clear_plugins = {
                'connection': self._connection._load_name,
                'shell': self._connection._shell._load_name
            }
            if self._connection.become:
                clear_plugins['become'] = self._connection.become._load_name

            for plugin_type, plugin_name in iteritems(clear_plugins):
                for var in C.config.get_plugin_vars(plugin_type, plugin_name):
                    # only drop vars the loop iteration introduced, never
                    # ones that came in via the original job vars
                    if var in task_vars and var not in self._job_vars:
                        del task_vars[var]

    self._task.no_log = no_log

    return results
def _execute(self, variables=None):
    '''
    The primary workhorse of the executor system, this runs the task
    on the specified host (which may be the delegated_to host) and handles
    the retry/until and block rescue/always execution

    :param variables: the variable set to execute with; defaults to the
        job vars captured at construction time. Mutated in place with
        registered results and returned facts.
    :returns: the task result dict (may be a synthesized skipped/failed
        dict for conditionals, includes, or validation failures).
    '''

    if variables is None:
        variables = self._job_vars

    templar = Templar(loader=self._loader, variables=variables)

    context_validation_error = None
    try:
        # TODO: remove play_context as this does not take delegation into account, task itself should hold values
        # for connection/shell/become/terminal plugin options to finalize.
        # Kept for now for backwards compatibility and a few functions that are still exclusive to it.

        # apply the given task's information to the connection info,
        # which may override some fields already set by the play or
        # the options specified on the command line
        self._play_context = self._play_context.set_task_and_variable_override(task=self._task, variables=variables, templar=templar)

        # fields set from the play/task may be based on variables, so we have to
        # do the same kind of post validation step on it here before we use it.
        self._play_context.post_validate(templar=templar)

        # now that the play context is finalized, if the remote_addr is not set
        # default to using the host's address field as the remote address
        if not self._play_context.remote_addr:
            self._play_context.remote_addr = self._host.address

        # We also add "magic" variables back into the variables dict to make sure
        # a certain subset of variables exist.
        self._play_context.update_vars(variables)
    except AnsibleError as e:
        # save the error, which we'll raise later if we don't end up
        # skipping this task during the conditional evaluation step
        context_validation_error = e

    # Evaluate the conditional (if any) for this task, which we do before running
    # the final task post-validation. We do this before the post validation due to
    # the fact that the conditional may specify that the task be skipped due to a
    # variable not being present which would otherwise cause validation to fail
    try:
        if not self._task.evaluate_conditional(templar, variables):
            display.debug("when evaluation is False, skipping this task")
            return dict(changed=False, skipped=True, skip_reason='Conditional result was False', _ansible_no_log=self._play_context.no_log)
    except AnsibleError as e:
        # loop error takes precedence
        if self._loop_eval_error is not None:
            # Display the error from the conditional as well to prevent
            # losing information useful for debugging.
            display.v(to_text(e))
            raise self._loop_eval_error  # pylint: disable=raising-bad-type
        raise

    # Not skipping, if we had loop error raised earlier we need to raise it now to halt the execution of this task
    if self._loop_eval_error is not None:
        raise self._loop_eval_error  # pylint: disable=raising-bad-type

    # if we ran into an error while setting up the PlayContext, raise it now
    if context_validation_error is not None:
        raise context_validation_error  # pylint: disable=raising-bad-type

    # if this task is a TaskInclude, we just return now with a success code so the
    # main thread can expand the task list for the given host
    if self._task.action in C._ACTION_ALL_INCLUDE_TASKS:
        include_args = self._task.args.copy()
        include_file = include_args.pop('_raw_params', None)
        if not include_file:
            return dict(failed=True, msg="No include file was specified to the include")

        include_file = templar.template(include_file)
        return dict(include=include_file, include_args=include_args)

    # if this task is a IncludeRole, we just return now with a success code so the main thread can expand the task list for the given host
    elif self._task.action in C._ACTION_INCLUDE_ROLE:
        include_args = self._task.args.copy()
        return dict(include_args=include_args)

    # Now we do final validation on the task, which sets all fields to their final values.
    try:
        self._task.post_validate(templar=templar)
    except AnsibleError:
        raise
    except Exception:
        return dict(changed=False, failed=True, _ansible_no_log=self._play_context.no_log, exception=to_text(traceback.format_exc()))
    if '_variable_params' in self._task.args:
        variable_params = self._task.args.pop('_variable_params')
        if isinstance(variable_params, dict):
            if C.INJECT_FACTS_AS_VARS:
                display.warning("Using a variable for a task's 'args' is unsafe in some situations "
                                "(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)")
            variable_params.update(self._task.args)
            self._task.args = variable_params

    if self._task.delegate_to:
        # use vars from delegated host (which already include task vars) instead of original host
        cvars = variables.get('ansible_delegated_vars', {}).get(self._task.delegate_to, {})
        orig_vars = templar.available_variables
    else:
        # just use normal host vars
        cvars = orig_vars = variables

    templar.available_variables = cvars

    # get the connection and the handler for this execution
    # (reuse the existing connection only if it is still connected to the
    # same remote address)
    if (not self._connection or
            not getattr(self._connection, 'connected', False) or
            self._play_context.remote_addr != self._connection._play_context.remote_addr):
        self._connection = self._get_connection(cvars, templar)
    else:
        # if connection is reused, its _play_context is no longer valid and needs
        # to be replaced with the one templated above, in case other data changed
        self._connection._play_context = self._play_context

    plugin_vars = self._set_connection_options(cvars, templar)
    templar.available_variables = orig_vars

    # get handler
    self._handler = self._get_action_handler(connection=self._connection, templar=templar)

    # Apply default params for action/module, if present
    self._task.args = get_action_args_with_defaults(
        self._task.action, self._task.args, self._task.module_defaults, templar, self._task._ansible_internal_redirect_list
    )

    # And filter out any fields which were set to default(omit), and got the omit token value
    omit_token = variables.get('omit')
    if omit_token is not None:
        self._task.args = remove_omit(self._task.args, omit_token)

    # Read some values from the task, so that we can modify them if need be
    if self._task.until:
        retries = self._task.retries
        if retries is None:
            retries = 3
        elif retries <= 0:
            retries = 1
        else:
            # +1 because `retries` counts re-runs after the first attempt
            retries += 1
    else:
        retries = 1

    delay = self._task.delay
    if delay < 0:
        delay = 1

    # make a copy of the job vars here, in case we need to update them
    # with the registered variable value later on when testing conditions
    vars_copy = variables.copy()

    display.debug("starting attempt loop")
    result = None
    for attempt in xrange(1, retries + 1):
        display.debug("running the handler")
        try:
            if self._task.timeout:
                # arm SIGALRM so a hung handler raises TaskTimeoutError
                old_sig = signal.signal(signal.SIGALRM, task_timeout)
                signal.alarm(self._task.timeout)
            result = self._handler.run(task_vars=variables)
        except AnsibleActionSkip as e:
            return dict(skipped=True, msg=to_text(e))
        except AnsibleActionFail as e:
            return dict(failed=True, msg=to_text(e))
        except AnsibleConnectionFailure as e:
            return dict(unreachable=True, msg=to_text(e))
        except TaskTimeoutError as e:
            msg = 'The %s action failed to execute in the expected time frame (%d) and was terminated' % (self._task.action, self._task.timeout)
            return dict(failed=True, msg=msg)
        finally:
            if self._task.timeout:
                # always disarm the alarm and restore the prior handler
                signal.alarm(0)
                old_sig = signal.signal(signal.SIGALRM, old_sig)
            self._handler.cleanup()
        display.debug("handler run complete")

        # preserve no log
        result["_ansible_no_log"] = self._play_context.no_log

        # update the local copy of vars with the registered value, if specified,
        # or any facts which may have been generated by the module execution
        if self._task.register:
            if not isidentifier(self._task.register):
                raise AnsibleError("Invalid variable name in 'register' specified: '%s'" % self._task.register)

            vars_copy[self._task.register] = result = wrap_var(result)

        if self._task.async_val > 0:
            if self._task.poll > 0 and not result.get('skipped') and not result.get('failed'):
                result = self._poll_async_result(result=result, templar=templar, task_vars=vars_copy)
                # ensure no log is preserved
                result["_ansible_no_log"] = self._play_context.no_log

        # helper methods for use below in evaluating changed/failed_when
        def _evaluate_changed_when_result(result):
            if self._task.changed_when is not None and self._task.changed_when:
                cond = Conditional(loader=self._loader)
                cond.when = self._task.changed_when
                result['changed'] = cond.evaluate_conditional(templar, vars_copy)

        def _evaluate_failed_when_result(result):
            if self._task.failed_when:
                cond = Conditional(loader=self._loader)
                cond.when = self._task.failed_when
                failed_when_result = cond.evaluate_conditional(templar, vars_copy)
                result['failed_when_result'] = result['failed'] = failed_when_result
            else:
                failed_when_result = False
            return failed_when_result

        if 'ansible_facts' in result:
            if self._task.action in C._ACTION_WITH_CLEAN_FACTS:
                vars_copy.update(result['ansible_facts'])
            else:
                # TODO: cleaning of facts should eventually become part of taskresults instead of vars
                af = wrap_var(result['ansible_facts'])
                vars_copy['ansible_facts'] = combine_vars(vars_copy.get('ansible_facts', {}), namespace_facts(af))
                if C.INJECT_FACTS_AS_VARS:
                    vars_copy.update(clean_facts(af))

        # set the failed property if it was missing.
        if 'failed' not in result:
            # rc is here for backwards compatibility and modules that use it instead of 'failed'
            if 'rc' in result and result['rc'] not in [0, "0"]:
                result['failed'] = True
            else:
                result['failed'] = False

        # Make attempts and retries available early to allow their use in changed/failed_when
        if self._task.until:
            result['attempts'] = attempt

        # set the changed property if it was missing.
        if 'changed' not in result:
            result['changed'] = False

        # re-update the local copy of vars with the registered value, if specified,
        # or any facts which may have been generated by the module execution
        # This gives changed/failed_when access to additional recently modified
        # attributes of result
        if self._task.register:
            vars_copy[self._task.register] = result = wrap_var(result)

        # if we didn't skip this task, use the helpers to evaluate the changed/
        # failed_when properties
        if 'skipped' not in result:
            _evaluate_changed_when_result(result)
            _evaluate_failed_when_result(result)

        if retries > 1:
            cond = Conditional(loader=self._loader)
            cond.when = self._task.until
            if cond.evaluate_conditional(templar, vars_copy):
                break
            else:
                # no conditional check, or it failed, so sleep for the specified time
                if attempt < retries:
                    result['_ansible_retry'] = True
                    result['retries'] = retries
                    display.debug('Retrying task, attempt %d of %d' % (attempt, retries))
                    self._final_q.send_callback(
                        'v2_runner_retry',
                        TaskResult(
                            self._host.name,
                            self._task._uuid,
                            result,
                            task_fields=self._task.dump_attrs()
                        )
                    )
                    time.sleep(delay)
                    self._handler = self._get_action_handler(connection=self._connection, templar=templar)
    else:
        # for/else: only reached when the loop exhausted without `break`
        if retries > 1:
            # we ran out of attempts, so mark the result as failed
            result['attempts'] = retries - 1
            result['failed'] = True

    # do the final update of the local variables here, for both registered
    # values and any facts which may have been created
    if self._task.register:
        variables[self._task.register] = result = wrap_var(result)

    if 'ansible_facts' in result:
        if self._task.action in C._ACTION_WITH_CLEAN_FACTS:
            variables.update(result['ansible_facts'])
        else:
            # TODO: cleaning of facts should eventually become part of taskresults instead of vars
            af = wrap_var(result['ansible_facts'])
            variables['ansible_facts'] = combine_vars(variables.get('ansible_facts', {}), namespace_facts(af))
            if C.INJECT_FACTS_AS_VARS:
                variables.update(clean_facts(af))

    # save the notification target in the result, if it was specified, as
    # this task may be running in a loop in which case the notification
    # may be item-specific, ie. "notify: service {{item}}"
    if self._task.notify is not None:
        result['_ansible_notify'] = self._task.notify

    # add the delegated vars to the result, so we can reference them
    # on the results side without having to do any further templating
    # also now add conneciton vars results when delegating
    if self._task.delegate_to:
        result["_ansible_delegated_vars"] = {'ansible_delegated_host': self._task.delegate_to}
        for k in plugin_vars:
            result["_ansible_delegated_vars"][k] = cvars.get(k)

    # and return
    display.debug("attempt loop complete, returning result")
    return result
def _poll_async_result(self, result, templar, task_vars=None):
    '''
    Polls for the specified JID to be complete

    Repeatedly runs the async_status action every ``poll`` seconds until
    the job reports finished/failed/skipped or the ``async`` time budget
    is exhausted. On success the temporary status file is cleaned up.
    '''

    if task_vars is None:
        task_vars = self._job_vars

    async_jid = result.get('ansible_job_id')
    if async_jid is None:
        return dict(failed=True, msg="No job id was returned by the async task")

    # Create a new pseudo-task to run the async_status module, and run
    # that (with a sleep for "poll" seconds between each retry) until the
    # async time limit is exceeded.

    async_task = Task().load(dict(action='async_status jid=%s' % async_jid, environment=self._task.environment))

    # FIXME: this is no longer the case, normal takes care of all, see if this can just be generalized
    # Because this is an async task, the action handler is async. However,
    # we need the 'normal' action handler for the status check, so get it
    # now via the action_loader
    async_handler = self._shared_loader_obj.action_loader.get(
        'ansible.legacy.async_status',
        task=async_task,
        connection=self._connection,
        play_context=self._play_context,
        loader=self._loader,
        templar=templar,
        shared_loader_obj=self._shared_loader_obj,
    )

    time_left = self._task.async_val
    while time_left > 0:
        time.sleep(self._task.poll)

        try:
            async_result = async_handler.run(task_vars=task_vars)
            # We do not bail out of the loop in cases where the failure
            # is associated with a parsing error. The async_runner can
            # have issues which result in a half-written/unparseable result
            # file on disk, which manifests to the user as a timeout happening
            # before it's time to timeout.
            if (int(async_result.get('finished', 0)) == 1 or
                    ('failed' in async_result and async_result.get('_ansible_parsed', False)) or
                    'skipped' in async_result):
                break
        except Exception as e:
            # Connections can raise exceptions during polling (eg, network bounce, reboot); these should be non-fatal.
            # On an exception, call the connection's reset method if it has one
            # (eg, drop/recreate WinRM connection; some reused connections are in a broken state)
            display.vvvv("Exception during async poll, retrying... (%s)" % to_text(e))
            display.debug("Async poll exception was:\n%s" % to_text(traceback.format_exc()))

            try:
                async_handler._connection.reset()
            except AttributeError:
                # connection plugin has no reset(); nothing to do
                pass

            # Little hack to raise the exception if we've exhausted the timeout period
            time_left -= self._task.poll
            if time_left <= 0:
                raise
        else:
            # while/try/else: runs only when the poll itself did not raise
            time_left -= self._task.poll
            self._final_q.send_callback(
                'v2_runner_on_async_poll',
                TaskResult(
                    self._host.name,
                    async_task,  # We send the full task here, because the controller knows nothing about it, the TE created it
                    async_result,
                    task_fields=self._task.dump_attrs(),
                ),
            )

    if int(async_result.get('finished', 0)) != 1:
        if async_result.get('_ansible_parsed'):
            return dict(failed=True, msg="async task did not complete within the requested time - %ss" % self._task.async_val)
        else:
            return dict(failed=True, msg="async task produced unparseable results", async_result=async_result)
    else:
        # If the async task finished, automatically cleanup the temporary
        # status file left behind.
        cleanup_task = Task().load(
            {
                'async_status': {
                    'jid': async_jid,
                    'mode': 'cleanup',
                },
                'environment': self._task.environment,
            }
        )
        cleanup_handler = self._shared_loader_obj.action_loader.get(
            'ansible.legacy.async_status',
            task=cleanup_task,
            connection=self._connection,
            play_context=self._play_context,
            loader=self._loader,
            templar=templar,
            shared_loader_obj=self._shared_loader_obj,
        )
        cleanup_handler.run(task_vars=task_vars)
        cleanup_handler.cleanup(force=True)
        async_handler.cleanup(force=True)
        return async_result
def _get_become(self, name):
    """Resolve a become plugin by name via the become loader.

    :param name: the become method name (e.g. from ``become_method``).
    :returns: the loaded become plugin.
    :raises AnsibleError: when no plugin matches ``name``.
    """
    plugin = become_loader.get(name)
    if plugin:
        return plugin
    raise AnsibleError("Invalid become method specified, could not find matching plugin: '%s'. "
                       "Use `ansible-doc -t become -l` to list available plugins." % name)
def _get_connection(self, cvars, templar):
    '''
    Reads the connection property for the host, and returns the
    correct connection object from the list of connection plugins

    Also wires up the become plugin (if becoming) and, for persistent
    connections, starts the background connection process and records
    its local socket path on the connection object.
    '''

    # use magic var if it exists, if not, let task inheritance do it's thing.
    if cvars.get('ansible_connection') is not None:
        self._play_context.connection = templar.template(cvars['ansible_connection'])
    else:
        self._play_context.connection = self._task.connection

    # TODO: play context has logic to update the connection for 'smart'
    # (default value, will chose between ssh and paramiko) and 'persistent'
    # (really paramiko), eventually this should move to task object itself.
    connection_name = self._play_context.connection

    # load connection
    conn_type = connection_name
    connection, plugin_load_context = self._shared_loader_obj.connection_loader.get_with_context(
        conn_type,
        self._play_context,
        self._new_stdin,
        task_uuid=self._task._uuid,
        ansible_playbook_pid=to_text(os.getppid())
    )

    if not connection:
        raise AnsibleError("the connection plugin '%s' was not found" % conn_type)

    # load become plugin if needed
    if cvars.get('ansible_become') is not None:
        become = boolean(templar.template(cvars['ansible_become']))
    else:
        become = self._task.become

    if become:
        if cvars.get('ansible_become_method'):
            become_plugin = self._get_become(templar.template(cvars['ansible_become_method']))
        else:
            become_plugin = self._get_become(self._task.become_method)

        try:
            connection.set_become_plugin(become_plugin)
        except AttributeError:
            # Older connection plugin that does not support set_become_plugin
            pass

        if getattr(connection.become, 'require_tty', False) and not getattr(connection, 'has_tty', False):
            raise AnsibleError(
                "The '%s' connection does not provide a TTY which is required for the selected "
                "become plugin: %s." % (conn_type, become_plugin.name)
            )

        # Backwards compat for connection plugins that don't support become plugins
        # Just do this unconditionally for now, we could move it inside of the
        # AttributeError above later
        self._play_context.set_become_plugin(become_plugin.name)

    # Also backwards compat call for those still using play_context
    self._play_context.set_attributes_from_plugin(connection)

    if any(((connection.supports_persistence and C.USE_PERSISTENT_CONNECTIONS), connection.force_persistence)):
        self._play_context.timeout = connection.get_option('persistent_command_timeout')
        display.vvvv('attempting to start connection', host=self._play_context.remote_addr)
        display.vvvv('using connection plugin %s' % connection.transport, host=self._play_context.remote_addr)

        options = self._get_persistent_connection_options(connection, cvars, templar)
        socket_path = start_connection(self._play_context, options, self._task._uuid)
        display.vvvv('local domain socket path is %s' % socket_path, host=self._play_context.remote_addr)
        setattr(connection, '_socket_path', socket_path)

    return connection
def _get_persistent_connection_options(self, connection, final_vars, templar):
    """Collect templated option values for a persistent connection.

    Gathers the variable names declared by the connection plugin (and by
    its sub-plugin, when one is attached) and returns a dict mapping each
    name present in ``final_vars`` to its templated value.
    """
    option_vars = C.config.get_plugin_vars('connection', connection._load_name)
    plugin = connection._sub_plugin
    if plugin.get('type'):
        option_vars.extend(C.config.get_plugin_vars(plugin['type'], plugin['name']))

    return {name: templar.template(final_vars[name]) for name in option_vars if name in final_vars}
def _set_plugin_options(self, plugin_type, variables, templar, task_keys):
    """Push templated option values into one of the connection's sub-plugins.

    Returns the list of variable names the plugin declares, so the caller can
    track which variables may have been consumed.
    """
    # Most sub-plugins are stored on a private attribute (e.g. ``_shell``);
    # ``become`` is the exception and is exposed publicly.
    try:
        plugin = getattr(self._connection, '_%s' % plugin_type)
    except AttributeError:
        # Some plugins are assigned to private attrs, ``become`` is not
        plugin = getattr(self._connection, plugin_type)

    option_vars = C.config.get_plugin_vars(plugin_type, plugin._load_name)
    options = {name: templar.template(variables[name]) for name in option_vars if name in variables}

    # TODO move to task method?
    plugin.set_options(task_keys=task_keys, var_options=options)
    return option_vars
def _set_connection_options(self, variables, templar):
    """Configure the connection plugin (and its shell/become sub-plugins)
    from task variables.

    Templated option values are pushed into the connection, shell and (when
    present) become plugins. Returns the list of variable names that may have
    been consumed, so callers can account for them.
    """
    # keep list of variable names possibly consumed
    varnames = []

    # grab list of usable vars for this plugin
    option_vars = C.config.get_plugin_vars('connection', self._connection._load_name)
    varnames.extend(option_vars)

    # create dict of 'templated vars'
    options = {'_extras': {}}
    for k in option_vars:
        if k in variables:
            options[k] = templar.template(variables[k])

    # add extras if plugin supports them
    if getattr(self._connection, 'allow_extras', False):
        # Undeclared 'ansible_<plugin>_*' variables are collected under
        # '_extras' rather than dropped.
        for k in variables:
            if k.startswith('ansible_%s_' % self._connection._load_name) and k not in options:
                options['_extras'][k] = templar.template(variables[k])

    task_keys = self._task.dump_attrs()

    # The task_keys 'timeout' attr is the task's timeout, not the connection timeout.
    # The connection timeout is threaded through the play_context for now.
    task_keys['timeout'] = self._play_context.timeout

    if self._play_context.password:
        # The connection password is threaded through the play_context for
        # now. This is something we ultimately want to avoid, but the first
        # step is to get connection plugins pulling the password through the
        # config system instead of directly accessing play_context.
        task_keys['password'] = self._play_context.password

    # set options with 'templated vars' specific to this plugin and dependent ones
    self._connection.set_options(task_keys=task_keys, var_options=options)
    varnames.extend(self._set_plugin_options('shell', variables, templar, task_keys))

    if self._connection.become is not None:
        if self._play_context.become_pass:
            # FIXME: eventually remove from task and play_context, here for backwards compat
            # keep out of play objects to avoid accidental disclosure, only become plugin should have
            # The become pass is already in the play_context if given on
            # the CLI (-K). Make the plugin aware of it in this case.
            task_keys['become_pass'] = self._play_context.become_pass

        varnames.extend(self._set_plugin_options('become', variables, templar, task_keys))

        # FOR BACKWARDS COMPAT:
        for option in ('become_user', 'become_flags', 'become_exe', 'become_pass'):
            try:
                setattr(self._play_context, option, self._connection.become.get_option(option))
            except KeyError:
                pass  # some plugins don't support all base flags
        self._play_context.prompt = self._connection.become.prompt

    return varnames
def _get_action_handler(self, connection, templar):
    '''
    Returns the correct action plugin to handle the requested task action
    '''
    module_collection, dummy, module_name = self._task.action.rpartition(".")
    module_prefix = module_name.split('_')[0]

    if module_collection:
        # For network modules, which look for one action plugin per platform, look for the
        # action plugin in the same collection as the module by prefixing the action plugin
        # with the same collection.
        network_action = "{0}.{1}".format(module_collection, module_prefix)
    else:
        network_action = module_prefix

    collections = self._task.collections
    action_loader = self._shared_loader_obj.action_loader

    # An action plugin matching the exact task action wins; failing that, try
    # the per-platform network group action; otherwise fall back to 'normal'.
    if action_loader.has_plugin(self._task.action, collection_list=collections):
        handler_name = self._task.action
    elif all((module_prefix in C.NETWORK_GROUP_MODULES, action_loader.has_plugin(network_action, collection_list=collections))):
        handler_name = network_action
        display.vvvv("Using network group action {handler} for {action}".format(handler=handler_name,
                                                                                action=self._task.action),
                     host=self._play_context.remote_addr)
    else:
        # use ansible.legacy.normal to allow (historic) local action_plugins/ override without collections search
        handler_name = 'ansible.legacy.normal'
        # until then, we don't want the task's collection list to be consulted; use the builtin
        collections = None

    handler = action_loader.get(
        handler_name,
        task=self._task,
        connection=connection,
        play_context=self._play_context,
        loader=self._loader,
        templar=templar,
        shared_loader_obj=self._shared_loader_obj,
        collection_list=collections,
    )

    if not handler:
        raise AnsibleError("the handler '%s' was not found" % handler_name)

    return handler
def start_connection(play_context, variables, task_uuid):
    '''
    Starts the persistent connection helper process ('ansible-connection')
    and returns the path of the local domain socket it listens on.

    :arg play_context: the PlayContext, serialized and sent to the helper
    :arg variables: dict of connection option variables sent to the helper
    :arg task_uuid: unique id of the task, forwarded to the helper
    :returns: the helper-reported local domain socket path
    :raises AnsibleError: when the helper binary cannot be located, or when
        the helper reports an error
    '''
    # Locate the 'ansible-connection' helper next to the running executable
    # first, then along PATH.
    candidate_paths = [C.ANSIBLE_CONNECTION_PATH or os.path.dirname(sys.argv[0])]
    candidate_paths.extend(os.environ.get('PATH', '').split(os.pathsep))
    for dirname in candidate_paths:
        ansible_connection = os.path.join(dirname, 'ansible-connection')
        if os.path.isfile(ansible_connection):
            display.vvvv("Found ansible-connection at path {0}".format(ansible_connection))
            break
    else:
        raise AnsibleError("Unable to find location of 'ansible-connection'. "
                           "Please set or check the value of ANSIBLE_CONNECTION_PATH")

    env = os.environ.copy()
    env.update({
        # HACK; most of these paths may change during the controller's lifetime
        # (eg, due to late dynamic role includes, multi-playbook execution), without a way
        # to invalidate/update, ansible-connection won't always see the same plugins the controller
        # can.
        'ANSIBLE_BECOME_PLUGINS': become_loader.print_paths(),
        'ANSIBLE_CLICONF_PLUGINS': cliconf_loader.print_paths(),
        'ANSIBLE_COLLECTIONS_PATH': to_native(os.pathsep.join(AnsibleCollectionConfig.collection_paths)),
        'ANSIBLE_CONNECTION_PLUGINS': connection_loader.print_paths(),
        'ANSIBLE_HTTPAPI_PLUGINS': httpapi_loader.print_paths(),
        'ANSIBLE_NETCONF_PLUGINS': netconf_loader.print_paths(),
        'ANSIBLE_TERMINAL_PLUGINS': terminal_loader.print_paths(),
    })
    python = sys.executable
    master, slave = pty.openpty()
    p = subprocess.Popen(
        [python, ansible_connection, to_text(os.getppid()), to_text(task_uuid)],
        stdin=slave, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
    )
    os.close(slave)

    # We need to set the pty into noncanonical mode. This ensures that we
    # can receive lines longer than 4095 characters (plus newline) without
    # truncating.
    old = termios.tcgetattr(master)
    new = termios.tcgetattr(master)
    new[3] = new[3] & ~termios.ICANON

    try:
        termios.tcsetattr(master, termios.TCSANOW, new)
        write_to_file_descriptor(master, variables)
        write_to_file_descriptor(master, play_context.serialize())

        (stdout, stderr) = p.communicate()
    finally:
        termios.tcsetattr(master, termios.TCSANOW, old)
        # Close the master fd in the finally block so it is released even when
        # writing to the helper or communicate() raises; previously the close
        # happened after the try/finally and leaked on the exception path.
        os.close(master)

    if p.returncode == 0:
        result = json.loads(to_text(stdout, errors='surrogate_then_replace'))
    else:
        # On failure the helper writes its (usually JSON) diagnostics to stderr.
        try:
            result = json.loads(to_text(stderr, errors='surrogate_then_replace'))
        except getattr(json.decoder, 'JSONDecodeError', ValueError):
            # JSONDecodeError only available on Python 3.5+
            result = {'error': to_text(stderr, errors='surrogate_then_replace')}

    if 'messages' in result:
        # Replay any messages the helper queued at the appropriate verbosity.
        for level, message in result['messages']:
            if level == 'log':
                display.display(message, log_only=True)
            elif level in ('debug', 'v', 'vv', 'vvv', 'vvvv', 'vvvvv', 'vvvvvv'):
                getattr(display, level)(message, host=play_context.remote_addr)
            else:
                if hasattr(display, level):
                    getattr(display, level)(message)
                else:
                    display.vvvv(message, host=play_context.remote_addr)

    if 'error' in result:
        if play_context.verbosity > 2:
            if result.get('exception'):
                msg = "The full traceback is:\n" + result['exception']
                display.display(msg, color=C.COLOR_ERROR)
        raise AnsibleError(result['error'])

    return result['socket_path']
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,060 |
Debug of ansible_facts causes injection of unprefixed variables
|
### Summary
When calling `debug` with `var: ansible_facts`, the unprefixed keys of `ansible_facts` are injected as variables and override at least some other variable sources (host vars at least) within Ansible's variable precedence order.
I believe the issue occurs in [ansible/executor/task_executor.py](https://github.com/ansible/ansible/blob/devel/lib/ansible/executor/task_executor.py#L715) as `ansible_facts` would be in results, but without `ansible_` prefixes in keys at this point. I think this issue may have been previously masked by the behavior of `AnsibleJ2Vars.__getitem__`, which changed in [this commit](https://github.com/ansible/ansible/commit/a2af8432f36ec8cc5368a747f1211d2b9ba01f2e#diff-5671ace2a63ef6117ee1fb489c22b1d1a3a0a0d6352b10a0f18d02ee89faa57aL84).
### Issue Type
Bug Report
### Component Name
- lib/ansible/executor/task_executor
- lib/ansible/template/vars
### Ansible Version
```console (paste below)
$ ansible --version
ansible 2.9.19
config file = omitted
configured module search path = [u'/omitted/library']
ansible python module location = /usr/lib/python2.7/dist-packages/ansible
executable location = /usr/bin/ansible
python version = 2.7.17 (default, Feb 27 2021, 15:10:58) [GCC 7.5.0]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
CACHE_PLUGIN(/omitted/ansible.cfg) = jsonfile
CACHE_PLUGIN_CONNECTION(/omitted/ansible.cfg) = .fact_cache/
CACHE_PLUGIN_TIMEOUT(/omitted/ansible.cfg) = 86400
DEFAULT_FILTER_PLUGIN_PATH(env: ANSIBLE_FILTER_PLUGINS) = [u'/omitted/filter_plugins']
DEFAULT_GATHERING(/omitted/ansible.cfg) = implicit
DEFAULT_HOST_LIST(/omitted/ansible.cfg) = [u'/omitted/hosts']
DEFAULT_LOG_PATH(/omitted/ansible.cfg) = /omitted/ansible.log
DEFAULT_MODULE_PATH(/omitted/ansible.cfg) = [u'/omitted/library']
DEFAULT_REMOTE_USER(/omitted/ansible.cfg) = omitted
DEFAULT_ROLES_PATH(/omitted/ansible.cfg) = [u'/omitted/roles']
DEFAULT_VAULT_IDENTITY_LIST(/omitted/ansible.cfg) = omitted
RETRY_FILES_ENABLED(/omitted/ansible.cfg) = False
```
### OS / Environment
Ubuntu 18.04 LTS (bionic) using the ansible/ansible PPA
### Steps to Reproduce
Remove fact cache of hostname first.
```yaml (paste below)
- hosts: hostname
tasks:
- debug:
var: system_vendor
- debug:
var: interfaces
- debug:
var: ansible_facts
- debug:
var: system_vendor
- debug:
var: interfaces
- fail:
```
### Expected Results
I expected to be able to access the `interfaces` variable provided in my host vars. I did not expect to have a variable `system_vendor` in any scope.
In playbook order, I expected
1. Successful `setup`
1. not defined error
1. my `interfaces` variable
1. contents of `ansible_facts`
1. not defined error
1. my `interfaces` variable
1. fail as requested
### Actual Results
After calling debug with `var: ansible_facts`, non-prefixed keys of `ansible_facts` are injected into variables and appear to have a higher priority than host vars at least.
```console (paste below)
PLAY [groups] **********************************************************************************************************************************************************************
TASK [Gathering Facts] *******************************************************************************************************************************************************************************************************************************************************
ok: [hostname]
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"system_vendor": "VARIABLE IS NOT DEFINED!"
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"interfaces": {
"dict_key1": "val1",
"dict_key2": "val2"
}
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"ansible_facts": {
[... not relevant ...],
"interfaces": [
"lo",
"eth0",
"eth1"
],
[... not relevant ...],
"system_vendor": "VMware, Inc.",
[... not relevant ...]
}
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"system_vendor": "VMware, Inc."
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"interfaces": [
"lo",
"eth0",
"eth1"
]
}
TASK [fail] ******************************************************************************************************************************************************************************************************************************************************************
fatal: [hostname]: FAILED! => {"changed": false, "msg": "Failed as requested from task"}
PLAY RECAP *******************************************************************************************************************************************************************************************************************************************************************
hostname : ok=7 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
|
https://github.com/ansible/ansible/issues/74060
|
https://github.com/ansible/ansible/pull/74067
|
112a7718c6c776820a215562c91b092ed8e96ae1
|
f9f839fa08eee46ad7a86d6cbc7519541a50c7ef
| 2021-03-29T20:00:25Z |
python
| 2021-03-31T13:30:09Z |
lib/ansible/plugins/strategy/__init__.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import cmd
import functools
import os
import pprint
import sys
import threading
import time
from collections import deque
from multiprocessing import Lock
from jinja2.exceptions import UndefinedError
from ansible import constants as C
from ansible import context
from ansible.errors import AnsibleError, AnsibleFileNotFound, AnsibleParserError, AnsibleUndefinedVariable
from ansible.executor import action_write_locks
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.task_result import TaskResult
from ansible.executor.task_queue_manager import CallbackSend
from ansible.module_utils.six.moves import queue as Queue
from ansible.module_utils.six import iteritems, itervalues, string_types
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection, ConnectionError
from ansible.playbook.conditional import Conditional
from ansible.playbook.handler import Handler
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.included_file import IncludedFile
from ansible.playbook.task_include import TaskInclude
from ansible.plugins import loader as plugin_loader
from ansible.template import Templar
from ansible.utils.display import Display
from ansible.utils.vars import combine_vars
from ansible.vars.clean import strip_internal_keys, module_response_deepcopy
display = Display()
__all__ = ['StrategyBase']
# This list can be an exact match, or start of string bound
# does not accept regex
ALWAYS_DELEGATE_FACT_PREFIXES = frozenset((
'discovered_interpreter_',
))
class StrategySentinel:
    """Sentinel type; an instance placed on the results queue tells the
    background results thread to shut down."""
    pass


# Shared sentinel instance, pushed onto ``_final_q`` by ``StrategyBase.cleanup``.
_sentinel = StrategySentinel()
def post_process_whens(result, task, templar):
    """Evaluate the task's ``changed_when``/``failed_when`` conditionals and
    record the outcomes on ``result`` (mutated in place)."""
    cond = None

    def _evaluate(when):
        # Reuse the Conditional built for changed_when when one exists.
        conditional = cond if cond is not None else Conditional(loader=templar._loader)
        conditional.when = when
        return conditional, conditional.evaluate_conditional(templar, templar.available_variables)

    if task.changed_when:
        cond, result['changed'] = _evaluate(task.changed_when)

    if task.failed_when:
        cond, outcome = _evaluate(task.failed_when)
        result['failed_when_result'] = result['failed'] = outcome
def results_thread_main(strategy):
    """Background thread body: drain ``strategy._final_q`` until a sentinel arrives.

    Each queue item is dispatched by type: a ``StrategySentinel`` stops the
    loop, a ``CallbackSend`` is forwarded to the TQM's callback machinery, and
    a ``TaskResult`` is normalized and appended to either the handler or the
    regular results deque.
    """
    while True:
        try:
            result = strategy._final_q.get()
            if isinstance(result, StrategySentinel):
                break
            elif isinstance(result, CallbackSend):
                # Normalize the first TaskResult among the args before
                # forwarding (presumably at most one is sent per CallbackSend
                # — only the first is normalized here).
                for arg in result.args:
                    if isinstance(arg, TaskResult):
                        strategy.normalize_task_result(arg)
                        break
                strategy._tqm.send_callback(result.method_name, *result.args, **result.kwargs)
            elif isinstance(result, TaskResult):
                strategy.normalize_task_result(result)
                with strategy._results_lock:
                    # only handlers have the listen attr, so this must be a handler
                    # we split up the results into two queues here to make sure
                    # handler and regular result processing don't cross wires
                    if 'listen' in result._task_fields:
                        strategy._handler_results.append(result)
                    else:
                        strategy._results.append(result)
            else:
                display.warning('Received an invalid object (%s) in the result queue: %r' % (type(result), result))
        except (IOError, EOFError):
            # The queue's underlying pipe went away (controller shutdown):
            # exit the thread.
            break
        except Queue.Empty:
            pass
def debug_closure(func):
    """Closure to wrap ``StrategyBase._process_pending_results`` and invoke the task debugger"""
    @functools.wraps(func)
    def inner(self, iterator, one_pass=False, max_passes=None, do_handlers=False):
        # Maps TaskResult predicate names to the TQM stats counters they
        # correspond to, used when rolling stats back on a REDO.
        status_to_stats_map = (
            ('is_failed', 'failures'),
            ('is_unreachable', 'dark'),
            ('is_changed', 'changed'),
            ('is_skipped', 'skipped'),
        )

        # We don't know the host yet, copy the previous states, for lookup after we process new results
        prev_host_states = iterator._host_states.copy()

        results = func(self, iterator, one_pass=one_pass, max_passes=max_passes, do_handlers=do_handlers)
        _processed_results = []

        for result in results:
            task = result._task
            host = result._host
            # NOTE(review): pop() defaults to None here but the value is
            # subscripted unconditionally below — presumably the cache always
            # holds an entry for an in-flight (host, task); confirm.
            _queued_task_args = self._queued_task_cache.pop((host.name, task._uuid), None)
            task_vars = _queued_task_args['task_vars']
            play_context = _queued_task_args['play_context']
            # Try to grab the previous host state, if it doesn't exist use get_host_state to generate an empty state
            try:
                prev_host_state = prev_host_states[host.name]
            except KeyError:
                prev_host_state = iterator.get_host_state(host)

            while result.needs_debugger(globally_enabled=self.debugger_active):
                next_action = NextAction()
                dbg = Debugger(task, host, task_vars, play_context, result, next_action)
                dbg.cmdloop()

                if next_action.result == NextAction.REDO:
                    # rollback host state
                    self._tqm.clear_failed_hosts()
                    iterator._host_states[host.name] = prev_host_state
                    for method, what in status_to_stats_map:
                        if getattr(result, method)():
                            self._tqm._stats.decrement(what, host.name)
                    self._tqm._stats.decrement('ok', host.name)

                    # redo
                    self._queue_task(host, task, task_vars, play_context)

                    _processed_results.extend(debug_closure(func)(self, iterator, one_pass))
                    break
                elif next_action.result == NextAction.CONTINUE:
                    _processed_results.append(result)
                    break
                elif next_action.result == NextAction.EXIT:
                    # Matches KeyboardInterrupt from bin/ansible
                    sys.exit(99)
            else:
                # while/else: no debugger was needed for this result, keep it.
                _processed_results.append(result)

        return _processed_results

    return inner
class StrategyBase:

    '''
    Base class for strategy plugins. Holds the code shared by all
    strategies: queueing tasks to workers, collecting and processing
    results, running handlers, and cleanup actions.
    '''

    # by default, strategies should support throttling but we allow individual
    # strategies to disable this and either forego supporting it or managing
    # the throttling internally (as `free` does)
    ALLOW_BASE_THROTTLING = True
def __init__(self, tqm):
    """Capture shared state from the TaskQueueManager and start the
    background results thread.

    :arg tqm: the TaskQueueManager driving this strategy; provides the
        inventory, variable manager, loader, worker list and final results
        queue used throughout the strategy.
    """
    self._tqm = tqm
    self._inventory = tqm.get_inventory()
    self._workers = tqm._workers
    self._variable_manager = tqm.get_variable_manager()
    self._loader = tqm.get_loader()
    self._final_q = tqm._final_q
    self._step = context.CLIARGS.get('step', False)
    self._diff = context.CLIARGS.get('diff', False)

    # the task cache is a dictionary of tuples of (host.name, task._uuid)
    # used to find the original task object of in-flight tasks and to store
    # the task args/vars and play context info used to queue the task.
    self._queued_task_cache = {}

    # Backwards compat: self._display isn't really needed, just import the global display and use that.
    self._display = display

    # internal counters
    self._pending_results = 0
    self._pending_handler_results = 0
    self._cur_worker = 0

    # this dictionary is used to keep track of hosts that have
    # outstanding tasks still in queue
    self._blocked_hosts = dict()

    # this dictionary is used to keep track of hosts that have
    # flushed handlers
    self._flushed_hosts = dict()

    self._results = deque()
    self._handler_results = deque()
    self._results_lock = threading.Condition(threading.Lock())

    # create the result processing thread for reading results in the background
    self._results_thread = threading.Thread(target=results_thread_main, args=(self,))
    self._results_thread.daemon = True
    self._results_thread.start()

    # holds the list of active (persistent) connections to be shutdown at
    # play completion
    self._active_connections = dict()

    # Caches for get_host calls, to avoid calling excessively
    # These values should be set at the top of the ``run`` method of each
    # strategy plugin. Use ``_set_hosts_cache`` to set these values
    self._hosts_cache = []
    self._hosts_cache_all = []

    self.debugger_active = C.ENABLE_TASK_DEBUGGER
def _set_hosts_cache(self, play, refresh=True):
    """Populate ``_hosts_cache`` and ``_hosts_cache_all`` for ``play``.

    With ``refresh=False`` the caches are left untouched when both are
    already populated. See the comment in ``__init__`` for why these
    caches exist.
    """
    if not refresh and self._hosts_cache and self._hosts_cache_all:
        return

    # A templated hosts pattern cannot be resolved here, so fall back to 'all'.
    pattern = 'all' if Templar(None).is_template(play.hosts) else (play.hosts or 'all')
    self._hosts_cache_all = [
        h.name for h in self._inventory.get_hosts(pattern=pattern, ignore_restrictions=True)
    ]
    self._hosts_cache = [h.name for h in self._inventory.get_hosts(play.hosts, order=play.order)]
def cleanup(self):
    """Tear down strategy resources: reset all persistent connections and
    stop the background results thread."""
    for sock in itervalues(self._active_connections):
        try:
            Connection(sock).reset()
        except ConnectionError as e:
            # most likely socket is already closed
            display.debug("got an error while closing persistent connection: %s" % e)

    # Wake the results thread with the shutdown sentinel, then wait for it.
    self._final_q.put(_sentinel)
    self._results_thread.join()
def run(self, iterator, play_context, result=0):
    """Finish the play: advance all hosts past their final task, run
    handlers, and translate the outcome into a TQM run-result code.

    :arg iterator: the PlayIterator for the current play
    :arg play_context: the PlayContext for the current play
    :arg result: an initial result code to merge handler errors into
    :returns: one of the ``self._tqm.RUN_*`` codes
    """
    # execute one more pass through the iterator without peeking, to
    # make sure that all of the hosts are advanced to their final task.
    # This should be safe, as everything should be ITERATING_COMPLETE by
    # this point, though the strategy may not advance the hosts itself.
    for host in self._hosts_cache:
        if host not in self._tqm._unreachable_hosts:
            try:
                iterator.get_next_task_for_host(self._inventory.hosts[host])
            except KeyError:
                iterator.get_next_task_for_host(self._inventory.get_host(host))

    # save the failed/unreachable hosts, as the run_handlers()
    # method will clear that information during its execution
    failed_hosts = iterator.get_failed_hosts()
    unreachable_hosts = self._tqm._unreachable_hosts.keys()

    display.debug("running handlers")
    handler_result = self.run_handlers(iterator, play_context)
    if isinstance(handler_result, bool) and not handler_result:
        result |= self._tqm.RUN_ERROR
    elif not handler_result:
        # NOTE(review): this branch only triggers for a falsy non-bool
        # handler_result (e.g. 0), making the OR a no-op; truthy non-bool
        # codes are never merged here — confirm against run_handlers()'
        # possible return values.
        result |= handler_result

    # now update with the hosts (if any) that failed or were
    # unreachable during the handler execution phase
    failed_hosts = set(failed_hosts).union(iterator.get_failed_hosts())
    unreachable_hosts = set(unreachable_hosts).union(self._tqm._unreachable_hosts.keys())

    # return the appropriate code, depending on the status hosts after the run
    if not isinstance(result, bool) and result != self._tqm.RUN_OK:
        return result
    elif len(unreachable_hosts) > 0:
        return self._tqm.RUN_UNREACHABLE_HOSTS
    elif len(failed_hosts) > 0:
        return self._tqm.RUN_FAILED_HOSTS
    else:
        return self._tqm.RUN_OK
def get_hosts_remaining(self, play):
    """Return the names of play hosts that have neither failed nor become unreachable."""
    self._set_hosts_cache(play, refresh=False)
    excluded = set(self._tqm._failed_hosts) | set(self._tqm._unreachable_hosts)
    return [name for name in self._hosts_cache if name not in excluded]
def get_failed_hosts(self, play):
    """Return the names of play hosts currently marked as failed."""
    self._set_hosts_cache(play, refresh=False)
    failed = self._tqm._failed_hosts
    return [name for name in self._hosts_cache if name in failed]
def add_tqm_variables(self, vars, play):
    '''
    Inject task queue manager state into the task vars sent through the
    executor engine: the hosts still in play and the hosts that failed.
    '''
    vars.update({
        'ansible_current_hosts': self.get_hosts_remaining(play),
        'ansible_failed_hosts': self.get_failed_hosts(play),
    })
def _queue_task(self, host, task, task_vars, play_context):
    ''' handles queueing the task up to be sent to a worker '''
    display.debug("entering _queue_task() for %s/%s" % (host.name, task.action))

    # Add a write lock for tasks.
    # Maybe this should be added somewhere further up the call stack but
    # this is the earliest in the code where we have task (1) extracted
    # into its own variable and (2) there's only a single code path
    # leading to the module being run. This is called by three
    # functions: __init__.py::_do_handler_run(), linear.py::run(), and
    # free.py::run() so we'd have to add to all three to do it there.
    # The next common higher level is __init__.py::run() and that has
    # tasks inside of play_iterator so we'd have to extract them to do it
    # there.
    if task.action not in action_write_locks.action_write_locks:
        display.debug('Creating lock for %s' % task.action)
        action_write_locks.action_write_locks[task.action] = Lock()

    # create a templar and template things we need later for the queuing process
    templar = Templar(loader=self._loader, variables=task_vars)

    try:
        throttle = int(templar.template(task.throttle))
    except Exception as e:
        raise AnsibleError("Failed to convert the throttle value to an integer.", obj=task._ds, orig_exc=e)

    # and then queue the new task
    try:
        # Determine the "rewind point" of the worker list. This means we start
        # iterating over the list of workers until the end of the list is found.
        # Normally, that is simply the length of the workers list (as determined
        # by the forks or serial setting), however a task/block/play may "throttle"
        # that limit down.
        rewind_point = len(self._workers)
        if throttle > 0 and self.ALLOW_BASE_THROTTLING:
            if task.run_once:
                display.debug("Ignoring 'throttle' as 'run_once' is also set for '%s'" % task.get_name())
            else:
                if throttle <= rewind_point:
                    display.debug("task: %s, throttle: %d" % (task.get_name(), throttle))
                    rewind_point = throttle

        # Round-robin over the worker slots until a free one accepts the task.
        queued = False
        starting_worker = self._cur_worker
        while True:
            if self._cur_worker >= rewind_point:
                self._cur_worker = 0

            worker_prc = self._workers[self._cur_worker]
            if worker_prc is None or not worker_prc.is_alive():
                # Cache everything needed to rebuild/redo this task later
                # (used by the task debugger and result normalization).
                self._queued_task_cache[(host.name, task._uuid)] = {
                    'host': host,
                    'task': task,
                    'task_vars': task_vars,
                    'play_context': play_context
                }

                worker_prc = WorkerProcess(self._final_q, task_vars, host, task, play_context, self._loader, self._variable_manager, plugin_loader)
                self._workers[self._cur_worker] = worker_prc
                self._tqm.send_callback('v2_runner_on_start', host, task)
                worker_prc.start()
                display.debug("worker is %d (out of %d available)" % (self._cur_worker + 1, len(self._workers)))
                queued = True

            self._cur_worker += 1

            if self._cur_worker >= rewind_point:
                self._cur_worker = 0

            if queued:
                break
            elif self._cur_worker == starting_worker:
                # All slots busy after a full pass: back off briefly and retry.
                time.sleep(0.0001)

        if isinstance(task, Handler):
            self._pending_handler_results += 1
        else:
            self._pending_results += 1
    except (EOFError, IOError, AssertionError) as e:
        # most likely an abort
        display.debug("got an error while queuing: %s" % e)
        return
    display.debug("exiting _queue_task() for %s/%s" % (host.name, task.action))
def get_task_hosts(self, iterator, task_host, task):
    """Return the host names a task's result applies to.

    ``run_once`` tasks apply to every reachable host in the play; any other
    task applies only to the host that actually ran it.
    """
    if not task.run_once:
        return [task_host.name]
    unreachable = self._tqm._unreachable_hosts
    return [name for name in self._hosts_cache if name not in unreachable]
def get_delegated_hosts(self, result, task):
    """Return a single-element list naming the delegated host.

    Prefers the resolved delegated host recorded in the task result and
    falls back to the task's raw ``delegate_to`` value.
    """
    delegated_vars = result.get('_ansible_delegated_vars', {})
    resolved = delegated_vars.get('ansible_delegated_host', None)
    return [resolved or task.delegate_to]
def _set_always_delegated_facts(self, result, task):
    """Sets host facts for ``delegate_to`` hosts for facts that should
    always be delegated

    This operation mutates ``result`` to remove the always delegated facts

    See ``ALWAYS_DELEGATE_FACT_PREFIXES``
    """
    if task.delegate_to is None:
        return

    facts = result['ansible_facts']
    always_keys = set()
    _add = always_keys.add
    # Collect every fact key matching one of the always-delegate prefixes.
    for fact_key in facts:
        for always_key in ALWAYS_DELEGATE_FACT_PREFIXES:
            if fact_key.startswith(always_key):
                _add(fact_key)

    if always_keys:
        _pop = facts.pop
        # Remove the matched facts from the result while building the fact
        # payload for the delegated host(s).
        always_facts = {
            'ansible_facts': dict((k, _pop(k)) for k in list(facts) if k in always_keys)
        }

        host_list = self.get_delegated_hosts(result, task)
        _set_host_facts = self._variable_manager.set_host_facts
        for target_host in host_list:
            _set_host_facts(target_host, always_facts)
def normalize_task_result(self, task_result):
    """Rehydrate a TaskResult whose host/task were sent back as identifiers.

    Workers commonly return only ``Host.name`` and ``Task._uuid`` for
    performance reasons; this replaces those strings with the real ``Host``
    and ``Task`` objects. Mutates and returns ``task_result``.
    """
    host = task_result._host
    if isinstance(host, string_types):
        # A string host is the ``Host.name``; resolve it through the inventory.
        task_result._host = self._inventory.get_host(to_text(host))

    task = task_result._task
    if isinstance(task, string_types):
        # A string task is the ``Task._uuid``; look the queued original up
        # and rebuild a standalone copy carrying the returned task fields.
        cached = self._queued_task_cache.get((task_result._host.name, task))['task']
        rebuilt = cached.copy(exclude_parent=True, exclude_tasks=True)
        rebuilt._parent = cached._parent
        rebuilt.from_attrs(task_result._task_fields)
        task_result._task = rebuilt

    return task_result
@debug_closure
def _process_pending_results(self, iterator, one_pass=False, max_passes=None, do_handlers=False):
    '''
    Reads results off the final queue and takes appropriate action
    based on the result (executing callbacks, updating state, etc.).

    :arg iterator: the play iterator driving the current play
    :kwarg one_pass: if True, process at most one queued result
    :kwarg max_passes: optional cap on how many results to drain this call
    :kwarg do_handlers: read from the handler results queue instead of the
        regular results queue
    :returns: list of the TaskResult objects processed
    '''
    ret_results = []
    handler_templar = Templar(self._loader)

    def search_handler_blocks_by_name(handler_name, handler_blocks):
        # iterate in reversed order since last handler loaded with the same name wins
        for handler_block in reversed(handler_blocks):
            for handler_task in handler_block.block:
                if handler_task.name:
                    if not handler_task.cached_name:
                        # Template the handler name once and cache it so
                        # repeated notifications don't re-template.
                        if handler_templar.is_template(handler_task.name):
                            handler_templar.available_variables = self._variable_manager.get_vars(play=iterator._play,
                                                                                                  task=handler_task,
                                                                                                  _hosts=self._hosts_cache,
                                                                                                  _hosts_all=self._hosts_cache_all)
                            handler_task.name = handler_templar.template(handler_task.name)
                        handler_task.cached_name = True

                    try:
                        # first we check with the full result of get_name(), which may
                        # include the role name (if the handler is from a role). If that
                        # is not found, we resort to the simple name field, which doesn't
                        # have anything extra added to it.
                        candidates = (
                            handler_task.name,
                            handler_task.get_name(include_role_fqcn=False),
                            handler_task.get_name(include_role_fqcn=True),
                        )

                        if handler_name in candidates:
                            return handler_task
                    except (UndefinedError, AnsibleUndefinedVariable):
                        # We skip this handler due to the fact that it may be using
                        # a variable in the name that was conditionally included via
                        # set_fact or some other method, and we don't want to error
                        # out unnecessarily
                        continue
        return None

    cur_pass = 0
    while True:
        try:
            # Pop one result under the lock; IndexError means the queue
            # is drained and we are done for this call.
            self._results_lock.acquire()
            if do_handlers:
                task_result = self._handler_results.popleft()
            else:
                task_result = self._results.popleft()
        except IndexError:
            break
        finally:
            self._results_lock.release()

        original_host = task_result._host
        original_task = task_result._task

        # all host status messages contain 2 entries: (msg, task_result)
        role_ran = False
        if task_result.is_failed():
            role_ran = True
            ignore_errors = original_task.ignore_errors
            if not ignore_errors:
                display.debug("marking %s as failed" % original_host.name)
                if original_task.run_once:
                    # if we're using run_once, we have to fail every host here
                    for h in self._inventory.get_hosts(iterator._play.hosts):
                        if h.name not in self._tqm._unreachable_hosts:
                            state, _ = iterator.get_next_task_for_host(h, peek=True)
                            iterator.mark_host_failed(h)
                            state, new_task = iterator.get_next_task_for_host(h, peek=True)
                else:
                    iterator.mark_host_failed(original_host)

                # grab the current state and if we're iterating on the rescue portion
                # of a block then we save the failed task in a special var for use
                # within the rescue/always
                state, _ = iterator.get_next_task_for_host(original_host, peek=True)

                if iterator.is_failed(original_host) and state and state.run_state == iterator.ITERATING_COMPLETE:
                    self._tqm._failed_hosts[original_host.name] = True

                # Use of get_active_state() here helps detect proper state if, say, we are in a rescue
                # block from an included file (include_tasks). In a non-included rescue case, a rescue
                # that starts with a new 'block' will have an active state of ITERATING_TASKS, so we also
                # check the current state block tree to see if any blocks are rescuing.
                if state and (iterator.get_active_state(state).run_state == iterator.ITERATING_RESCUE or
                              iterator.is_any_block_rescuing(state)):
                    self._tqm._stats.increment('rescued', original_host.name)
                    # expose the failure to rescue/always sections via
                    # ansible_failed_task / ansible_failed_result
                    self._variable_manager.set_nonpersistent_facts(
                        original_host.name,
                        dict(
                            ansible_failed_task=original_task.serialize(),
                            ansible_failed_result=task_result._result,
                        ),
                    )
                else:
                    self._tqm._stats.increment('failures', original_host.name)
            else:
                # failure with ignore_errors counts as ok + ignored
                self._tqm._stats.increment('ok', original_host.name)
                self._tqm._stats.increment('ignored', original_host.name)
                if 'changed' in task_result._result and task_result._result['changed']:
                    self._tqm._stats.increment('changed', original_host.name)
            self._tqm.send_callback('v2_runner_on_failed', task_result, ignore_errors=ignore_errors)
        elif task_result.is_unreachable():
            ignore_unreachable = original_task.ignore_unreachable
            if not ignore_unreachable:
                self._tqm._unreachable_hosts[original_host.name] = True
                iterator._play._removed_hosts.append(original_host.name)
            else:
                self._tqm._stats.increment('skipped', original_host.name)
                task_result._result['skip_reason'] = 'Host %s is unreachable' % original_host.name
            self._tqm._stats.increment('dark', original_host.name)
            self._tqm.send_callback('v2_runner_on_unreachable', task_result)
        elif task_result.is_skipped():
            self._tqm._stats.increment('skipped', original_host.name)
            self._tqm.send_callback('v2_runner_on_skipped', task_result)
        else:
            role_ran = True

            if original_task.loop:
                # this task had a loop, and has more than one result, so
                # loop over all of them instead of a single result
                result_items = task_result._result.get('results', [])
            else:
                result_items = [task_result._result]

            for result_item in result_items:
                if '_ansible_notify' in result_item:
                    if task_result.is_changed():
                        # The shared dictionary for notified handlers is a proxy, which
                        # does not detect when sub-objects within the proxy are modified.
                        # So, per the docs, we reassign the list so the proxy picks up and
                        # notifies all other threads
                        for handler_name in result_item['_ansible_notify']:
                            found = False
                            # Find the handler using the above helper. First we look up the
                            # dependency chain of the current task (if it's from a role), otherwise
                            # we just look through the list of handlers in the current play/all
                            # roles and use the first one that matches the notify name
                            target_handler = search_handler_blocks_by_name(handler_name, iterator._play.handlers)
                            if target_handler is not None:
                                found = True
                                if target_handler.notify_host(original_host):
                                    self._tqm.send_callback('v2_playbook_on_notify', target_handler, original_host)

                            # additionally notify any handler 'listen'-ing for this name
                            for listening_handler_block in iterator._play.handlers:
                                for listening_handler in listening_handler_block.block:
                                    listeners = getattr(listening_handler, 'listen', []) or []
                                    if not listeners:
                                        continue

                                    listeners = listening_handler.get_validated_value(
                                        'listen', listening_handler._valid_attrs['listen'], listeners, handler_templar
                                    )
                                    if handler_name not in listeners:
                                        continue
                                    else:
                                        found = True

                                    if listening_handler.notify_host(original_host):
                                        self._tqm.send_callback('v2_playbook_on_notify', listening_handler, original_host)

                            # and if none were found, then we raise an error
                            if not found:
                                msg = ("The requested handler '%s' was not found in either the main handlers list nor in the listening "
                                       "handlers list" % handler_name)
                                if C.ERROR_ON_MISSING_HANDLER:
                                    raise AnsibleError(msg)
                                else:
                                    display.warning(msg)

                if 'add_host' in result_item:
                    # this task added a new host (add_host module)
                    new_host_info = result_item.get('add_host', dict())
                    self._add_host(new_host_info, result_item)
                    post_process_whens(result_item, original_task, handler_templar)

                elif 'add_group' in result_item:
                    # this task added a new group (group_by module)
                    self._add_group(original_host, result_item)
                    post_process_whens(result_item, original_task, handler_templar)

                if 'ansible_facts' in result_item:
                    # if delegated fact and we are delegating facts, we need to change target host for them
                    if original_task.delegate_to is not None and original_task.delegate_facts:
                        host_list = self.get_delegated_hosts(result_item, original_task)
                    else:
                        # Set facts that should always be on the delegated hosts
                        self._set_always_delegated_facts(result_item, original_task)

                        host_list = self.get_task_hosts(iterator, original_host, original_task)

                    if original_task.action in C._ACTION_INCLUDE_VARS:
                        for (var_name, var_value) in iteritems(result_item['ansible_facts']):
                            # find the host we're actually referring to here, which may
                            # be a host that is not really in inventory at all
                            for target_host in host_list:
                                self._variable_manager.set_host_variable(target_host, var_name, var_value)
                    else:
                        cacheable = result_item.pop('_ansible_facts_cacheable', False)
                        for target_host in host_list:
                            # so set_fact is a misnomer but 'cacheable = true' was meant to create an 'actual fact'
                            # to avoid issues with precedence and confusion with set_fact normal operation,
                            # we set BOTH fact and nonpersistent_facts (aka hostvar)
                            # when fact is retrieved from cache in subsequent operations it will have the lower precedence,
                            # but for playbook setting it the 'higher' precedence is kept
                            is_set_fact = original_task.action in C._ACTION_SET_FACT
                            if not is_set_fact or cacheable:
                                self._variable_manager.set_host_facts(target_host, result_item['ansible_facts'].copy())
                            if is_set_fact:
                                self._variable_manager.set_nonpersistent_facts(target_host, result_item['ansible_facts'].copy())

                if 'ansible_stats' in result_item and 'data' in result_item['ansible_stats'] and result_item['ansible_stats']['data']:

                    # per_host defaults to True when absent
                    if 'per_host' not in result_item['ansible_stats'] or result_item['ansible_stats']['per_host']:
                        host_list = self.get_task_hosts(iterator, original_host, original_task)
                    else:
                        host_list = [None]

                    data = result_item['ansible_stats']['data']
                    aggregate = 'aggregate' in result_item['ansible_stats'] and result_item['ansible_stats']['aggregate']
                    for myhost in host_list:
                        for k in data.keys():
                            if aggregate:
                                self._tqm._stats.update_custom_stats(k, data[k], myhost)
                            else:
                                self._tqm._stats.set_custom_stats(k, data[k], myhost)

            if 'diff' in task_result._result:
                if self._diff or getattr(original_task, 'diff', False):
                    self._tqm.send_callback('v2_on_file_diff', task_result)

            if not isinstance(original_task, TaskInclude):
                self._tqm._stats.increment('ok', original_host.name)
                if 'changed' in task_result._result and task_result._result['changed']:
                    self._tqm._stats.increment('changed', original_host.name)

            # finally, send the ok for this task
            self._tqm.send_callback('v2_runner_on_ok', task_result)

        # register final results
        if original_task.register:
            host_list = self.get_task_hosts(iterator, original_host, original_task)

            # strip internal keys before exposing the result as a variable
            clean_copy = strip_internal_keys(module_response_deepcopy(task_result._result))
            if 'invocation' in clean_copy:
                del clean_copy['invocation']

            for target_host in host_list:
                self._variable_manager.set_nonpersistent_facts(target_host, {original_task.register: clean_copy})

        if do_handlers:
            self._pending_handler_results -= 1
        else:
            self._pending_results -= 1
        if original_host.name in self._blocked_hosts:
            del self._blocked_hosts[original_host.name]

        # If this is a role task, mark the parent role as being run (if
        # the task was ok or failed, but not skipped or unreachable)
        if original_task._role is not None and role_ran:  # TODO: and original_task.action not in C._ACTION_INCLUDE_ROLE:?
            # lookup the role in the ROLE_CACHE to make sure we're dealing
            # with the correct object and mark it as executed
            for (entry, role_obj) in iteritems(iterator._play.ROLE_CACHE[original_task._role.get_name()]):
                if role_obj._uuid == original_task._role._uuid:
                    role_obj._had_task_run[original_host.name] = True

        ret_results.append(task_result)

        if one_pass or max_passes is not None and (cur_pass + 1) >= max_passes:
            break

        cur_pass += 1

    return ret_results
def _wait_on_handler_results(self, iterator, handler, notified_hosts):
    '''
    Wait for the handler tasks to complete, using a short sleep
    between checks to ensure we don't spin lock
    '''

    collected = []
    seen_count = 0

    display.debug("waiting for handler results...")
    while (self._pending_handler_results > 0 and
           seen_count < len(notified_hosts) and
           not self._tqm._terminated):

        if self._tqm.has_dead_workers():
            raise AnsibleError("A worker was found in a dead state")

        batch = self._process_pending_results(iterator, do_handlers=True)
        collected.extend(batch)
        # count only results for this handler from hosts we notified
        seen_count += sum(
            1 for r in batch
            if r._host in notified_hosts and r.task_name == handler.name
        )
        if self._pending_handler_results > 0:
            time.sleep(C.DEFAULT_INTERNAL_POLL_INTERVAL)

    display.debug("no more pending handlers, returning what we have")
    return collected
def _wait_on_pending_results(self, iterator):
    '''
    Wait for the shared counter to drop to zero, using a short sleep
    between checks to ensure we don't spin lock
    '''

    collected = []

    display.debug("waiting for pending results...")
    while self._pending_results > 0 and not self._tqm._terminated:

        if self._tqm.has_dead_workers():
            raise AnsibleError("A worker was found in a dead state")

        collected.extend(self._process_pending_results(iterator))

        if self._pending_results > 0:
            # short nap so we don't spin on the queue
            time.sleep(C.DEFAULT_INTERNAL_POLL_INTERVAL)

    display.debug("no more pending results, returning what we have")
    return collected
def _add_host(self, host_info, result_item):
    '''
    Helper function to add a new host to inventory based on a task result.
    '''

    changed = False
    if host_info:
        hostname = host_info.get('host_name')

        # Check if host in inventory, add if not
        if hostname not in self._inventory.hosts:
            self._inventory.add_host(hostname, 'all')
            self._hosts_cache_all.append(hostname)
            changed = True
        inventory_host = self._inventory.hosts.get(hostname)

        # Set/update the vars for this host
        current_vars = inventory_host.get_vars()
        merged_vars = combine_vars(current_vars, host_info.get('host_vars', dict()))
        if current_vars != merged_vars:
            inventory_host.vars = merged_vars
            changed = True

        for group_name in host_info.get('groups', []):
            if group_name not in self._inventory.groups:
                group_name = self._inventory.add_group(group_name)
                changed = True
            group = self._inventory.groups[group_name]
            if group.add_host(self._inventory.hosts[hostname]):
                changed = True

        # reconcile inventory, ensures inventory rules are followed
        if changed:
            self._inventory.reconcile_inventory()

    result_item['changed'] = changed
def _add_group(self, host, result_item):
    '''
    Helper function to add a group (if it does not exist), and to assign the
    specified host to that group.
    '''

    changed = False

    # the host here is from the executor side, which means it was a
    # serialized/cloned copy and we'll need to look up the proper
    # host object from the master inventory
    real_host = self._inventory.hosts.get(host.name)
    if real_host is None:
        if host.name == self._inventory.localhost.name:
            real_host = self._inventory.localhost
        else:
            raise AnsibleError('%s cannot be matched in inventory' % host.name)
    group_name = result_item.get('add_group')
    parent_group_names = result_item.get('parent_groups', [])

    if group_name not in self._inventory.groups:
        group_name = self._inventory.add_group(group_name)

    for parent_name in parent_group_names:
        if parent_name not in self._inventory.groups:
            # create the new group and add it to inventory
            self._inventory.add_group(parent_name)
            changed = True

    group = self._inventory.groups[group_name]
    for parent_name in parent_group_names:
        parent_group = self._inventory.groups[parent_name]
        if parent_group.add_child_group(group):
            changed = True

    if real_host not in group.get_hosts():
        changed = group.add_host(real_host)

    if group not in real_host.get_groups():
        changed = real_host.add_group(group)

    if changed:
        self._inventory.reconcile_inventory()

    result_item['changed'] = changed
def _copy_included_file(self, included_file):
    '''
    A proven safe and performant way to create a copy of an included file
    '''
    task_copy = included_file._task.copy(exclude_parent=True)
    task_copy._parent = included_file._task._parent

    # merge the include's vars on top of the task's own vars
    merged_vars = task_copy.vars.copy()
    merged_vars.update(included_file._vars)
    task_copy.vars = merged_vars

    return task_copy
def _load_included_file(self, included_file, iterator, is_handler=False):
    '''
    Loads an included YAML file of tasks, applying the optional set of variables.

    :arg included_file: IncludedFile describing the file, the including task,
        and the hosts that triggered the include
    :kwarg is_handler: parse the loaded blocks as handlers instead of tasks
    :returns: list of Block objects loaded from the file; an empty list on
        failure (after marking the including hosts as failed)
    '''
    display.debug("loading included file: %s" % included_file._filename)
    try:
        data = self._loader.load_from_file(included_file._filename)
        if data is None:
            # an empty file yields no blocks
            return []
        elif not isinstance(data, list):
            raise AnsibleError("included task files must contain a list of tasks")

        ti_copy = self._copy_included_file(included_file)

        # pop tags out of the include args, if they were specified there, and assign
        # them to the include. If the include already had tags specified, we raise an
        # error so that users know not to specify them both ways
        tags = included_file._task.vars.pop('tags', [])
        if isinstance(tags, string_types):
            tags = tags.split(',')
        if len(tags) > 0:
            if len(included_file._task.tags) > 0:
                raise AnsibleParserError("Include tasks should not specify tags in more than one way (both via args and directly on the task). "
                                         "Mixing tag specify styles is prohibited for whole import hierarchy, not only for single import statement",
                                         obj=included_file._task._ds)
            display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option",
                               version='2.12', collection_name='ansible.builtin')
            included_file._task.tags = tags

        block_list = load_list_of_blocks(
            data,
            play=iterator._play,
            parent_block=ti_copy.build_parent_block(),
            role=included_file._task._role,
            use_handlers=is_handler,
            loader=self._loader,
            variable_manager=self._variable_manager,
        )

        # since we skip incrementing the stats when the task result is
        # first processed, we do so now for each host in the list
        for host in included_file._hosts:
            self._tqm._stats.increment('ok', host.name)
    except AnsibleError as e:
        if isinstance(e, AnsibleFileNotFound):
            reason = "Could not find or access '%s' on the Ansible Controller." % to_text(e.file_name)
        else:
            reason = to_text(e)

        # mark all of the hosts including this file as failed, send callbacks,
        # and increment the stats for this host
        for host in included_file._hosts:
            tr = TaskResult(host=host, task=included_file._task, return_data=dict(failed=True, reason=reason))
            iterator.mark_host_failed(host)
            self._tqm._failed_hosts[host.name] = True
            self._tqm._stats.increment('failures', host.name)
            self._tqm.send_callback('v2_runner_on_failed', tr)
        return []

    # finally, send the callback and return the list of blocks loaded
    self._tqm.send_callback('v2_playbook_on_include', included_file)
    display.debug("done processing included file")
    return block_list
def run_handlers(self, iterator, play_context):
    '''
    Runs handlers on those hosts which have been notified.
    '''

    status = self._tqm.RUN_OK

    for handler_block in iterator._play.handlers:
        # FIXME: handlers need to support the rescue/always portions of blocks too,
        #        but this may take some work in the iterator and gets tricky when
        #        we consider the ability of meta tasks to flush handlers
        for handler in handler_block.block:
            if not handler.notified_hosts:
                continue
            status = self._do_handler_run(handler, handler.get_name(), iterator=iterator, play_context=play_context)
            if not status:
                # NOTE: this only aborts the current handler block; the
                # outer loop over handler blocks keeps going
                break

    return status
def _do_handler_run(self, handler, handler_name, iterator, play_context, notified_hosts=None):
    '''
    Queue one handler on its notified hosts, wait for the results, and
    recursively run any handlers brought in by include files. Returns the
    overall result (falsy aborts the caller's handler processing).
    '''

    # FIXME: need to use iterator.get_failed_hosts() instead?
    # if not len(self.get_hosts_remaining(iterator._play)):
    #     self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
    #     result = False
    #     break
    if notified_hosts is None:
        notified_hosts = handler.notified_hosts[:]

    # strategy plugins that filter hosts need access to the iterator to identify failed hosts
    failed_hosts = self._filter_notified_failed_hosts(iterator, notified_hosts)
    notified_hosts = self._filter_notified_hosts(notified_hosts)
    notified_hosts += failed_hosts

    if len(notified_hosts) > 0:
        self._tqm.send_callback('v2_playbook_on_handler_task_start', handler)

    bypass_host_loop = False
    try:
        action = plugin_loader.action_loader.get(handler.action, class_only=True, collection_list=handler.collections)
        if getattr(action, 'BYPASS_HOST_LOOP', False):
            bypass_host_loop = True
    except KeyError:
        # we don't care here, because the action may simply not have a
        # corresponding action plugin
        pass

    host_results = []
    for host in notified_hosts:
        if not iterator.is_failed(host) or iterator._play.force_handlers:
            task_vars = self._variable_manager.get_vars(play=iterator._play, host=host, task=handler,
                                                        _hosts=self._hosts_cache, _hosts_all=self._hosts_cache_all)
            self.add_tqm_variables(task_vars, play=iterator._play)
            templar = Templar(loader=self._loader, variables=task_vars)
            if not handler.cached_name:
                # template the handler name once and remember it
                handler.name = templar.template(handler.name)
                handler.cached_name = True

            self._queue_task(host, handler, task_vars, play_context)

            if templar.template(handler.run_once) or bypass_host_loop:
                break

    # collect the results from the handler run
    host_results = self._wait_on_handler_results(iterator, handler, notified_hosts)

    included_files = IncludedFile.process_include_results(
        host_results,
        iterator=iterator,
        loader=self._loader,
        variable_manager=self._variable_manager
    )

    result = True
    if len(included_files) > 0:
        for included_file in included_files:
            try:
                new_blocks = self._load_included_file(included_file, iterator=iterator, is_handler=True)
                # for every task in each block brought in by the include, add the list
                # of hosts which included the file to the notified_handlers dict
                for block in new_blocks:
                    iterator._play.handlers.append(block)
                    for task in block.block:
                        task_name = task.get_name()
                        display.debug("adding task '%s' included in handler '%s'" % (task_name, handler_name))
                        task.notified_hosts = included_file._hosts[:]
                        result = self._do_handler_run(
                            handler=task,
                            handler_name=task_name,
                            iterator=iterator,
                            play_context=play_context,
                            notified_hosts=included_file._hosts[:],
                        )
                        if not result:
                            break
            except AnsibleError as e:
                # a bad include fails all hosts that pulled it in, but we
                # keep processing the remaining included files
                for host in included_file._hosts:
                    iterator.mark_host_failed(host)
                    self._tqm._failed_hosts[host.name] = True
                display.warning(to_text(e))
                continue

    # remove hosts from notification list
    handler.notified_hosts = [
        h for h in handler.notified_hosts
        if h not in notified_hosts]
    display.debug("done running handlers, result is: %s" % result)
    return result
def _filter_notified_failed_hosts(self, iterator, notified_hosts):
return []
def _filter_notified_hosts(self, notified_hosts):
'''
Filter notified hosts accordingly to strategy
'''
# As main strategy is linear, we do not filter hosts
# We return a copy to avoid race conditions
return notified_hosts[:]
def _take_step(self, task, host=None):
    """Interactive step mode: ask the user whether to run ``task``.

    Returns True to run the task; answering 'c' also disables step
    mode for the rest of the run.
    """
    run_it = False

    prompt_msg = u'Perform task: %s ' % task
    if host:
        prompt_msg += u'on %s ' % host
    prompt_msg += u'(N)o/(y)es/(c)ontinue: '

    answer = display.prompt(prompt_msg).lower()
    if answer in ('y', 'yes'):
        display.debug("User ran task")
        run_it = True
    elif answer in ('c', 'continue'):
        display.debug("User ran task and canceled step mode")
        self._step = False
        run_it = True
    else:
        display.debug("User skipped task")

    display.banner(prompt_msg)

    return run_it
def _cond_not_supported_warn(self, task_name):
    """Warn that a 'when' conditional on this meta task is ignored."""
    display.warning("%s task does not support when conditional" % task_name)
def _execute_meta(self, task, play_context, iterator, target_host):
    '''
    Execute a meta task (noop, flush_handlers, refresh_inventory,
    clear_facts, clear_host_errors, end_play, end_host, role_complete,
    reset_connection) directly on the controller.

    :returns: a single-element list containing the TaskResult for the
        meta action on ``target_host``
    '''

    # meta tasks store their args in the _raw_params field of args,
    # since they do not use k=v pairs, so get that
    meta_action = task.args.get('_raw_params')

    def _evaluate_conditional(h):
        # evaluate the task's 'when' against host h's full variable set
        all_vars = self._variable_manager.get_vars(play=iterator._play, host=h, task=task,
                                                   _hosts=self._hosts_cache, _hosts_all=self._hosts_cache_all)
        templar = Templar(loader=self._loader, variables=all_vars)
        return task.evaluate_conditional(templar, all_vars)

    skipped = False
    msg = ''
    skip_reason = '%s conditional evaluated to False' % meta_action
    self._tqm.send_callback('v2_playbook_on_task_start', task, is_conditional=False)

    # These don't support "when" conditionals
    if meta_action in ('noop', 'flush_handlers', 'refresh_inventory', 'reset_connection') and task.when:
        self._cond_not_supported_warn(meta_action)

    if meta_action == 'noop':
        msg = "noop"
    elif meta_action == 'flush_handlers':
        self._flushed_hosts[target_host] = True
        self.run_handlers(iterator, play_context)
        self._flushed_hosts[target_host] = False
        msg = "ran handlers"
    elif meta_action == 'refresh_inventory':
        self._inventory.refresh_inventory()
        self._set_hosts_cache(iterator._play)
        msg = "inventory successfully refreshed"
    elif meta_action == 'clear_facts':
        if _evaluate_conditional(target_host):
            for host in self._inventory.get_hosts(iterator._play.hosts):
                hostname = host.get_name()
                self._variable_manager.clear_facts(hostname)
            msg = "facts cleared"
        else:
            skipped = True
            skip_reason += ', not clearing facts and fact cache for %s' % target_host.name
    elif meta_action == 'clear_host_errors':
        if _evaluate_conditional(target_host):
            for host in self._inventory.get_hosts(iterator._play.hosts):
                self._tqm._failed_hosts.pop(host.name, False)
                self._tqm._unreachable_hosts.pop(host.name, False)
                iterator._host_states[host.name].fail_state = iterator.FAILED_NONE
            msg = "cleared host errors"
        else:
            skipped = True
            skip_reason += ', not clearing host error state for %s' % target_host.name
    elif meta_action == 'end_play':
        if _evaluate_conditional(target_host):
            for host in self._inventory.get_hosts(iterator._play.hosts):
                if host.name not in self._tqm._unreachable_hosts:
                    iterator._host_states[host.name].run_state = iterator.ITERATING_COMPLETE
            msg = "ending play"
        else:
            skipped = True
            skip_reason += ', continuing play'
    elif meta_action == 'end_host':
        if _evaluate_conditional(target_host):
            iterator._host_states[target_host.name].run_state = iterator.ITERATING_COMPLETE
            iterator._play._removed_hosts.append(target_host.name)
            msg = "ending play for %s" % target_host.name
        else:
            skipped = True
            skip_reason += ", continuing execution for %s" % target_host.name
            # TODO: Nix msg here? Left for historical reasons, but skip_reason exists now.
            msg = "end_host conditional evaluated to false, continuing execution for %s" % target_host.name
    elif meta_action == 'role_complete':
        # Allow users to use this in a play as reported in https://github.com/ansible/ansible/issues/22286?
        # How would this work with allow_duplicates??
        if task.implicit:
            if target_host.name in task._role._had_task_run:
                task._role._completed[target_host.name] = True
                msg = 'role_complete for %s' % target_host.name
    elif meta_action == 'reset_connection':
        all_vars = self._variable_manager.get_vars(play=iterator._play, host=target_host, task=task,
                                                   _hosts=self._hosts_cache, _hosts_all=self._hosts_cache_all)
        templar = Templar(loader=self._loader, variables=all_vars)

        # apply the given task's information to the connection info,
        # which may override some fields already set by the play or
        # the options specified on the command line
        play_context = play_context.set_task_and_variable_override(task=task, variables=all_vars, templar=templar)

        # fields set from the play/task may be based on variables, so we have to
        # do the same kind of post validation step on it here before we use it.
        play_context.post_validate(templar=templar)

        # now that the play context is finalized, if the remote_addr is not set
        # default to using the host's address field as the remote address
        if not play_context.remote_addr:
            play_context.remote_addr = target_host.address

        # We also add "magic" variables back into the variables dict to make sure
        # a certain subset of variables exist.
        play_context.update_vars(all_vars)

        if target_host in self._active_connections:
            # reuse the persistent connection socket if one is tracked
            connection = Connection(self._active_connections[target_host])
            del self._active_connections[target_host]
        else:
            connection = plugin_loader.connection_loader.get(play_context.connection, play_context, os.devnull)
            connection.set_options(task_keys=task.dump_attrs(), var_options=all_vars)
            play_context.set_attributes_from_plugin(connection)

        if connection:
            try:
                connection.reset()
                msg = 'reset connection'
            except ConnectionError as e:
                # most likely socket is already closed
                display.debug("got an error while closing persistent connection: %s" % e)
        else:
            msg = 'no connection, nothing to reset'
    else:
        raise AnsibleError("invalid meta action requested: %s" % meta_action, obj=task._ds)

    result = {'msg': msg}
    if skipped:
        result['skipped'] = True
        result['skip_reason'] = skip_reason
    else:
        result['changed'] = False

    display.vv("META: %s" % msg)

    res = TaskResult(target_host, task, result)
    if skipped:
        self._tqm.send_callback('v2_runner_on_skipped', res)
    return [res]
def get_hosts_left(self, iterator):
    ''' returns list of available hosts for this iterator by filtering out unreachables '''

    remaining = []
    for name in self._hosts_cache:
        if name in self._tqm._unreachable_hosts:
            continue
        try:
            remaining.append(self._inventory.hosts[name])
        except KeyError:
            # not in the hosts dict directly; resolve through inventory
            remaining.append(self._inventory.get_host(name))
    return remaining
def update_active_connections(self, results):
    ''' updates the current active persistent connections '''
    for result in results:
        task_fields = result._task_fields
        if 'args' not in task_fields:
            continue
        socket_path = task_fields['args'].get('_ansible_socket')
        # remember the first socket path reported for each host
        if socket_path and result._host not in self._active_connections:
            self._active_connections[result._host] = socket_path
class NextAction(object):
    """ The next action after an interpreter's exit. """

    # possible outcomes of a debugger session
    REDO = 1
    CONTINUE = 2
    EXIT = 3

    def __init__(self, result=EXIT):
        # default is to exit the debugger and stop processing
        self.result = result
class Debugger(cmd.Cmd):
prompt_continuous = '> ' # multiple lines
def __init__(self, task, host, task_vars, play_context, result, next_action):
# cmd.Cmd is old-style class
cmd.Cmd.__init__(self)
self.prompt = '[%s] %s (debug)> ' % (host, task)
self.intro = None
self.scope = {}
self.scope['task'] = task
self.scope['task_vars'] = task_vars
self.scope['host'] = host
self.scope['play_context'] = play_context
self.scope['result'] = result
self.next_action = next_action
def cmdloop(self):
try:
cmd.Cmd.cmdloop(self)
except KeyboardInterrupt:
pass
do_h = cmd.Cmd.do_help
def do_EOF(self, args):
"""Quit"""
return self.do_quit(args)
def do_quit(self, args):
"""Quit"""
display.display('User interrupted execution')
self.next_action.result = NextAction.EXIT
return True
do_q = do_quit
def do_continue(self, args):
"""Continue to next result"""
self.next_action.result = NextAction.CONTINUE
return True
do_c = do_continue
def do_redo(self, args):
"""Schedule task for re-execution. The re-execution may not be the next result"""
self.next_action.result = NextAction.REDO
return True
do_r = do_redo
def do_update_task(self, args):
"""Recreate the task from ``task._ds``, and template with updated ``task_vars``"""
templar = Templar(None, variables=self.scope['task_vars'])
task = self.scope['task']
task = task.load_data(task._ds)
task.post_validate(templar)
self.scope['task'] = task
do_u = do_update_task
def evaluate(self, args):
try:
return eval(args, globals(), self.scope)
except Exception:
t, v = sys.exc_info()[:2]
if isinstance(t, str):
exc_type_name = t
else:
exc_type_name = t.__name__
display.display('***%s:%s' % (exc_type_name, repr(v)))
raise
def do_pprint(self, args):
"""Pretty Print"""
try:
result = self.evaluate(args)
display.display(pprint.pformat(result))
except Exception:
pass
do_p = do_pprint
def execute(self, args):
try:
code = compile(args + '\n', '<stdin>', 'single')
exec(code, globals(), self.scope)
except Exception:
t, v = sys.exc_info()[:2]
if isinstance(t, str):
exc_type_name = t
else:
exc_type_name = t.__name__
display.display('***%s:%s' % (exc_type_name, repr(v)))
raise
def default(self, line):
    # Any input that is not a recognised debugger command is executed
    # as Python in the task scope.
    try:
        self.execute(line)
    except Exception:
        # execute() has already displayed the error; keep the prompt alive.
        pass
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,060 |
Debug of ansible_facts causes injection of unprefixed variables
|
### Summary
When calling `debug` with `var: ansible_facts`, the unprefixed keys of `ansible_facts` are injected as variables and override at least some other variable precedences (host vars at least) within Ansible's precedence order.
I believe the issue occurs in [ansible/executor/task_executor.py](https://github.com/ansible/ansible/blob/devel/lib/ansible/executor/task_executor.py#L715) as `ansible_facts` would be in results, but without `ansible_` prefixes in keys at this point. I think this issue may have been previously masked by the behavior of `AnsibleJ2Vars.__getitem__`, which changed in [this commit](https://github.com/ansible/ansible/commit/a2af8432f36ec8cc5368a747f1211d2b9ba01f2e#diff-5671ace2a63ef6117ee1fb489c22b1d1a3a0a0d6352b10a0f18d02ee89faa57aL84).
### Issue Type
Bug Report
### Component Name
- lib/ansible/executor/task_executor
- lib/ansible/template/vars
### Ansible Version
```console (paste below)
$ ansible --version
ansible 2.9.19
config file = omitted
configured module search path = [u'/omitted/library']
ansible python module location = /usr/lib/python2.7/dist-packages/ansible
executable location = /usr/bin/ansible
python version = 2.7.17 (default, Feb 27 2021, 15:10:58) [GCC 7.5.0]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
CACHE_PLUGIN(/omitted/ansible.cfg) = jsonfile
CACHE_PLUGIN_CONNECTION(/omitted/ansible.cfg) = .fact_cache/
CACHE_PLUGIN_TIMEOUT(/omitted/ansible.cfg) = 86400
DEFAULT_FILTER_PLUGIN_PATH(env: ANSIBLE_FILTER_PLUGINS) = [u'/omitted/filter_plugins']
DEFAULT_GATHERING(/omitted/ansible.cfg) = implicit
DEFAULT_HOST_LIST(/omitted/ansible.cfg) = [u'/omitted/hosts']
DEFAULT_LOG_PATH(/omitted/ansible.cfg) = /omitted/ansible.log
DEFAULT_MODULE_PATH(/omitted/ansible.cfg) = [u'/omitted/library']
DEFAULT_REMOTE_USER(/omitted/ansible.cfg) = omitted
DEFAULT_ROLES_PATH(/omitted/ansible.cfg) = [u'/omitted/roles']
DEFAULT_VAULT_IDENTITY_LIST(/omitted/ansible.cfg) = omitted
RETRY_FILES_ENABLED(/omitted/ansible.cfg) = False
```
### OS / Environment
Ubuntu 18.04 LTS (bionic) using the ansible/ansible PPA
### Steps to Reproduce
Remove fact cache of hostname first.
```yaml (paste below)
- hosts: hostname
tasks:
- debug:
var: system_vendor
- debug:
var: interfaces
- debug:
var: ansible_facts
- debug:
var: system_vendor
- debug:
var: interfaces
- fail:
```
### Expected Results
I expected to be able to access the `interfaces` variable provided in my host vars. I did not expect to have a variable `system_vendor` in any scope.
In playbook order, I expected
1. Successful `setup`
1. not defined error
1. my `interfaces` variable
1. contents of `ansible_facts`
1. not defined error
1. my `interfaces` variable
1. fail as requested
### Actual Results
After calling debug with `var: ansible_facts`, non-prefixed keys of `ansible_facts` are injected into variables and appear to have a higher priority than host vars at least.
```console (paste below)
PLAY [groups] **********************************************************************************************************************************************************************
TASK [Gathering Facts] *******************************************************************************************************************************************************************************************************************************************************
ok: [hostname]
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"system_vendor": "VARIABLE IS NOT DEFINED!"
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"interfaces": {
"dict_key1": "val1",
"dict_key2": "val2"
}
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"ansible_facts": {
[... not relevant ...],
"interfaces": [
"lo",
"eth0",
"eth1"
],
[... not relevant ...],
"system_vendor": "VMware, Inc.",
[... not relevant ...]
}
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"system_vendor": "VMware, Inc."
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"interfaces": [
"lo",
"eth0",
"eth1"
]
}
TASK [fail] ******************************************************************************************************************************************************************************************************************************************************************
fatal: [hostname]: FAILED! => {"changed": false, "msg": "Failed as requested from task"}
PLAY RECAP *******************************************************************************************************************************************************************************************************************************************************************
hostname : ok=7 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
|
https://github.com/ansible/ansible/issues/74060
|
https://github.com/ansible/ansible/pull/74067
|
112a7718c6c776820a215562c91b092ed8e96ae1
|
f9f839fa08eee46ad7a86d6cbc7519541a50c7ef
| 2021-03-29T20:00:25Z |
python
| 2021-03-31T13:30:09Z |
test/integration/targets/debug/nosetfacts.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,060 |
Debug of ansible_facts causes injection of unprefixed variables
|
### Summary
When calling `debug` with `var: ansible_facts`, the unprefixed keys of `ansible_facts` are injected as variables and override at least some other variable precedences (host vars at least) within Ansible's precedence order.
I believe the issue occurs in [ansible/executor/task_executor.py](https://github.com/ansible/ansible/blob/devel/lib/ansible/executor/task_executor.py#L715) as `ansible_facts` would be in results, but without `ansible_` prefixes in keys at this point. I think this issue may have been previously masked by the behavior of `AnsibleJ2Vars.__getitem__`, which changed in [this commit](https://github.com/ansible/ansible/commit/a2af8432f36ec8cc5368a747f1211d2b9ba01f2e#diff-5671ace2a63ef6117ee1fb489c22b1d1a3a0a0d6352b10a0f18d02ee89faa57aL84).
### Issue Type
Bug Report
### Component Name
- lib/ansible/executor/task_executor
- lib/ansible/template/vars
### Ansible Version
```console (paste below)
$ ansible --version
ansible 2.9.19
config file = omitted
configured module search path = [u'/omitted/library']
ansible python module location = /usr/lib/python2.7/dist-packages/ansible
executable location = /usr/bin/ansible
python version = 2.7.17 (default, Feb 27 2021, 15:10:58) [GCC 7.5.0]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
CACHE_PLUGIN(/omitted/ansible.cfg) = jsonfile
CACHE_PLUGIN_CONNECTION(/omitted/ansible.cfg) = .fact_cache/
CACHE_PLUGIN_TIMEOUT(/omitted/ansible.cfg) = 86400
DEFAULT_FILTER_PLUGIN_PATH(env: ANSIBLE_FILTER_PLUGINS) = [u'/omitted/filter_plugins']
DEFAULT_GATHERING(/omitted/ansible.cfg) = implicit
DEFAULT_HOST_LIST(/omitted/ansible.cfg) = [u'/omitted/hosts']
DEFAULT_LOG_PATH(/omitted/ansible.cfg) = /omitted/ansible.log
DEFAULT_MODULE_PATH(/omitted/ansible.cfg) = [u'/omitted/library']
DEFAULT_REMOTE_USER(/omitted/ansible.cfg) = omitted
DEFAULT_ROLES_PATH(/omitted/ansible.cfg) = [u'/omitted/roles']
DEFAULT_VAULT_IDENTITY_LIST(/omitted/ansible.cfg) = omitted
RETRY_FILES_ENABLED(/omitted/ansible.cfg) = False
```
### OS / Environment
Ubuntu 18.04 LTS (bionic) using the ansible/ansible PPA
### Steps to Reproduce
Remove fact cache of hostname first.
```yaml (paste below)
- hosts: hostname
tasks:
- debug:
var: system_vendor
- debug:
var: interfaces
- debug:
var: ansible_facts
- debug:
var: system_vendor
- debug:
var: interfaces
- fail:
```
### Expected Results
I expected to be able to access the `interfaces` variable provided in my host vars. I did not expect to have a variable `system_vendor` in any scope.
In playbook order, I expected
1. Successful `setup`
1. not defined error
1. my `interfaces` variable
1. contents of `ansible_facts`
1. not defined error
1. my `interfaces` variable
1. fail as requested
### Actual Results
After calling debug with `var: ansible_facts`, non-prefixed keys of `ansible_facts` are injected into variables and appear to have a higher priority than host vars at least.
```console (paste below)
PLAY [groups] **********************************************************************************************************************************************************************
TASK [Gathering Facts] *******************************************************************************************************************************************************************************************************************************************************
ok: [hostname]
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"system_vendor": "VARIABLE IS NOT DEFINED!"
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"interfaces": {
"dict_key1": "val1",
"dict_key2": "val2"
}
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"ansible_facts": {
[... not relevant ...],
"interfaces": [
"lo",
"eth0",
"eth1"
],
[... not relevant ...],
"system_vendor": "VMware, Inc.",
[... not relevant ...]
}
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"system_vendor": "VMware, Inc."
}
TASK [debug] *****************************************************************************************************************************************************************************************************************************************************************
ok: [hostname] => {
"interfaces": [
"lo",
"eth0",
"eth1"
]
}
TASK [fail] ******************************************************************************************************************************************************************************************************************************************************************
fatal: [hostname]: FAILED! => {"changed": false, "msg": "Failed as requested from task"}
PLAY RECAP *******************************************************************************************************************************************************************************************************************************************************************
hostname : ok=7 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
|
https://github.com/ansible/ansible/issues/74060
|
https://github.com/ansible/ansible/pull/74067
|
112a7718c6c776820a215562c91b092ed8e96ae1
|
f9f839fa08eee46ad7a86d6cbc7519541a50c7ef
| 2021-03-29T20:00:25Z |
python
| 2021-03-31T13:30:09Z |
test/integration/targets/debug/runme.sh
|
#!/usr/bin/env bash
set -eux
trap 'rm -f out' EXIT
ansible-playbook main.yml -i ../../inventory | tee out
for i in 1 2 3; do
grep "ok: \[localhost\] => (item=$i)" out
grep "\"item\": $i" out
done
ansible-playbook main_fqcn.yml -i ../../inventory | tee out
for i in 1 2 3; do
grep "ok: \[localhost\] => (item=$i)" out
grep "\"item\": $i" out
done
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,149 |
Python 3.5: 'ERROR! A worker was found in a dead state' showing up a lot more often
|
### Summary
In several collection CIs, @briantist and me noticed that `ERROR! A worker was found in a dead state` started showing up a lot more often since < 24 h, but only with Python 3.5.
In the CIs I've observed this in, `ansible-test` is invoked with `ansible-test integration -v --color --retry-on-error --continue-on-error --diff --python 3.5 --docker` and running itself under Python 3.8.
### Issue Type
Bug Report
### Component Name
core / ansible-test
### Ansible Version
```console (paste below)
stable-2.11
devel
```
### Configuration
...
### OS / Environment
...
### Steps to Reproduce
...
### Expected Results
...
### Actual Results
A run from felixfontein/ansible-tools:
```
2021-04-06T05:13:49.6948640Z ##[group]Run actions/setup-python@v2
2021-04-06T05:13:49.6949245Z with:
2021-04-06T05:13:49.6949727Z python-version: 3.8
2021-04-06T05:13:49.6950709Z token: ***
2021-04-06T05:13:49.6951172Z ##[endgroup]
2021-04-06T05:13:49.7678082Z Successfully setup CPython (3.8.8)
2021-04-06T05:13:49.7747685Z ##[group]Run pip install https://github.com/ansible/ansible/archive/stable-2.11.tar.gz --disable-pip-version-check
2021-04-06T05:13:49.7749339Z [36;1mpip install https://github.com/ansible/ansible/archive/stable-2.11.tar.gz --disable-pip-version-check[0m
2021-04-06T05:13:49.7796866Z shell: /usr/bin/bash -e {0}
2021-04-06T05:13:49.7797401Z env:
2021-04-06T05:13:49.7798043Z pythonLocation: /opt/hostedtoolcache/Python/3.8.8/x64
2021-04-06T05:13:49.7799087Z LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.8/x64/lib
2021-04-06T05:13:49.7799732Z ##[endgroup]
2021-04-06T05:13:50.3521030Z Collecting https://github.com/ansible/ansible/archive/stable-2.11.tar.gz
2021-04-06T05:13:50.3939817Z Downloading https://github.com/ansible/ansible/archive/stable-2.11.tar.gz (5.9 MB)
2021-04-06T05:13:53.1193924Z Collecting jinja2
2021-04-06T05:13:53.1433511Z Downloading Jinja2-2.11.3-py2.py3-none-any.whl (125 kB)
2021-04-06T05:13:53.2652915Z Collecting PyYAML
2021-04-06T05:13:53.2697213Z Downloading PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl (662 kB)
2021-04-06T05:13:53.8364433Z Collecting cryptography
2021-04-06T05:13:53.8426076Z Downloading cryptography-3.4.7-cp36-abi3-manylinux2014_x86_64.whl (3.2 MB)
2021-04-06T05:13:54.0112801Z Collecting packaging
2021-04-06T05:13:54.0154167Z Downloading packaging-20.9-py2.py3-none-any.whl (40 kB)
2021-04-06T05:13:54.0489830Z Collecting resolvelib<0.6.0,>=0.5.3
2021-04-06T05:13:54.0530164Z Downloading resolvelib-0.5.4-py2.py3-none-any.whl (12 kB)
2021-04-06T05:13:54.9601955Z Collecting cffi>=1.12
2021-04-06T05:13:54.9613930Z Downloading cffi-1.14.5-cp38-cp38-manylinux1_x86_64.whl (411 kB)
2021-04-06T05:13:54.9615051Z Collecting pycparser
2021-04-06T05:13:54.9616413Z Downloading pycparser-2.20-py2.py3-none-any.whl (112 kB)
2021-04-06T05:13:54.9618703Z Collecting MarkupSafe>=0.23
2021-04-06T05:13:54.9621238Z Downloading MarkupSafe-1.1.1-cp38-cp38-manylinux2010_x86_64.whl (32 kB)
2021-04-06T05:13:54.9624670Z Collecting pyparsing>=2.0.2
2021-04-06T05:13:54.9625789Z Downloading pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
2021-04-06T05:13:54.9627021Z Using legacy 'setup.py install' for ansible-core, since package 'wheel' is not installed.
2021-04-06T05:13:54.9628563Z Installing collected packages: pycparser, pyparsing, MarkupSafe, cffi, resolvelib, PyYAML, packaging, jinja2, cryptography, ansible-core
2021-04-06T05:13:55.5541871Z Running setup.py install for ansible-core: started
2021-04-06T05:13:57.5371824Z Running setup.py install for ansible-core: finished with status 'done'
2021-04-06T05:13:57.5845493Z Successfully installed MarkupSafe-1.1.1 PyYAML-5.4.1 ansible-core-2.11.0rc1.post0 cffi-1.14.5 cryptography-3.4.7 jinja2-2.11.3 packaging-20.9 pycparser-2.20 pyparsing-2.4.7 resolvelib-0.5.4
2021-04-06T05:13:57.8352266Z ##[group]Run git clone --depth=1 --single-branch https://github.com/ansible-collections/community.general.git ./ansible_collections/community/general
2021-04-06T05:13:57.8354189Z [36;1mgit clone --depth=1 --single-branch https://github.com/ansible-collections/community.general.git ./ansible_collections/community/general[0m
2021-04-06T05:13:57.8399226Z shell: /usr/bin/bash -e {0}
2021-04-06T05:13:57.8399628Z env:
2021-04-06T05:13:57.8400399Z pythonLocation: /opt/hostedtoolcache/Python/3.8.8/x64
2021-04-06T05:13:57.8401183Z LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.8/x64/lib
2021-04-06T05:13:57.8401733Z ##[endgroup]
2021-04-06T05:13:57.8489367Z Cloning into './ansible_collections/community/general'...
2021-04-06T05:13:58.4851582Z ##[group]Run ansible-test integration -v --color --retry-on-error --continue-on-error --diff --python 3.5 --docker --coverage
2021-04-06T05:13:58.4853106Z [36;1mansible-test integration -v --color --retry-on-error --continue-on-error --diff --python 3.5 --docker --coverage[0m
2021-04-06T05:13:58.4901623Z shell: /usr/bin/bash -e {0}
2021-04-06T05:13:58.4902051Z env:
2021-04-06T05:13:58.4902616Z pythonLocation: /opt/hostedtoolcache/Python/3.8.8/x64
2021-04-06T05:13:58.4903359Z LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.8/x64/lib
2021-04-06T05:13:58.4903918Z ##[endgroup]
2021-04-06T05:13:58.7271498Z [32mFalling back to tests in "tests/integration/targets/" because "roles/test/" was not found.[0m
2021-04-06T05:13:58.7302044Z [32mRun command: docker images quay.io/ansible/default-test-container:3.1.0 --format '{{json .}}'[0m
2021-04-06T05:13:59.0199281Z [32mRun command: docker pull quay.io/ansible/default-test-container:3.1.0[0m
2021-04-06T05:13:59.2784910Z 3.1.0: Pulling from ansible/default-test-container
2021-04-06T05:13:59.2810454Z f22ccc0b8772: Pulling fs layer
[...]
2021-04-06T05:14:30.2046776Z b799b5892cf0: Pull complete
2021-04-06T05:14:30.2099406Z Digest: sha256:a88f080db38e6aefc446f45ebfcc861f34e71d278b97c47fdc653f63062b8f12
2021-04-06T05:14:30.2118420Z Status: Downloaded newer image for quay.io/ansible/default-test-container:3.1.0
2021-04-06T05:14:30.2161129Z quay.io/ansible/default-test-container:3.1.0
2021-04-06T05:14:30.2400654Z [32mScanning collection root: /home/runner/work/ansible-tools/ansible-tools/ansible_collections[0m
2021-04-06T05:14:30.2402148Z [32mRun command: git ls-files -z --cached --others --exclude-standard[0m
2021-04-06T05:14:30.2573188Z [32mRun command: git ls-files -z --deleted[0m
2021-04-06T05:14:30.2772983Z [32mRun command: git submodule status --recursive[0m
2021-04-06T05:14:30.3141641Z [32mIncluding collection: community.general (2745 files)[0m
2021-04-06T05:14:30.3143310Z [32mIncluding collection: felixfontein.tools (60 files)[0m
2021-04-06T05:14:30.3282673Z [32mCreating a payload archive containing 3614 files...[0m
2021-04-06T05:14:31.2530291Z [32mCreated a 3671942 byte payload archive containing 3614 files in 0 seconds.[0m
2021-04-06T05:14:31.2535617Z [32mAssuming Docker is available on localhost.[0m
2021-04-06T05:14:31.2539218Z [32mRun command: docker run --detach --volume /sys/fs/cgroup:/sys/fs/cgroup:ro --privileged=false --security-opt seccomp=unconfined --volume /var/run/docker.sock:/var/run/docker.sock quay.io/ansible/default-test-container:3.1.0[0m
2021-04-06T05:14:34.0402998Z [32mRun command: ssh-keygen -m PEM -q -t rsa -N '' -f /home/runner/.ansible/test/id_rsa[0m
2021-04-06T05:14:34.6922757Z [32mRun command: docker exec -i 56324fe634d11b359151a72528ec6fa065bdd45405216bbbed205a08ac59e959 /bin/sh[0m
2021-04-06T05:14:34.8058311Z [32mRun command: docker exec -i 56324fe634d11b359151a72528ec6fa065bdd45405216bbbed205a08ac59e959 dd of=/root/test.tgz bs=65536[0m
2021-04-06T05:14:34.9300143Z [32mRun command: docker exec 56324fe634d11b359151a72528ec6fa065bdd45405216bbbed205a08ac59e959 tar oxzf /root/test.tgz -C /root[0m
2021-04-06T05:14:35.2485284Z [32mRun command: docker exec 56324fe634d11b359151a72528ec6fa065bdd45405216bbbed205a08ac59e959 /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/felixfontein/tools LC_ALL=en_US.UTF-8 /usr/bin/python3.5 /root/ansible/bin/ansible-test integration -v --retry-on-error --continue-on-error --diff --python 3.5 --coverage --metadata tests/output/.tmp/metadata-l3bovzqn.json --truncate 0 --redact --color yes --requirements --coverage-label docker-default --allow-destructive[0m
2021-04-06T05:14:35.6415061Z [32mFalling back to tests in "tests/integration/targets/" because "roles/test/" was not found.[0m
2021-04-06T05:14:35.6432923Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/ansible-test.txt[0m
2021-04-06T05:14:36.3863248Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check setuptools -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt[0m
2021-04-06T05:14:37.1962822Z [32mRun command: /usr/bin/python3.5 -c 'import setuptools; print(setuptools.__version__)'[0m
2021-04-06T05:14:37.4053429Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/sslcheck.py[0m
2021-04-06T05:14:37.4528938Z [32mDetected OpenSSL version 1.1.1 under Python 3.5.[0m
2021-04-06T05:14:37.4534211Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check 'cryptography < 3.4' -c /root/ansible/test/lib/ansible_test/_data/cryptography-constraints.txt[0m
2021-04-06T05:14:38.1937829Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/integration.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt[0m
2021-04-06T05:14:39.0080553Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py check --disable-pip-version-check[0m
2021-04-06T05:14:39.5189977Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/yamlcheck.py[0m
2021-04-06T05:14:40.1024105Z Running filter_dict integration test role
2021-04-06T05:14:40.1145356Z [32mInjecting "/tmp/python-9p9xd2z3-ansible/python" as a execv wrapper for the "/usr/bin/python3.5" interpreter.[0m
2021-04-06T05:14:40.1149207Z [32mRun command: ansible-playbook filter_dict-y0kg6mvg.yml -i inventory --diff -v[0m
2021-04-06T05:14:41.2119324Z [0;34mUsing /root/ansible_collections/felixfontein/tools/tests/output/.tmp/integration/filter_dict-drb3a1sm-ÅÑŚÌβŁÈ/tests/integration/integration.cfg as config file[0m
2021-04-06T05:14:41.6551038Z /root/ansible/lib/ansible/parsing/vault/__init__.py:44: CryptographyDeprecationWarning: Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.
2021-04-06T05:14:41.6552634Z from cryptography.exceptions import InvalidSignature
2021-04-06T05:14:41.6554204Z [1;35m[WARNING]: running playbook inside collection felixfontein.tools[0m
2021-04-06T05:14:41.8362461Z
2021-04-06T05:14:41.8363128Z PLAY [testhost] ****************************************************************
2021-04-06T05:14:41.8781439Z
2021-04-06T05:14:41.8782271Z TASK [Gathering Facts] *********************************************************
2021-04-06T05:14:43.0374223Z [0;32mok: [testhost][0m
2021-04-06T05:14:43.0789754Z
2021-04-06T05:14:43.0791127Z TASK [filter_dict : Test list_to_dict filter] **********************************
2021-04-06T05:14:43.4940300Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:43.4940995Z [0;32m "changed": false,[0m
2021-04-06T05:14:43.4941998Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:43.4942559Z [0;32m}[0m
2021-04-06T05:14:43.5134985Z
2021-04-06T05:14:43.5135725Z TASK [filter_dict : Test dict filter] ******************************************
2021-04-06T05:14:43.9143949Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:43.9144634Z [0;32m "changed": false,[0m
2021-04-06T05:14:43.9145300Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:43.9145856Z [0;32m}[0m
2021-04-06T05:14:43.9692771Z
2021-04-06T05:14:43.9693487Z PLAY RECAP *********************************************************************
2021-04-06T05:14:43.9694972Z [0;32mtesthost[0m : [0;32mok=3 [0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
2021-04-06T05:14:43.9695532Z
2021-04-06T05:14:44.6182759Z Running filter_domain_suffix integration test role
2021-04-06T05:14:44.6206329Z [32mRun command: ansible-playbook filter_domain_suffix-mg3h6oek.yml -i inventory --diff -v[0m
2021-04-06T05:14:45.5306057Z [0;34mUsing /root/ansible_collections/felixfontein/tools/tests/output/.tmp/integration/filter_domain_suffix-nppzzuha-ÅÑŚÌβŁÈ/tests/integration/integration.cfg as config file[0m
2021-04-06T05:14:45.9180313Z /root/ansible/lib/ansible/parsing/vault/__init__.py:44: CryptographyDeprecationWarning: Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.
2021-04-06T05:14:45.9181828Z from cryptography.exceptions import InvalidSignature
2021-04-06T05:14:45.9183590Z [1;35m[WARNING]: running playbook inside collection felixfontein.tools[0m
2021-04-06T05:14:46.0064933Z
2021-04-06T05:14:46.0065754Z PLAY [testhost] ****************************************************************
2021-04-06T05:14:46.0250739Z
2021-04-06T05:14:46.0251719Z TASK [Gathering Facts] *********************************************************
2021-04-06T05:14:47.9723817Z [0;32mok: [testhost][0m
2021-04-06T05:14:47.9737138Z
2021-04-06T05:14:47.9738013Z TASK [filter_domain_suffix : Test dns_zone filter] *****************************
2021-04-06T05:14:47.9739170Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:47.9740135Z [0;32m "changed": false,[0m
2021-04-06T05:14:47.9741174Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:47.9742073Z [0;32m}[0m
2021-04-06T05:14:47.9742584Z
2021-04-06T05:14:47.9743343Z TASK [filter_domain_suffix : Test dns_zone_prefix filter] **********************
2021-04-06T05:14:48.2293291Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:48.2293949Z [0;32m "changed": false,[0m
2021-04-06T05:14:48.2294719Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:48.2295288Z [0;32m}[0m
2021-04-06T05:14:48.2496559Z
2021-04-06T05:14:48.2497247Z TASK [filter_domain_suffix : Test get_domain_suffix filter] ********************
2021-04-06T05:14:48.7374839Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:48.7375466Z [0;32m "changed": false,[0m
2021-04-06T05:14:48.7376098Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:48.7376855Z [0;32m}[0m
2021-04-06T05:14:48.7567646Z
2021-04-06T05:14:48.7568414Z TASK [filter_domain_suffix : Test remove_domain_suffix filter] *****************
2021-04-06T05:14:49.2268037Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:49.2268712Z [0;32m "changed": false,[0m
2021-04-06T05:14:49.2269471Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:49.2270026Z [0;32m}[0m
2021-04-06T05:14:49.2463827Z
2021-04-06T05:14:49.2464469Z TASK [filter_domain_suffix : Test registrable_domain filter] *******************
2021-04-06T05:14:49.7611216Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:49.7611967Z [0;32m "changed": false,[0m
2021-04-06T05:14:49.7612631Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:49.7613199Z [0;32m}[0m
2021-04-06T05:14:49.8152567Z
2021-04-06T05:14:49.8153305Z PLAY RECAP *********************************************************************
2021-04-06T05:14:49.8161623Z [0;32mtesthost[0m : [0;32mok=6 [0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
2021-04-06T05:14:49.8163251Z
2021-04-06T05:14:50.4611385Z Running filter_path_join_shim integration test role
2021-04-06T05:14:50.4634889Z [32mRun command: ansible-playbook filter_path_join_shim-n0es6gwd.yml -i inventory --diff -v[0m
2021-04-06T05:14:51.4093789Z [0;34mUsing /root/ansible_collections/felixfontein/tools/tests/output/.tmp/integration/filter_path_join_shim-06p9xgfj-ÅÑŚÌβŁÈ/tests/integration/integration.cfg as config file[0m
2021-04-06T05:14:51.8222028Z /root/ansible/lib/ansible/parsing/vault/__init__.py:44: CryptographyDeprecationWarning: Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.
2021-04-06T05:14:51.8223585Z from cryptography.exceptions import InvalidSignature
2021-04-06T05:14:51.8225626Z [1;35m[WARNING]: running playbook inside collection felixfontein.tools[0m
2021-04-06T05:14:51.9091083Z
2021-04-06T05:14:51.9091902Z PLAY [testhost] ****************************************************************
2021-04-06T05:14:51.9390767Z
2021-04-06T05:14:51.9391508Z TASK [Gathering Facts] *********************************************************
2021-04-06T05:14:53.1231842Z [0;32mok: [testhost][0m
2021-04-06T05:14:53.1644200Z
2021-04-06T05:14:53.1645785Z TASK [filter_path_join_shim : Test path_join filter] ***************************
2021-04-06T05:14:53.3062185Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:53.3062847Z [0;32m "changed": false,[0m
2021-04-06T05:14:53.3063607Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:53.3064161Z [0;32m}[0m
2021-04-06T05:14:53.3620476Z
2021-04-06T05:14:53.3622373Z PLAY RECAP *********************************************************************
2021-04-06T05:14:53.3623877Z [0;32mtesthost[0m : [0;32mok=2 [0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
2021-04-06T05:14:53.3624738Z
2021-04-06T05:14:53.9806394Z Running lookup_dependent integration test role
2021-04-06T05:14:53.9852850Z [32mRun command: ansible-playbook lookup_dependent-vjzxhqmn.yml -i inventory --diff -v[0m
2021-04-06T05:14:54.8912378Z [0;34mUsing /root/ansible_collections/felixfontein/tools/tests/output/.tmp/integration/lookup_dependent-ks276ivk-ÅÑŚÌβŁÈ/tests/integration/integration.cfg as config file[0m
2021-04-06T05:14:55.2949931Z /root/ansible/lib/ansible/parsing/vault/__init__.py:44: CryptographyDeprecationWarning: Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.
2021-04-06T05:14:55.2951594Z from cryptography.exceptions import InvalidSignature
2021-04-06T05:14:55.2953161Z [1;35m[WARNING]: running playbook inside collection felixfontein.tools[0m
2021-04-06T05:14:55.3926942Z
2021-04-06T05:14:55.3932048Z PLAY [testhost] ****************************************************************
2021-04-06T05:14:55.4154241Z
2021-04-06T05:14:55.4155440Z TASK [Gathering Facts] *********************************************************
2021-04-06T05:14:56.5738357Z [0;32mok: [testhost][0m
2021-04-06T05:14:56.6180053Z
2021-04-06T05:14:56.6181844Z TASK [lookup_dependent : Test 1: using with with_dependent] ********************
2021-04-06T05:14:56.8097679Z [0;32mok: [testhost] => (item={0: 1, 1: 4, 2: 41}) => {[0m
2021-04-06T05:14:56.8098356Z [0;32m "msg": "1 4 41"[0m
2021-04-06T05:14:56.8098836Z [0;32m}[0m
2021-04-06T05:14:56.8185068Z [0;32mok: [testhost] => (item={0: 1, 1: 7, 2: 71}) => {[0m
2021-04-06T05:14:56.8185700Z [0;32m "msg": "1 7 71"[0m
2021-04-06T05:14:56.8186194Z [0;32m}[0m
2021-04-06T05:14:56.8209276Z [0;32mok: [testhost] => (item={0: 2, 1: 5, 2: 52}) => {[0m
2021-04-06T05:14:56.8209986Z [0;32m "msg": "2 5 52"[0m
2021-04-06T05:14:56.8210507Z [0;32m}[0m
2021-04-06T05:14:56.8232524Z [0;32mok: [testhost] => (item={0: 2, 1: 8, 2: 82}) => {[0m
2021-04-06T05:14:56.8233198Z [0;32m "msg": "2 8 82"[0m
2021-04-06T05:14:56.8233698Z [0;32m}[0m
2021-04-06T05:14:56.8253803Z [0;31mERROR! A worker was found in a dead state[0m
2021-04-06T05:14:57.4619523Z [35mWARNING: Retrying test target "lookup_dependent" with maximum verbosity.[0m
(there is no more output with higher verbosity, so I'm skipping the remainder)
```
|
https://github.com/ansible/ansible/issues/74149
|
https://github.com/ansible/ansible/pull/74156
|
d1842afd59ea087b0d2c5081d1c6cbc295c57aba
|
96f94c0fef18cf25f772a2b241b9c4ce9ab9b74e
| 2021-04-06T05:23:55Z |
python
| 2021-04-06T16:26:52Z |
changelogs/fragments/fix-for-workerprocess-stdout-deadlock-fix.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,149 |
Python 3.5: 'ERROR! A worker was found in a dead state' showing up a lot more often
|
### Summary
In several collection CIs, @briantist and me noticed that `ERROR! A worker was found in a dead state` started showing up a lot more often since < 24 h, but only with Python 3.5.
In the CIs I've observed this in, `ansible-test` is invoked with `ansible-test integration -v --color --retry-on-error --continue-on-error --diff --python 3.5 --docker` and running itself under Python 3.8.
### Issue Type
Bug Report
### Component Name
core / ansible-test
### Ansible Version
```console (paste below)
stable-2.11
devel
```
### Configuration
...
### OS / Environment
...
### Steps to Reproduce
...
### Expected Results
...
### Actual Results
A run from felixfontein/ansible-tools:
```
2021-04-06T05:13:49.6948640Z ##[group]Run actions/setup-python@v2
2021-04-06T05:13:49.6949245Z with:
2021-04-06T05:13:49.6949727Z python-version: 3.8
2021-04-06T05:13:49.6950709Z token: ***
2021-04-06T05:13:49.6951172Z ##[endgroup]
2021-04-06T05:13:49.7678082Z Successfully setup CPython (3.8.8)
2021-04-06T05:13:49.7747685Z ##[group]Run pip install https://github.com/ansible/ansible/archive/stable-2.11.tar.gz --disable-pip-version-check
2021-04-06T05:13:49.7749339Z [36;1mpip install https://github.com/ansible/ansible/archive/stable-2.11.tar.gz --disable-pip-version-check[0m
2021-04-06T05:13:49.7796866Z shell: /usr/bin/bash -e {0}
2021-04-06T05:13:49.7797401Z env:
2021-04-06T05:13:49.7798043Z pythonLocation: /opt/hostedtoolcache/Python/3.8.8/x64
2021-04-06T05:13:49.7799087Z LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.8/x64/lib
2021-04-06T05:13:49.7799732Z ##[endgroup]
2021-04-06T05:13:50.3521030Z Collecting https://github.com/ansible/ansible/archive/stable-2.11.tar.gz
2021-04-06T05:13:50.3939817Z Downloading https://github.com/ansible/ansible/archive/stable-2.11.tar.gz (5.9 MB)
2021-04-06T05:13:53.1193924Z Collecting jinja2
2021-04-06T05:13:53.1433511Z Downloading Jinja2-2.11.3-py2.py3-none-any.whl (125 kB)
2021-04-06T05:13:53.2652915Z Collecting PyYAML
2021-04-06T05:13:53.2697213Z Downloading PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl (662 kB)
2021-04-06T05:13:53.8364433Z Collecting cryptography
2021-04-06T05:13:53.8426076Z Downloading cryptography-3.4.7-cp36-abi3-manylinux2014_x86_64.whl (3.2 MB)
2021-04-06T05:13:54.0112801Z Collecting packaging
2021-04-06T05:13:54.0154167Z Downloading packaging-20.9-py2.py3-none-any.whl (40 kB)
2021-04-06T05:13:54.0489830Z Collecting resolvelib<0.6.0,>=0.5.3
2021-04-06T05:13:54.0530164Z Downloading resolvelib-0.5.4-py2.py3-none-any.whl (12 kB)
2021-04-06T05:13:54.9601955Z Collecting cffi>=1.12
2021-04-06T05:13:54.9613930Z Downloading cffi-1.14.5-cp38-cp38-manylinux1_x86_64.whl (411 kB)
2021-04-06T05:13:54.9615051Z Collecting pycparser
2021-04-06T05:13:54.9616413Z Downloading pycparser-2.20-py2.py3-none-any.whl (112 kB)
2021-04-06T05:13:54.9618703Z Collecting MarkupSafe>=0.23
2021-04-06T05:13:54.9621238Z Downloading MarkupSafe-1.1.1-cp38-cp38-manylinux2010_x86_64.whl (32 kB)
2021-04-06T05:13:54.9624670Z Collecting pyparsing>=2.0.2
2021-04-06T05:13:54.9625789Z Downloading pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
2021-04-06T05:13:54.9627021Z Using legacy 'setup.py install' for ansible-core, since package 'wheel' is not installed.
2021-04-06T05:13:54.9628563Z Installing collected packages: pycparser, pyparsing, MarkupSafe, cffi, resolvelib, PyYAML, packaging, jinja2, cryptography, ansible-core
2021-04-06T05:13:55.5541871Z Running setup.py install for ansible-core: started
2021-04-06T05:13:57.5371824Z Running setup.py install for ansible-core: finished with status 'done'
2021-04-06T05:13:57.5845493Z Successfully installed MarkupSafe-1.1.1 PyYAML-5.4.1 ansible-core-2.11.0rc1.post0 cffi-1.14.5 cryptography-3.4.7 jinja2-2.11.3 packaging-20.9 pycparser-2.20 pyparsing-2.4.7 resolvelib-0.5.4
2021-04-06T05:13:57.8352266Z ##[group]Run git clone --depth=1 --single-branch https://github.com/ansible-collections/community.general.git ./ansible_collections/community/general
2021-04-06T05:13:57.8354189Z [36;1mgit clone --depth=1 --single-branch https://github.com/ansible-collections/community.general.git ./ansible_collections/community/general[0m
2021-04-06T05:13:57.8399226Z shell: /usr/bin/bash -e {0}
2021-04-06T05:13:57.8399628Z env:
2021-04-06T05:13:57.8400399Z pythonLocation: /opt/hostedtoolcache/Python/3.8.8/x64
2021-04-06T05:13:57.8401183Z LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.8/x64/lib
2021-04-06T05:13:57.8401733Z ##[endgroup]
2021-04-06T05:13:57.8489367Z Cloning into './ansible_collections/community/general'...
2021-04-06T05:13:58.4851582Z ##[group]Run ansible-test integration -v --color --retry-on-error --continue-on-error --diff --python 3.5 --docker --coverage
2021-04-06T05:13:58.4853106Z [36;1mansible-test integration -v --color --retry-on-error --continue-on-error --diff --python 3.5 --docker --coverage[0m
2021-04-06T05:13:58.4901623Z shell: /usr/bin/bash -e {0}
2021-04-06T05:13:58.4902051Z env:
2021-04-06T05:13:58.4902616Z pythonLocation: /opt/hostedtoolcache/Python/3.8.8/x64
2021-04-06T05:13:58.4903359Z LD_LIBRARY_PATH: /opt/hostedtoolcache/Python/3.8.8/x64/lib
2021-04-06T05:13:58.4903918Z ##[endgroup]
2021-04-06T05:13:58.7271498Z [32mFalling back to tests in "tests/integration/targets/" because "roles/test/" was not found.[0m
2021-04-06T05:13:58.7302044Z [32mRun command: docker images quay.io/ansible/default-test-container:3.1.0 --format '{{json .}}'[0m
2021-04-06T05:13:59.0199281Z [32mRun command: docker pull quay.io/ansible/default-test-container:3.1.0[0m
2021-04-06T05:13:59.2784910Z 3.1.0: Pulling from ansible/default-test-container
2021-04-06T05:13:59.2810454Z f22ccc0b8772: Pulling fs layer
[...]
2021-04-06T05:14:30.2046776Z b799b5892cf0: Pull complete
2021-04-06T05:14:30.2099406Z Digest: sha256:a88f080db38e6aefc446f45ebfcc861f34e71d278b97c47fdc653f63062b8f12
2021-04-06T05:14:30.2118420Z Status: Downloaded newer image for quay.io/ansible/default-test-container:3.1.0
2021-04-06T05:14:30.2161129Z quay.io/ansible/default-test-container:3.1.0
2021-04-06T05:14:30.2400654Z [32mScanning collection root: /home/runner/work/ansible-tools/ansible-tools/ansible_collections[0m
2021-04-06T05:14:30.2402148Z [32mRun command: git ls-files -z --cached --others --exclude-standard[0m
2021-04-06T05:14:30.2573188Z [32mRun command: git ls-files -z --deleted[0m
2021-04-06T05:14:30.2772983Z [32mRun command: git submodule status --recursive[0m
2021-04-06T05:14:30.3141641Z [32mIncluding collection: community.general (2745 files)[0m
2021-04-06T05:14:30.3143310Z [32mIncluding collection: felixfontein.tools (60 files)[0m
2021-04-06T05:14:30.3282673Z [32mCreating a payload archive containing 3614 files...[0m
2021-04-06T05:14:31.2530291Z [32mCreated a 3671942 byte payload archive containing 3614 files in 0 seconds.[0m
2021-04-06T05:14:31.2535617Z [32mAssuming Docker is available on localhost.[0m
2021-04-06T05:14:31.2539218Z [32mRun command: docker run --detach --volume /sys/fs/cgroup:/sys/fs/cgroup:ro --privileged=false --security-opt seccomp=unconfined --volume /var/run/docker.sock:/var/run/docker.sock quay.io/ansible/default-test-container:3.1.0[0m
2021-04-06T05:14:34.0402998Z [32mRun command: ssh-keygen -m PEM -q -t rsa -N '' -f /home/runner/.ansible/test/id_rsa[0m
2021-04-06T05:14:34.6922757Z [32mRun command: docker exec -i 56324fe634d11b359151a72528ec6fa065bdd45405216bbbed205a08ac59e959 /bin/sh[0m
2021-04-06T05:14:34.8058311Z [32mRun command: docker exec -i 56324fe634d11b359151a72528ec6fa065bdd45405216bbbed205a08ac59e959 dd of=/root/test.tgz bs=65536[0m
2021-04-06T05:14:34.9300143Z [32mRun command: docker exec 56324fe634d11b359151a72528ec6fa065bdd45405216bbbed205a08ac59e959 tar oxzf /root/test.tgz -C /root[0m
2021-04-06T05:14:35.2485284Z [32mRun command: docker exec 56324fe634d11b359151a72528ec6fa065bdd45405216bbbed205a08ac59e959 /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/felixfontein/tools LC_ALL=en_US.UTF-8 /usr/bin/python3.5 /root/ansible/bin/ansible-test integration -v --retry-on-error --continue-on-error --diff --python 3.5 --coverage --metadata tests/output/.tmp/metadata-l3bovzqn.json --truncate 0 --redact --color yes --requirements --coverage-label docker-default --allow-destructive[0m
2021-04-06T05:14:35.6415061Z [32mFalling back to tests in "tests/integration/targets/" because "roles/test/" was not found.[0m
2021-04-06T05:14:35.6432923Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/ansible-test.txt[0m
2021-04-06T05:14:36.3863248Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check setuptools -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt[0m
2021-04-06T05:14:37.1962822Z [32mRun command: /usr/bin/python3.5 -c 'import setuptools; print(setuptools.__version__)'[0m
2021-04-06T05:14:37.4053429Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/sslcheck.py[0m
2021-04-06T05:14:37.4528938Z [32mDetected OpenSSL version 1.1.1 under Python 3.5.[0m
2021-04-06T05:14:37.4534211Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check 'cryptography < 3.4' -c /root/ansible/test/lib/ansible_test/_data/cryptography-constraints.txt[0m
2021-04-06T05:14:38.1937829Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/integration.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt[0m
2021-04-06T05:14:39.0080553Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py check --disable-pip-version-check[0m
2021-04-06T05:14:39.5189977Z [32mRun command: /usr/bin/python3.5 /root/ansible/test/lib/ansible_test/_data/yamlcheck.py[0m
2021-04-06T05:14:40.1024105Z Running filter_dict integration test role
2021-04-06T05:14:40.1145356Z [32mInjecting "/tmp/python-9p9xd2z3-ansible/python" as a execv wrapper for the "/usr/bin/python3.5" interpreter.[0m
2021-04-06T05:14:40.1149207Z [32mRun command: ansible-playbook filter_dict-y0kg6mvg.yml -i inventory --diff -v[0m
2021-04-06T05:14:41.2119324Z [0;34mUsing /root/ansible_collections/felixfontein/tools/tests/output/.tmp/integration/filter_dict-drb3a1sm-ÅÑŚÌβŁÈ/tests/integration/integration.cfg as config file[0m
2021-04-06T05:14:41.6551038Z /root/ansible/lib/ansible/parsing/vault/__init__.py:44: CryptographyDeprecationWarning: Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.
2021-04-06T05:14:41.6552634Z from cryptography.exceptions import InvalidSignature
2021-04-06T05:14:41.6554204Z [1;35m[WARNING]: running playbook inside collection felixfontein.tools[0m
2021-04-06T05:14:41.8362461Z
2021-04-06T05:14:41.8363128Z PLAY [testhost] ****************************************************************
2021-04-06T05:14:41.8781439Z
2021-04-06T05:14:41.8782271Z TASK [Gathering Facts] *********************************************************
2021-04-06T05:14:43.0374223Z [0;32mok: [testhost][0m
2021-04-06T05:14:43.0789754Z
2021-04-06T05:14:43.0791127Z TASK [filter_dict : Test list_to_dict filter] **********************************
2021-04-06T05:14:43.4940300Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:43.4940995Z [0;32m "changed": false,[0m
2021-04-06T05:14:43.4941998Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:43.4942559Z [0;32m}[0m
2021-04-06T05:14:43.5134985Z
2021-04-06T05:14:43.5135725Z TASK [filter_dict : Test dict filter] ******************************************
2021-04-06T05:14:43.9143949Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:43.9144634Z [0;32m "changed": false,[0m
2021-04-06T05:14:43.9145300Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:43.9145856Z [0;32m}[0m
2021-04-06T05:14:43.9692771Z
2021-04-06T05:14:43.9693487Z PLAY RECAP *********************************************************************
2021-04-06T05:14:43.9694972Z [0;32mtesthost[0m : [0;32mok=3 [0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
2021-04-06T05:14:43.9695532Z
2021-04-06T05:14:44.6182759Z Running filter_domain_suffix integration test role
2021-04-06T05:14:44.6206329Z [32mRun command: ansible-playbook filter_domain_suffix-mg3h6oek.yml -i inventory --diff -v[0m
2021-04-06T05:14:45.5306057Z [0;34mUsing /root/ansible_collections/felixfontein/tools/tests/output/.tmp/integration/filter_domain_suffix-nppzzuha-ÅÑŚÌβŁÈ/tests/integration/integration.cfg as config file[0m
2021-04-06T05:14:45.9180313Z /root/ansible/lib/ansible/parsing/vault/__init__.py:44: CryptographyDeprecationWarning: Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.
2021-04-06T05:14:45.9181828Z from cryptography.exceptions import InvalidSignature
2021-04-06T05:14:45.9183590Z [1;35m[WARNING]: running playbook inside collection felixfontein.tools[0m
2021-04-06T05:14:46.0064933Z
2021-04-06T05:14:46.0065754Z PLAY [testhost] ****************************************************************
2021-04-06T05:14:46.0250739Z
2021-04-06T05:14:46.0251719Z TASK [Gathering Facts] *********************************************************
2021-04-06T05:14:47.9723817Z [0;32mok: [testhost][0m
2021-04-06T05:14:47.9737138Z
2021-04-06T05:14:47.9738013Z TASK [filter_domain_suffix : Test dns_zone filter] *****************************
2021-04-06T05:14:47.9739170Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:47.9740135Z [0;32m "changed": false,[0m
2021-04-06T05:14:47.9741174Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:47.9742073Z [0;32m}[0m
2021-04-06T05:14:47.9742584Z
2021-04-06T05:14:47.9743343Z TASK [filter_domain_suffix : Test dns_zone_prefix filter] **********************
2021-04-06T05:14:48.2293291Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:48.2293949Z [0;32m "changed": false,[0m
2021-04-06T05:14:48.2294719Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:48.2295288Z [0;32m}[0m
2021-04-06T05:14:48.2496559Z
2021-04-06T05:14:48.2497247Z TASK [filter_domain_suffix : Test get_domain_suffix filter] ********************
2021-04-06T05:14:48.7374839Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:48.7375466Z [0;32m "changed": false,[0m
2021-04-06T05:14:48.7376098Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:48.7376855Z [0;32m}[0m
2021-04-06T05:14:48.7567646Z
2021-04-06T05:14:48.7568414Z TASK [filter_domain_suffix : Test remove_domain_suffix filter] *****************
2021-04-06T05:14:49.2268037Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:49.2268712Z [0;32m "changed": false,[0m
2021-04-06T05:14:49.2269471Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:49.2270026Z [0;32m}[0m
2021-04-06T05:14:49.2463827Z
2021-04-06T05:14:49.2464469Z TASK [filter_domain_suffix : Test registrable_domain filter] *******************
2021-04-06T05:14:49.7611216Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:49.7611967Z [0;32m "changed": false,[0m
2021-04-06T05:14:49.7612631Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:49.7613199Z [0;32m}[0m
2021-04-06T05:14:49.8152567Z
2021-04-06T05:14:49.8153305Z PLAY RECAP *********************************************************************
2021-04-06T05:14:49.8161623Z [0;32mtesthost[0m : [0;32mok=6 [0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
2021-04-06T05:14:49.8163251Z
2021-04-06T05:14:50.4611385Z Running filter_path_join_shim integration test role
2021-04-06T05:14:50.4634889Z [32mRun command: ansible-playbook filter_path_join_shim-n0es6gwd.yml -i inventory --diff -v[0m
2021-04-06T05:14:51.4093789Z [0;34mUsing /root/ansible_collections/felixfontein/tools/tests/output/.tmp/integration/filter_path_join_shim-06p9xgfj-ÅÑŚÌβŁÈ/tests/integration/integration.cfg as config file[0m
2021-04-06T05:14:51.8222028Z /root/ansible/lib/ansible/parsing/vault/__init__.py:44: CryptographyDeprecationWarning: Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.
2021-04-06T05:14:51.8223585Z from cryptography.exceptions import InvalidSignature
2021-04-06T05:14:51.8225626Z [1;35m[WARNING]: running playbook inside collection felixfontein.tools[0m
2021-04-06T05:14:51.9091083Z
2021-04-06T05:14:51.9091902Z PLAY [testhost] ****************************************************************
2021-04-06T05:14:51.9390767Z
2021-04-06T05:14:51.9391508Z TASK [Gathering Facts] *********************************************************
2021-04-06T05:14:53.1231842Z [0;32mok: [testhost][0m
2021-04-06T05:14:53.1644200Z
2021-04-06T05:14:53.1645785Z TASK [filter_path_join_shim : Test path_join filter] ***************************
2021-04-06T05:14:53.3062185Z [0;32mok: [testhost] => {[0m
2021-04-06T05:14:53.3062847Z [0;32m "changed": false,[0m
2021-04-06T05:14:53.3063607Z [0;32m "msg": "All assertions passed"[0m
2021-04-06T05:14:53.3064161Z [0;32m}[0m
2021-04-06T05:14:53.3620476Z
2021-04-06T05:14:53.3622373Z PLAY RECAP *********************************************************************
2021-04-06T05:14:53.3623877Z [0;32mtesthost[0m : [0;32mok=2 [0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
2021-04-06T05:14:53.3624738Z
2021-04-06T05:14:53.9806394Z Running lookup_dependent integration test role
2021-04-06T05:14:53.9852850Z [32mRun command: ansible-playbook lookup_dependent-vjzxhqmn.yml -i inventory --diff -v[0m
2021-04-06T05:14:54.8912378Z [0;34mUsing /root/ansible_collections/felixfontein/tools/tests/output/.tmp/integration/lookup_dependent-ks276ivk-ÅÑŚÌβŁÈ/tests/integration/integration.cfg as config file[0m
2021-04-06T05:14:55.2949931Z /root/ansible/lib/ansible/parsing/vault/__init__.py:44: CryptographyDeprecationWarning: Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.
2021-04-06T05:14:55.2951594Z from cryptography.exceptions import InvalidSignature
2021-04-06T05:14:55.2953161Z [1;35m[WARNING]: running playbook inside collection felixfontein.tools[0m
2021-04-06T05:14:55.3926942Z
2021-04-06T05:14:55.3932048Z PLAY [testhost] ****************************************************************
2021-04-06T05:14:55.4154241Z
2021-04-06T05:14:55.4155440Z TASK [Gathering Facts] *********************************************************
2021-04-06T05:14:56.5738357Z [0;32mok: [testhost][0m
2021-04-06T05:14:56.6180053Z
2021-04-06T05:14:56.6181844Z TASK [lookup_dependent : Test 1: using with with_dependent] ********************
2021-04-06T05:14:56.8097679Z [0;32mok: [testhost] => (item={0: 1, 1: 4, 2: 41}) => {[0m
2021-04-06T05:14:56.8098356Z [0;32m "msg": "1 4 41"[0m
2021-04-06T05:14:56.8098836Z [0;32m}[0m
2021-04-06T05:14:56.8185068Z [0;32mok: [testhost] => (item={0: 1, 1: 7, 2: 71}) => {[0m
2021-04-06T05:14:56.8185700Z [0;32m "msg": "1 7 71"[0m
2021-04-06T05:14:56.8186194Z [0;32m}[0m
2021-04-06T05:14:56.8209276Z [0;32mok: [testhost] => (item={0: 2, 1: 5, 2: 52}) => {[0m
2021-04-06T05:14:56.8209986Z [0;32m "msg": "2 5 52"[0m
2021-04-06T05:14:56.8210507Z [0;32m}[0m
2021-04-06T05:14:56.8232524Z [0;32mok: [testhost] => (item={0: 2, 1: 8, 2: 82}) => {[0m
2021-04-06T05:14:56.8233198Z [0;32m "msg": "2 8 82"[0m
2021-04-06T05:14:56.8233698Z [0;32m}[0m
2021-04-06T05:14:56.8253803Z [0;31mERROR! A worker was found in a dead state[0m
2021-04-06T05:14:57.4619523Z [35mWARNING: Retrying test target "lookup_dependent" with maximum verbosity.[0m
(there is no more output with higher verbosity, so I'm skipping the remainder)
```
|
https://github.com/ansible/ansible/issues/74149
|
https://github.com/ansible/ansible/pull/74156
|
d1842afd59ea087b0d2c5081d1c6cbc295c57aba
|
96f94c0fef18cf25f772a2b241b9c4ce9ab9b74e
| 2021-04-06T05:23:55Z |
python
| 2021-04-06T16:26:52Z |
lib/ansible/executor/process/worker.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import sys
import traceback
from jinja2.exceptions import TemplateNotFound
# Whether pycrypto's fork-aware RNG reseeding hook is importable; checked in
# WorkerProcess._run() so each forked child reseeds its RNG.
HAS_PYCRYPTO_ATFORK = False
try:
    from Crypto.Random import atfork
    HAS_PYCRYPTO_ATFORK = True
except Exception:
    # Broad except on purpose: a broken pycrypto install can raise more than
    # ImportError, and missing pycrypto is perfectly fine.
    # We only need to call atfork if pycrypto is used because it will need to
    # reinitialize its RNG.  Since old paramiko could be using pycrypto, we
    # need to take charge of calling it.
    pass
from ansible.errors import AnsibleConnectionFailure
from ansible.executor.task_executor import TaskExecutor
from ansible.executor.task_result import TaskResult
from ansible.module_utils._text import to_text
from ansible.utils.display import Display
from ansible.utils.multiprocessing import context as multiprocessing_context
__all__ = ['WorkerProcess']
display = Display()
class WorkerProcess(multiprocessing_context.Process):
    '''
    The worker process class, which uses TaskExecutor to run a single task
    for a single host and pushes the result into the shared results queue
    for the strategy to read later.
    '''

    def __init__(self, final_q, task_vars, host, task, play_context, loader, variable_manager, shared_loader_obj):
        '''
        :arg final_q: shared results queue; results are sent back to the
            parent via ``final_q.send_task_result(...)``.
        :arg task_vars: variables computed for this host/task combination.
        :arg host: the host this worker executes against.
        :arg task: the task object to execute.
        :arg play_context: connection/privilege-escalation context.
        :arg loader: DataLoader instance (shared with the parent via fork).
        :arg variable_manager: the play's VariableManager.
        :arg shared_loader_obj: plugin loader container.
        '''
        super(WorkerProcess, self).__init__()
        # takes a task queue manager as the sole param:
        self._final_q = final_q
        self._task_vars = task_vars
        self._host = host
        self._task = task
        self._play_context = play_context
        self._loader = loader
        self._variable_manager = variable_manager
        self._shared_loader_obj = shared_loader_obj

        # NOTE: this works due to fork, if switching to threads this should change to per thread storage of temp files
        # clear var to ensure we only delete files for this child
        self._loader._tempfiles = set()

    def _save_stdin(self):
        '''
        Stash a dup of the parent's stdin in ``self._new_stdin`` when stdin is
        an interactive tty; otherwise leave the ``os.devnull`` sentinel string.
        '''
        self._new_stdin = os.devnull
        try:
            if sys.stdin.isatty() and sys.stdin.fileno() is not None:
                try:
                    self._new_stdin = os.fdopen(os.dup(sys.stdin.fileno()))
                except OSError:
                    # couldn't dupe stdin, most likely because it's
                    # not a valid file descriptor, so we just rely on
                    # using the one that was passed in
                    pass
        except (AttributeError, ValueError):
            # couldn't get stdin's fileno, so we just carry on
            pass

    def start(self):
        '''
        multiprocessing.Process replaces the worker's stdin with a new file
        opened on os.devnull, but we wish to preserve it if it is connected to
        a terminal. Therefore dup a copy prior to calling the real start(),
        ensuring the descriptor is preserved somewhere in the new child, and
        make sure it is closed in the parent when start() completes.
        '''
        self._save_stdin()
        try:
            return super(WorkerProcess, self).start()
        finally:
            # Only a real dup'd file object needs closing; the sentinel is a str.
            if self._new_stdin != os.devnull:
                self._new_stdin.close()

    def _hard_exit(self, e):
        '''
        There is no safe exception to return to higher level code that does not
        risk an innocent try/except finding itself executing in the wrong
        process. All code executing above WorkerProcess.run() on the stack
        conceptually belongs to another program.
        '''
        try:
            display.debug(u"WORKER HARD EXIT: %s" % to_text(e))
        except BaseException:
            # If the cause of the fault is IOError being generated by stdio,
            # attempting to log a debug message may trigger another IOError.
            # Try printing once then give up.
            pass

        # os._exit skips atexit/exception machinery entirely, on purpose.
        os._exit(1)

    def run(self):
        '''
        Wrap _run() to ensure no possibility an errant exception can cause
        control to return to the StrategyBase task loop, or any other code
        higher in the stack.

        As multiprocessing in Python 2.x provides no protection, it is possible
        a try/except added in far-away code can cause a crashed child process
        to suddenly assume the role and prior state of its parent.
        '''
        try:
            return self._run()
        except BaseException as e:
            self._hard_exit(e)
        finally:
            # This is a hack, pure and simple, to work around a potential deadlock
            # in ``multiprocessing.Process`` when flushing stdout/stderr during process
            # shutdown. We have various ``Display`` calls that may fire from a fork
            # so we cannot do this early. Instead, this happens at the very end
            # to avoid that deadlock, by simply side stepping it. This should not be
            # treated as a long term fix.
            #
            # Bug fix: the previous code assigned the *string* ``os.devnull`` to
            # ``sys.stdout``/``sys.stderr``. Any subsequent ``flush()``/``write()``
            # (e.g. from multiprocessing's exit handlers) then raised
            # AttributeError on a str and killed the worker, surfacing as
            # "A worker was found in a dead state". Bind a real writable file
            # object opened on os.devnull instead.
            #
            # Additionally this behavior only presents itself on Python3. Python2
            # does not exhibit the deadlock behavior.
            # TODO: Evaluate overhauling ``Display`` to not write directly to stdout
            # and evaluate migrating away from the ``fork`` multiprocessing start method.
            if sys.version_info[0] >= 3:
                sys.stdout = sys.stderr = open(os.devnull, 'w')

    def _run(self):
        '''
        Called when the process is started. Pushes the result onto the
        results queue. We also remove the host from the blocked hosts list, to
        signify that they are ready for their next task.
        '''
        # import cProfile, pstats, StringIO
        # pr = cProfile.Profile()
        # pr.enable()

        # Reseed pycrypto's RNG in the forked child, if pycrypto is present.
        if HAS_PYCRYPTO_ATFORK:
            atfork()

        try:
            # execute the task and build a TaskResult from the result
            display.debug("running TaskExecutor() for %s/%s" % (self._host, self._task))
            executor_result = TaskExecutor(
                self._host,
                self._task,
                self._task_vars,
                self._play_context,
                self._new_stdin,
                self._loader,
                self._shared_loader_obj,
                self._final_q
            ).run()

            display.debug("done running TaskExecutor() for %s/%s [%s]" % (self._host, self._task, self._task._uuid))
            # Strip host vars/groups before pickling the result back to the
            # parent, to keep the queued payload small.
            self._host.vars = dict()
            self._host.groups = []

            # put the result on the result queue
            display.debug("sending task result for task %s" % self._task._uuid)
            self._final_q.send_task_result(
                self._host.name,
                self._task._uuid,
                executor_result,
                task_fields=self._task.dump_attrs(),
            )
            display.debug("done sending task result for task %s" % self._task._uuid)

        except AnsibleConnectionFailure:
            self._host.vars = dict()
            self._host.groups = []
            self._final_q.send_task_result(
                self._host.name,
                self._task._uuid,
                dict(unreachable=True),
                task_fields=self._task.dump_attrs(),
            )

        except Exception as e:
            # Stdio/interrupt errors are expected during shutdown and are not
            # reported as task failures; everything else becomes failed=True.
            if not isinstance(e, (IOError, EOFError, KeyboardInterrupt, SystemExit)) or isinstance(e, TemplateNotFound):
                try:
                    self._host.vars = dict()
                    self._host.groups = []
                    self._final_q.send_task_result(
                        self._host.name,
                        self._task._uuid,
                        dict(failed=True, exception=to_text(traceback.format_exc()), stdout=''),
                        task_fields=self._task.dump_attrs(),
                    )
                except Exception:
                    display.debug(u"WORKER EXCEPTION: %s" % to_text(e))
                    display.debug(u"WORKER TRACEBACK: %s" % to_text(traceback.format_exc()))
        finally:
            self._clean_up()

        display.debug("WORKER PROCESS EXITING")

        # pr.disable()
        # s = StringIO.StringIO()
        # sortby = 'time'
        # ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
        # ps.print_stats()
        # with open('worker_%06d.stats' % os.getpid(), 'w') as f:
        #     f.write(s.getvalue())

    def _clean_up(self):
        # NOTE: see note in init about forks
        # ensure we cleanup all temp files for this worker
        self._loader.cleanup_all_tmp_files()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,137 |
task contains deprecated call to be removed in 2.12
|
##### SUMMARY
task contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/playbook/task.py:267:20: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/task.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74137
|
https://github.com/ansible/ansible/pull/74154
|
7443e9d659e48d8a4096d4f64d4610f38188d405
|
45ab6fddf30aeb696ea22014bdc6b3da1919d598
| 2021-04-05T20:33:59Z |
python
| 2021-04-08T19:09:45Z |
changelogs/fragments/deprecated_task_parameters.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,137 |
task contains deprecated call to be removed in 2.12
|
##### SUMMARY
task contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/playbook/task.py:267:20: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/task.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74137
|
https://github.com/ansible/ansible/pull/74154
|
7443e9d659e48d8a4096d4f64d4610f38188d405
|
45ab6fddf30aeb696ea22014bdc6b3da1919d598
| 2021-04-05T20:33:59Z |
python
| 2021-04-08T19:09:45Z |
lib/ansible/playbook/role/__init__.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleAssertionError
from ansible.module_utils._text import to_text
from ansible.module_utils.six import iteritems, binary_type, text_type
from ansible.module_utils.common._collections_compat import Container, Mapping, Set, Sequence
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.conditional import Conditional
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.role.metadata import RoleMetadata
from ansible.playbook.taggable import Taggable
from ansible.plugins.loader import add_all_plugin_dirs
from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.vars import combine_vars
__all__ = ['Role', 'hash_params']
# TODO: this should be a utility function, but can't be a member of
# the role due to the fact that it would require the use of self
# in a static method. This is also used in the base class for
# strategies (ansible/plugins/strategy/__init__.py)
def hash_params(params):
    """
    Construct a hashable view of a role's parameters.

    Mutable containers are recursively converted into frozensets so that two
    parameter sets can be compared and used as cache keys. Unhashable scalars
    are not handled: nothing in the YAML should produce one, and the only
    alternative would be raising an error anyway.
    """
    # Scalars — including strings/bytes, which are technically Containers —
    # hash as a single-element frozenset.
    if not isinstance(params, Container) or isinstance(params, (text_type, binary_type)):
        return frozenset((params,))

    if isinstance(params, Mapping):
        try:
            # Fast path: every value is already hashable.
            return frozenset(params.items())
        except TypeError:
            # Slow path: recursively freeze each value.
            return frozenset((key, hash_params(value)) for key, value in params.items())

    if isinstance(params, (Set, Sequence)):
        try:
            # Fast path: every element is already hashable.
            return frozenset(params)
        except TypeError:
            # Slow path: recursively freeze each element.
            return frozenset(hash_params(item) for item in params)

    # Unknown container type; this is just a guess that its contents hash.
    return frozenset(params)
class Role(Base, Conditional, Taggable, CollectionSearch):
    """A fully loaded role: its vars, defaults, metadata, task/handler blocks and dependencies."""

    _delegate_to = FieldAttribute(isa='string')
    _delegate_facts = FieldAttribute(isa='bool')

    def __init__(self, play=None, from_files=None, from_include=False, validate=True):
        # Identity and on-disk location; populated by _load_role_data().
        self._role_name = None
        self._role_path = None
        self._role_collection = None
        self._role_params = dict()
        self._loader = None

        # RoleMetadata parsed from meta/main.yml (set in _load_role_data()).
        self._metadata = None
        self._play = play
        # Roles that included/depend on this role, and this role's own deps.
        self._parents = []
        self._dependencies = []
        # Blocks compiled from tasks/ and handlers/ files.
        self._task_blocks = []
        self._handler_blocks = []
        self._compiled_handler_blocks = None
        # Variable dictionaries from defaults/ and vars/.
        self._default_vars = dict()
        self._role_vars = dict()
        # Per-host execution bookkeeping (keyed by host name).
        self._had_task_run = dict()
        self._completed = dict()
        # When True, an argument-spec validation task may be prepended to the task list.
        self._should_validate = validate

        if from_files is None:
            from_files = {}
        # Overrides for which file to load per subdir (tasks_from/vars_from/...).
        self._from_files = from_files

        # Indicates whether this role was included via include/import_role
        self.from_include = from_include

        super(Role, self).__init__()
    def __repr__(self):
        # Represent the role by its (possibly collection-qualified) name.
        return self.get_name()
def get_name(self, include_role_fqcn=True):
if include_role_fqcn:
return '.'.join(x for x in (self._role_collection, self._role_name) if x)
return self._role_name
    @staticmethod
    def load(role_include, play, parent_role=None, from_files=None, from_include=False, validate=True):
        """Return a Role for the given RoleInclude, reusing the play's role cache when possible."""

        if from_files is None:
            from_files = {}
        try:
            # The ROLE_CACHE is a dictionary of role names, with each entry
            # containing another dictionary corresponding to a set of parameters
            # specified for a role as the key and the Role() object itself.
            # We use frozenset to make the dictionary hashable.

            params = role_include.get_role_params()
            if role_include.when is not None:
                params['when'] = role_include.when
            if role_include.tags is not None:
                params['tags'] = role_include.tags
            if from_files is not None:
                params['from_files'] = from_files
            if role_include.vars:
                params['vars'] = role_include.vars

            params['from_include'] = from_include

            hashed_params = hash_params(params)
            # Cache hit: reuse the previously-loaded Role object for identical params.
            if role_include.get_name() in play.ROLE_CACHE:
                for (entry, role_obj) in iteritems(play.ROLE_CACHE[role_include.get_name()]):
                    if hashed_params == entry:
                        if parent_role:
                            role_obj.add_parent(parent_role)
                        return role_obj

            # TODO: need to fix cycle detection in role load (maybe use an empty dict
            # for the in-flight in role cache as a sentinel that we're already trying to load
            # that role?)
            # see https://github.com/ansible/ansible/issues/61527
            r = Role(play=play, from_files=from_files, from_include=from_include, validate=validate)
            r._load_role_data(role_include, parent_role=parent_role)

            if role_include.get_name() not in play.ROLE_CACHE:
                play.ROLE_CACHE[role_include.get_name()] = dict()

            # FIXME: how to handle cache keys for collection-based roles, since they're technically adjustable per task?
            play.ROLE_CACHE[role_include.get_name()][hashed_params] = r
            return r

        except RuntimeError:
            # Deep dependency recursion manifests as a RuntimeError (recursion limit).
            raise AnsibleError("A recursion loop was detected with the roles specified. Make sure child roles do not have dependencies on parent roles",
                               obj=role_include._ds)
    def _load_role_data(self, role_include, parent_role=None):
        """Populate this Role from a RoleInclude: vars, defaults, metadata, collections, tasks and handlers."""
        self._role_name = role_include.role
        self._role_path = role_include.get_role_path()
        self._role_collection = role_include._role_collection
        self._role_params = role_include.get_role_params()
        self._variable_manager = role_include.get_variable_manager()
        self._loader = role_include.get_loader()

        if parent_role:
            self.add_parent(parent_role)

        # copy over all field attributes from the RoleInclude
        # update self._attributes directly, to avoid squashing
        for (attr_name, _) in iteritems(self._valid_attrs):
            if attr_name in ('when', 'tags'):
                # 'when' and 'tags' accumulate rather than overwrite
                self._attributes[attr_name] = self._extend_value(
                    self._attributes[attr_name],
                    role_include._attributes[attr_name],
                )
            else:
                self._attributes[attr_name] = role_include._attributes[attr_name]

        # vars and default vars are regular dictionaries
        self._role_vars = self._load_role_yaml('vars', main=self._from_files.get('vars'), allow_dir=True)
        if self._role_vars is None:
            self._role_vars = {}
        elif not isinstance(self._role_vars, Mapping):
            raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

        self._default_vars = self._load_role_yaml('defaults', main=self._from_files.get('defaults'), allow_dir=True)
        if self._default_vars is None:
            self._default_vars = {}
        elif not isinstance(self._default_vars, Mapping):
            raise AnsibleParserError("The defaults/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

        # load the role's other files, if they exist
        metadata = self._load_role_yaml('meta')
        if metadata:
            self._metadata = RoleMetadata.load(metadata, owner=self, variable_manager=self._variable_manager, loader=self._loader)
            # dependencies are only loaded when metadata exists, since they come from it
            self._dependencies = self._load_dependencies()
        else:
            self._metadata = RoleMetadata()

        # reset collections list; roles do not inherit collections from parents, just use the defaults
        # FUTURE: use a private config default for this so we can allow it to be overridden later
        self.collections = []

        # configure plugin/collection loading; either prepend the current role's collection or configure legacy plugin loading
        # FIXME: need exception for explicit ansible.legacy?
        if self._role_collection:  # this is a collection-hosted role
            self.collections.insert(0, self._role_collection)
        else:  # this is a legacy role, but set the default collection if there is one
            default_collection = AnsibleCollectionConfig.default_collection
            if default_collection:
                self.collections.insert(0, default_collection)
            # legacy role, ensure all plugin dirs under the role are added to plugin search path
            add_all_plugin_dirs(self._role_path)

        # collections can be specified in metadata for legacy or collection-hosted roles
        if self._metadata.collections:
            self.collections.extend((c for c in self._metadata.collections if c not in self.collections))

        # if any collections were specified, ensure that core or legacy synthetic collections are always included
        if self.collections:
            # default append collection is core for collection-hosted roles, legacy for others
            default_append_collection = 'ansible.builtin' if self._role_collection else 'ansible.legacy'
            if 'ansible.builtin' not in self.collections and 'ansible.legacy' not in self.collections:
                self.collections.append(default_append_collection)

        task_data = self._load_role_yaml('tasks', main=self._from_files.get('tasks'))

        # optionally prepend an argument-spec validation task before the role's own tasks
        if self._should_validate:
            task_data = self._prepend_validation_task(task_data)

        if task_data:
            try:
                self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader, variable_manager=self._variable_manager)
            except AssertionError as e:
                raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks" % self._role_name,
                                         obj=task_data, orig_exc=e)

        handler_data = self._load_role_yaml('handlers', main=self._from_files.get('handlers'))
        if handler_data:
            try:
                self._handler_blocks = load_list_of_blocks(handler_data, play=self._play, role=self, use_handlers=True, loader=self._loader,
                                                           variable_manager=self._variable_manager)
            except AssertionError as e:
                raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks" % self._role_name,
                                         obj=handler_data, orig_exc=e)
def _prepend_validation_task(self, task_data):
'''Insert a role validation task if we have a role argument spec.
This method will prepend a validation task to the front of the role task
list to perform argument spec validation before any other tasks, if an arg spec
exists for the entry point. Entry point defaults to `main`.
:param task_data: List of tasks loaded from the role.
:returns: The (possibly modified) task list.
'''
if self._metadata.argument_specs:
# Determine the role entry point so we can retrieve the correct argument spec.
# This comes from the `tasks_from` value to include_role or import_role.
entrypoint = self._from_files.get('tasks', 'main')
entrypoint_arg_spec = self._metadata.argument_specs.get(entrypoint)
if entrypoint_arg_spec:
validation_task = self._create_validation_task(entrypoint_arg_spec, entrypoint)
# Prepend our validate_argument_spec action to happen before any tasks provided by the role.
# 'any tasks' can and does include 0 or None tasks, in which cases we create a list of tasks and add our
# validate_argument_spec task
if not task_data:
task_data = []
task_data.insert(0, validation_task)
return task_data
def _create_validation_task(self, argument_spec, entrypoint_name):
'''Create a new task data structure that uses the validate_argument_spec action plugin.
:param argument_spec: The arg spec definition for a particular role entry point.
This will be the entire arg spec for the entry point as read from the input file.
:param entrypoint_name: The name of the role entry point associated with the
supplied `argument_spec`.
'''
# If the arg spec provides a short description, use it to flesh out the validation task name
task_name = "Validating arguments against arg spec '%s'" % entrypoint_name
if 'short_description' in argument_spec:
task_name = task_name + ' - ' + argument_spec['short_description']
return {
'action': {
'module': 'ansible.builtin.validate_argument_spec',
# Pass only the 'options' portion of the arg spec to the module.
'argument_spec': argument_spec.get('options', {}),
'provided_arguments': self._role_params,
'validate_args_context': {
'type': 'role',
'name': self._role_name,
'argument_spec_name': entrypoint_name,
'path': self._role_path
},
},
'name': task_name,
}
    def _load_role_yaml(self, subdir, main=None, allow_dir=False):
        '''
        Find and load role YAML files and return data found.

        :param subdir: subdir of role to search (vars, files, tasks, handlers, defaults)
        :type subdir: string
        :param main: filename to match, will default to 'main.<ext>' if not provided.
        :type main: string
        :param allow_dir: If true we combine results of multiple matching files found.
                          If false, highlander rules. Only for vars(dicts) and not tasks(lists).
        :type allow_dir: bool

        :returns: data from the matched file(s), type can be dict or list depending on vars or tasks.
        '''
        data = None
        file_path = os.path.join(self._role_path, subdir)
        if self._loader.path_exists(file_path) and self._loader.is_directory(file_path):
            # Valid extensions and ordering for roles is hard-coded to maintain portability
            extensions = ['.yml', '.yaml', '.json']  # same as default for YAML_FILENAME_EXTENSIONS

            # look for files w/o extensions before/after bare name depending on it being set or not
            # keep 'main' as original to figure out errors if no files found
            if main is None:
                _main = 'main'
                extensions.append('')
            else:
                _main = main
                extensions.insert(0, '')

            # not really 'find_vars_files' but find_files_with_extensions_default_to_yaml_filename_extensions
            found_files = self._loader.find_vars_files(file_path, _main, extensions, allow_dir)
            if found_files:
                for found in found_files:
                    new_data = self._loader.load_from_file(found)
                    if new_data:
                        # merge dict results across files; otherwise last-loaded wins
                        if data is not None and isinstance(new_data, Mapping):
                            data = combine_vars(data, new_data)
                        else:
                            data = new_data

                        # found data so no need to continue unless we want to merge
                        if not allow_dir:
                            break

            elif main is not None:
                # this won't trigger with default only when <subdir>_from is specified
                raise AnsibleParserError("Could not find specified file in role: %s/%s" % (subdir, main))

        return data
def _load_dependencies(self):
'''
Recursively loads role dependencies from the metadata list of
dependencies, if it exists
'''
deps = []
if self._metadata:
for role_include in self._metadata.dependencies:
r = Role.load(role_include, play=self._play, parent_role=self)
deps.append(r)
return deps
    # other functions

    def add_parent(self, parent_role):
        ''' adds a role to the list of this roles parents '''
        # only Role instances may act as parents
        if not isinstance(parent_role, Role):
            raise AnsibleAssertionError()

        # avoid duplicate parent entries
        if parent_role not in self._parents:
            self._parents.append(parent_role)
    def get_parents(self):
        # Roles that included or depend on this role, in the order they were added.
        return self._parents
def get_default_vars(self, dep_chain=None):
dep_chain = [] if dep_chain is None else dep_chain
default_vars = dict()
for dep in self.get_all_dependencies():
default_vars = combine_vars(default_vars, dep.get_default_vars())
if dep_chain:
for parent in dep_chain:
default_vars = combine_vars(default_vars, parent._default_vars)
default_vars = combine_vars(default_vars, self._default_vars)
return default_vars
def get_inherited_vars(self, dep_chain=None):
dep_chain = [] if dep_chain is None else dep_chain
inherited_vars = dict()
if dep_chain:
for parent in dep_chain:
inherited_vars = combine_vars(inherited_vars, parent._role_vars)
return inherited_vars
def get_role_params(self, dep_chain=None):
dep_chain = [] if dep_chain is None else dep_chain
params = {}
if dep_chain:
for parent in dep_chain:
params = combine_vars(params, parent._role_params)
params = combine_vars(params, self._role_params)
return params
    def get_vars(self, dep_chain=None, include_params=True):
        """Return all vars visible to this role, merged in increasing precedence order."""
        dep_chain = [] if dep_chain is None else dep_chain

        # lowest precedence: vars inherited from the dependency chain
        all_vars = self.get_inherited_vars(dep_chain)

        # then vars from every (transitive) dependency
        for dep in self.get_all_dependencies():
            all_vars = combine_vars(all_vars, dep.get_vars(include_params=include_params))

        # then this role's play-level vars and its own vars/ files
        all_vars = combine_vars(all_vars, self.vars)
        all_vars = combine_vars(all_vars, self._role_vars)
        # highest precedence: role params, when requested
        if include_params:
            all_vars = combine_vars(all_vars, self.get_role_params(dep_chain=dep_chain))

        return all_vars
def get_direct_dependencies(self):
return self._dependencies[:]
def get_all_dependencies(self):
'''
Returns a list of all deps, built recursively from all child dependencies,
in the proper order in which they should be executed or evaluated.
'''
child_deps = []
for dep in self.get_direct_dependencies():
for child_dep in dep.get_all_dependencies():
child_deps.append(child_dep)
child_deps.append(dep)
return child_deps
def get_task_blocks(self):
return self._task_blocks[:]
    def get_handler_blocks(self, play, dep_chain=None):
        """Return the handler blocks for this role and its dependencies, cached after first call."""
        # Do not recreate this list each time ``get_handler_blocks`` is called.
        # Cache the results so that we don't potentially overwrite with copied duplicates
        #
        # ``get_handler_blocks`` may be called when handling ``import_role`` during parsing
        # as well as with ``Play.compile_roles_handlers`` from ``TaskExecutor``
        if self._compiled_handler_blocks:
            return self._compiled_handler_blocks

        # both names refer to the same list so appends below populate the cache
        self._compiled_handler_blocks = block_list = []

        # update the dependency chain here
        if dep_chain is None:
            dep_chain = []
        new_dep_chain = dep_chain + [self]

        # dependency handlers come before this role's own handlers
        for dep in self.get_direct_dependencies():
            dep_blocks = dep.get_handler_blocks(play=play, dep_chain=new_dep_chain)
            block_list.extend(dep_blocks)

        for task_block in self._handler_blocks:
            # copy so the stored blocks are not mutated with per-play state
            new_task_block = task_block.copy()
            new_task_block._dep_chain = new_dep_chain
            new_task_block._play = play
            block_list.append(new_task_block)

        return block_list
def has_run(self, host):
'''
Returns true if this role has been iterated over completely and
at least one task was run
'''
return host.name in self._completed and not self._metadata.allow_duplicates
    def compile(self, play, dep_chain=None):
        '''
        Returns the task list for this role, which is created by first
        recursively compiling the tasks for all direct dependencies, and
        then adding on the tasks for this role.

        The role compile() also remembers and saves the dependency chain
        with each task, so tasks know by which route they were found, and
        can correctly take their parent's tags/conditionals into account.
        '''
        # imported here to avoid a circular import with block/task modules
        from ansible.playbook.block import Block
        from ansible.playbook.task import Task

        block_list = []

        # update the dependency chain here
        if dep_chain is None:
            dep_chain = []
        new_dep_chain = dep_chain + [self]

        # dependency tasks run before this role's own tasks
        deps = self.get_direct_dependencies()
        for dep in deps:
            dep_blocks = dep.compile(play=play, dep_chain=new_dep_chain)
            block_list.extend(dep_blocks)

        for task_block in self._task_blocks:
            # copy so the stored blocks are not mutated with per-play state
            new_task_block = task_block.copy()
            new_task_block._dep_chain = new_dep_chain
            new_task_block._play = play
            block_list.append(new_task_block)

        # append an implicit end-of-role ("eor") marker block containing a single
        # 'meta: role_complete' task, so the strategy knows when the role finished
        eor_block = Block(play=play)
        eor_block._loader = self._loader
        eor_block._role = self
        eor_block._variable_manager = self._variable_manager
        eor_block.run_once = False
        eor_task = Task(block=eor_block)
        eor_task._role = self
        eor_task.action = 'meta'
        eor_task.args = {'_raw_params': 'role_complete'}
        eor_task.implicit = True
        eor_task.tags = ['always']
        eor_task.when = True
        eor_block.block = [eor_task]
        block_list.append(eor_block)

        return block_list
    def serialize(self, include_deps=True):
        """Return a plain-dict representation of this role suitable for deserialize()."""
        res = super(Role, self).serialize()

        res['_role_name'] = self._role_name
        res['_role_path'] = self._role_path
        res['_role_vars'] = self._role_vars
        res['_role_params'] = self._role_params
        res['_default_vars'] = self._default_vars
        # copy the bookkeeping dicts so later mutation does not leak into the result
        res['_had_task_run'] = self._had_task_run.copy()
        res['_completed'] = self._completed.copy()

        if self._metadata:
            res['_metadata'] = self._metadata.serialize()

        if include_deps:
            deps = []
            for role in self.get_direct_dependencies():
                deps.append(role.serialize())
            res['_dependencies'] = deps

        # parents are serialized shallowly to avoid infinite recursion through deps
        parents = []
        for parent in self._parents:
            parents.append(parent.serialize(include_deps=False))
        res['_parents'] = parents

        return res
    def deserialize(self, data, include_deps=True):
        """Restore role state from a dict previously produced by serialize()."""
        self._role_name = data.get('_role_name', '')
        self._role_path = data.get('_role_path', '')
        self._role_vars = data.get('_role_vars', dict())
        self._role_params = data.get('_role_params', dict())
        self._default_vars = data.get('_default_vars', dict())
        self._had_task_run = data.get('_had_task_run', dict())
        self._completed = data.get('_completed', dict())

        if include_deps:
            deps = []
            for dep in data.get('_dependencies', []):
                r = Role()
                r.deserialize(dep)
                deps.append(r)
            setattr(self, '_dependencies', deps)

        # parents were serialized without their own deps; restore them the same way
        parent_data = data.get('_parents', [])
        parents = []
        for parent in parent_data:
            r = Role()
            r.deserialize(parent, include_deps=False)
            parents.append(r)
        setattr(self, '_parents', parents)

        # NOTE(review): when '_metadata' is absent, self._metadata stays None here,
        # unlike a freshly loaded role which always gets a RoleMetadata() — confirm
        # callers tolerate this.
        metadata_data = data.get('_metadata')
        if metadata_data:
            m = RoleMetadata()
            m.deserialize(metadata_data)
            self._metadata = m

        super(Role, self).deserialize(data)
def set_loader(self, loader):
self._loader = loader
for parent in self._parents:
parent.set_loader(loader)
for dep in self.get_direct_dependencies():
dep.set_loader(loader)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,137 |
task contains deprecated call to be removed in 2.12
|
##### SUMMARY
task contains a call to Display.deprecated or AnsibleModule.deprecate that is scheduled for removal
```
lib/ansible/playbook/task.py:267:20: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/task.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74137
|
https://github.com/ansible/ansible/pull/74154
|
7443e9d659e48d8a4096d4f64d4610f38188d405
|
45ab6fddf30aeb696ea22014bdc6b3da1919d598
| 2021-04-05T20:33:59Z |
python
| 2021-04-08T19:09:45Z |
lib/ansible/playbook/task.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleAssertionError
from ansible.module_utils._text import to_native
from ansible.module_utils.six import iteritems, string_types
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
from ansible.plugins.loader import lookup_loader
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.block import Block
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.conditional import Conditional
from ansible.playbook.loop_control import LoopControl
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.display import Display
from ansible.utils.sentinel import Sentinel
__all__ = ['Task']
display = Display()
class Task(Base, Conditional, Taggable, CollectionSearch):

    """
    A task is a language feature that represents a call to a module, with given arguments and other parameters.
    A handler is a subclass of a task.

    Usage:

       Task.load(datastructure) -> Task
       Task.something(...)
    """

    # =================================================================================
    # ATTRIBUTES
    # load_<attribute_name> and
    # validate_<attribute_name>
    # will be used if defined
    # might be possible to define others

    # NOTE: ONLY set defaults on task attributes that are not inheritable,
    # inheritance is only triggered if the 'current value' is None,
    # default can be set at play/top level object and inheritance will take it's course.

    _args = FieldAttribute(isa='dict', default=dict)
    _action = FieldAttribute(isa='string')

    _async_val = FieldAttribute(isa='int', default=0, alias='async')
    _changed_when = FieldAttribute(isa='list', default=list)
    _delay = FieldAttribute(isa='int', default=5)
    _delegate_to = FieldAttribute(isa='string')
    _delegate_facts = FieldAttribute(isa='bool')
    _failed_when = FieldAttribute(isa='list', default=list)
    _loop = FieldAttribute()
    _loop_control = FieldAttribute(isa='class', class_type=LoopControl, inherit=False)
    _notify = FieldAttribute(isa='list')
    _poll = FieldAttribute(isa='int', default=C.DEFAULT_POLL_INTERVAL)
    _register = FieldAttribute(isa='string', static=True)
    _retries = FieldAttribute(isa='int', default=3)
    _until = FieldAttribute(isa='list', default=list)

    # deprecated, used to be loop and loop_args but loop has been repurposed
    _loop_with = FieldAttribute(isa='string', private=True, inherit=False)

    def __init__(self, block=None, role=None, task_include=None):
        ''' Constructs a task; without the Task.load classmethod it will be pretty blank. '''

        # This is a reference of all the candidate action names for transparent execution of module_defaults with redirected content
        # This isn't a FieldAttribute to prevent it from being set via the playbook
        self._ansible_internal_redirect_list = []
        self._role = role
        self._parent = None
        self.implicit = False

        # a task_include parent takes precedence over the containing block
        if task_include:
            self._parent = task_include
        else:
            self._parent = block

        super(Task, self).__init__()
    def get_path(self):
        ''' return the absolute path of the task with its line number '''
        path = ""
        # prefer this task's own parsed datastructure position; fall back to the
        # position of the play it belongs to; empty string when neither is known
        if hasattr(self, '_ds') and hasattr(self._ds, '_data_source') and hasattr(self._ds, '_line_number'):
            path = "%s:%s" % (self._ds._data_source, self._ds._line_number)
        elif hasattr(self._parent._play, '_ds') and hasattr(self._parent._play._ds, '_data_source') and hasattr(self._parent._play._ds, '_line_number'):
            path = "%s:%s" % (self._parent._play._ds._data_source, self._parent._play._ds._line_number)
        return path
def get_name(self, include_role_fqcn=True):
''' return the name of the task '''
if self._role:
role_name = self._role.get_name(include_role_fqcn=include_role_fqcn)
if self._role and self.name:
return "%s : %s" % (role_name, self.name)
elif self.name:
return self.name
else:
if self._role:
return "%s : %s" % (role_name, self.action)
else:
return "%s" % (self.action,)
def _merge_kv(self, ds):
if ds is None:
return ""
elif isinstance(ds, string_types):
return ds
elif isinstance(ds, dict):
buf = ""
for (k, v) in iteritems(ds):
if k.startswith('_'):
continue
buf = buf + "%s=%s " % (k, v)
buf = buf.strip()
return buf
    @staticmethod
    def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
        # Build a bare Task tied to its parent block/role/include, then let
        # load_data() (inherited from Base) parse the raw datastructure into it.
        t = Task(block=block, role=role, task_include=task_include)
        return t.load_data(data, variable_manager=variable_manager, loader=loader)
def __repr__(self):
''' returns a human readable representation of the task '''
if self.get_name() in C._ACTION_META:
return "TASK: meta (%s)" % self.args['_raw_params']
else:
return "TASK: %s" % self.get_name()
    def _preprocess_with_loop(self, ds, new_ds, k, v):
        ''' take a lookup plugin name and store it correctly '''

        loop_name = k.replace("with_", "")
        # a task may only declare a single loop, via either loop or with_<lookup>
        if new_ds.get('loop') is not None or new_ds.get('loop_with') is not None:
            raise AnsibleError("duplicate loop in task: %s" % loop_name, obj=ds)
        if v is None:
            raise AnsibleError("you must specify a value when using %s" % k, obj=ds)
        # store the lookup name and its argument on the normalized loop attributes
        new_ds['loop_with'] = loop_name
        new_ds['loop'] = v
        # display.deprecated("with_ type loops are being phased out, use the 'loop' keyword instead",
        #                    version="2.10", collection_name='ansible.builtin')
def preprocess_data(self, ds):
'''
tasks are especially complex arguments so need pre-processing.
keep it short.
'''
if not isinstance(ds, dict):
raise AnsibleAssertionError('ds (%s) should be a dict but was a %s' % (ds, type(ds)))
# the new, cleaned datastructure, which will have legacy
# items reduced to a standard structure suitable for the
# attributes of the task class
new_ds = AnsibleMapping()
if isinstance(ds, AnsibleBaseYAMLObject):
new_ds.ansible_pos = ds.ansible_pos
# since this affects the task action parsing, we have to resolve in preprocess instead of in typical validator
default_collection = AnsibleCollectionConfig.default_collection
collections_list = ds.get('collections')
if collections_list is None:
# use the parent value if our ds doesn't define it
collections_list = self.collections
else:
# Validate this untemplated field early on to guarantee we are dealing with a list.
# This is also done in CollectionSearch._load_collections() but this runs before that call.
collections_list = self.get_validated_value('collections', self._collections, collections_list, None)
if default_collection and not self._role: # FIXME: and not a collections role
if collections_list:
if default_collection not in collections_list:
collections_list.insert(0, default_collection)
else:
collections_list = [default_collection]
if collections_list and 'ansible.builtin' not in collections_list and 'ansible.legacy' not in collections_list:
collections_list.append('ansible.legacy')
if collections_list:
ds['collections'] = collections_list
# use the args parsing class to determine the action, args,
# and the delegate_to value from the various possible forms
# supported as legacy
args_parser = ModuleArgsParser(task_ds=ds, collection_list=collections_list)
try:
(action, args, delegate_to) = args_parser.parse()
except AnsibleParserError as e:
# if the raises exception was created with obj=ds args, then it includes the detail
# so we dont need to add it so we can just re raise.
if e.obj:
raise
# But if it wasn't, we can add the yaml object now to get more detail
raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)
else:
self._ansible_internal_redirect_list = args_parser.internal_redirect_list[:]
# the command/shell/script modules used to support the `cmd` arg,
# which corresponds to what we now call _raw_params, so move that
# value over to _raw_params (assuming it is empty)
if action in C._ACTION_HAS_CMD:
if 'cmd' in args:
if args.get('_raw_params', '') != '':
raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
" Please put everything in one or the other place.", obj=ds)
args['_raw_params'] = args.pop('cmd')
new_ds['action'] = action
new_ds['args'] = args
new_ds['delegate_to'] = delegate_to
# we handle any 'vars' specified in the ds here, as we may
# be adding things to them below (special handling for includes).
# When that deprecated feature is removed, this can be too.
if 'vars' in ds:
# _load_vars is defined in Base, and is used to load a dictionary
# or list of dictionaries in a standard way
new_ds['vars'] = self._load_vars(None, ds.get('vars'))
else:
new_ds['vars'] = dict()
for (k, v) in iteritems(ds):
if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
# we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
continue
elif k.startswith('with_') and k.replace("with_", "") in lookup_loader:
# transform into loop property
self._preprocess_with_loop(ds, new_ds, k, v)
else:
# pre-2.0 syntax allowed variables for include statements at the top level of the task,
# so we move those into the 'vars' dictionary here, and show a deprecation message
# as we will remove this at some point in the future.
if action in C._ACTION_INCLUDE and k not in self._valid_attrs and k not in self.DEPRECATED_ATTRIBUTES:
display.deprecated("Specifying include variables at the top-level of the task is deprecated."
" Please see:\nhttps://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
" for currently supported syntax regarding included files and variables",
version="2.12", collection_name='ansible.builtin')
new_ds['vars'][k] = v
elif C.INVALID_TASK_ATTRIBUTE_FAILED or k in self._valid_attrs:
new_ds[k] = v
else:
display.warning("Ignoring invalid attribute: %s" % k)
return super(Task, self).preprocess_data(new_ds)
def _load_loop_control(self, attr, ds):
if not isinstance(ds, dict):
raise AnsibleParserError(
"the `loop_control` value must be specified as a dictionary and cannot "
"be a variable itself (though it can contain variables)",
obj=ds,
)
return LoopControl.load(data=ds, variable_manager=self._variable_manager, loader=self._loader)
    def _validate_attributes(self, ds):
        # Extend the base validation error with a hint about the config option
        # that can downgrade invalid-attribute failures to warnings.
        try:
            super(Task, self)._validate_attributes(ds)
        except AnsibleParserError as e:
            e.message += '\nThis error can be suppressed as a warning using the "invalid_task_attribute_failed" configuration'
            raise e
def post_validate(self, templar):
'''
Override of base class post_validate, to also do final validation on
the block and task include (if any) to which this task belongs.
'''
if self._parent:
self._parent.post_validate(templar)
if AnsibleCollectionConfig.default_collection:
pass
super(Task, self).post_validate(templar)
def _post_validate_loop(self, attr, value, templar):
'''
Override post validation for the loop field, which is templated
specially in the TaskExecutor class when evaluating loops.
'''
return value
def _post_validate_environment(self, attr, value, templar):
'''
Override post validation of vars on the play, as we don't want to
template these too early.
'''
env = {}
if value is not None:
def _parse_env_kv(k, v):
try:
env[k] = templar.template(v, convert_bare=False)
except AnsibleUndefinedVariable as e:
error = to_native(e)
if self.action in C._ACTION_FACT_GATHERING and 'ansible_facts.env' in error or 'ansible_env' in error:
# ignore as fact gathering is required for 'env' facts
return
raise
if isinstance(value, list):
for env_item in value:
if isinstance(env_item, dict):
for k in env_item:
_parse_env_kv(k, env_item[k])
else:
isdict = templar.template(env_item, convert_bare=False)
if isinstance(isdict, dict):
env.update(isdict)
else:
display.warning("could not parse environment value, skipping: %s" % value)
elif isinstance(value, dict):
# should not really happen
env = dict()
for env_item in value:
_parse_env_kv(env_item, value[env_item])
else:
# at this point it should be a simple string, also should not happen
env = templar.template(value, convert_bare=False)
return env
def _post_validate_changed_when(self, attr, value, templar):
'''
changed_when is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def _post_validate_failed_when(self, attr, value, templar):
'''
failed_when is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def _post_validate_until(self, attr, value, templar):
'''
until is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def get_vars(self):
    """Return the merged variable dict for this task.

    Parent (block/include) vars are applied first so task-level vars win;
    the 'tags' and 'when' keywords are stripped out since they are task
    directives, not variables.
    """
    merged = {}
    if self._parent:
        merged.update(self._parent.get_vars())
    merged.update(self.vars)

    for keyword in ('tags', 'when'):
        merged.pop(keyword, None)

    return merged
def get_include_params(self):
    """Return the parameters to pass down to an included file/role.

    Accumulates the parent chain's include params first, then — for
    include-style actions only — this task's own vars on top.
    """
    params = {}
    parent = self._parent
    if parent:
        params.update(parent.get_include_params())
    if self.action in C._ACTION_ALL_INCLUDES:
        params.update(self.vars)
    return params
def copy(self, exclude_parent=False, exclude_tasks=False):
    '''
    Return a copy of this task.

    :arg exclude_parent: when True, the parent block/include is not copied.
    :arg exclude_tasks: forwarded to the parent's copy() so it can skip its
        own task lists.
    '''
    new_me = super(Task, self).copy()

    # if the task has an associated list of candidate names, copy it to the new object too
    new_me._ansible_internal_redirect_list = self._ansible_internal_redirect_list[:]

    new_me._parent = None
    if self._parent and not exclude_parent:
        new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)

    # the role is shared by reference, not deep-copied
    new_me._role = None
    if self._role:
        new_me._role = self._role

    new_me.implicit = self.implicit

    return new_me
def serialize(self):
    '''
    Serialize this task to a plain dict, including the parent object and
    role data (unless the task has already been squashed or finalized,
    in which case inherited values are baked in and the parent chain is
    not needed).
    '''
    data = super(Task, self).serialize()

    if not self._squashed and not self._finalized:
        if self._parent:
            data['parent'] = self._parent.serialize()
            # record the concrete parent class so deserialize() can rebuild it
            data['parent_type'] = self._parent.__class__.__name__
        if self._role:
            data['role'] = self._role.serialize()

    if self._ansible_internal_redirect_list:
        data['_ansible_internal_redirect_list'] = self._ansible_internal_redirect_list[:]

    data['implicit'] = self.implicit

    return data
def deserialize(self, data):
    '''
    Restore task state from a dict produced by serialize(), rebuilding
    the parent object and role before delegating to the base class.
    '''

    # import is here to avoid import loops
    from ansible.playbook.task_include import TaskInclude
    from ansible.playbook.handler_task_include import HandlerTaskInclude

    # rebuild the parent (if any); its concrete class was recorded in
    # 'parent_type' by serialize()
    parent_data = data.get('parent', None)
    if parent_data:
        parent_type = data.get('parent_type')
        if parent_type == 'Block':
            p = Block()
        elif parent_type == 'TaskInclude':
            p = TaskInclude()
        elif parent_type == 'HandlerTaskInclude':
            p = HandlerTaskInclude()
        p.deserialize(parent_data)
        self._parent = p
        del data['parent']

    # rebuild the associated role (if any)
    role_data = data.get('role')
    if role_data:
        r = Role()
        r.deserialize(role_data)
        self._role = r
        del data['role']

    self._ansible_internal_redirect_list = data.get('_ansible_internal_redirect_list', [])
    self.implicit = data.get('implicit', False)

    super(Task, self).deserialize(data)
def set_loader(self, loader):
    """Attach *loader* to this task and propagate it up the parent chain.

    Primarily needed after serialize/deserialize, which does not
    preserve the loader.
    """
    self._loader = loader

    parent = self._parent
    if parent:
        parent.set_loader(loader)
def _get_parent_attribute(self, attr, extend=False, prepend=False):
    '''
    Generic logic to get the attribute or parent attribute for a task value.
    '''
    # the extend/prepend parameters are immediately overridden: the
    # attribute's own declaration decides how values combine
    extend = self._valid_attrs[attr].extend
    prepend = self._valid_attrs[attr].prepend
    try:
        value = self._attributes[attr]
        # If parent is static, we can grab attrs from the parent
        # otherwise, defer to the grandparent
        if getattr(self._parent, 'statically_loaded', True):
            _parent = self._parent
        else:
            _parent = self._parent._parent

        # only consult the ancestor when this task has no value of its own
        # (Sentinel) or the attribute is declared as extendable
        if _parent and (value is Sentinel or extend):
            if getattr(_parent, 'statically_loaded', True):
                # vars are always inheritable, other attributes might not be for the parent but still should be for other ancestors
                if attr != 'vars' and hasattr(_parent, '_get_parent_attribute'):
                    parent_value = _parent._get_parent_attribute(attr)
                else:
                    parent_value = _parent._attributes.get(attr, Sentinel)

                if extend:
                    value = self._extend_value(value, parent_value, prepend)
                else:
                    value = parent_value
    except KeyError:
        # attr not present in _attributes; fall through with whatever we have
        pass

    return value
def get_dep_chain(self):
    """Return the role dependency chain from the parent, or None at the top."""
    parent = self._parent
    return parent.get_dep_chain() if parent else None
def get_search_path(self):
    """Return the ordered list of directories to search for files.

    Follows the role/playbook dependency chain (from the current role
    back through its dependents), then the directory containing the
    task itself (unless already present).
    """
    paths = []

    # inside role: add the dependency chain from current to dependent
    chain = self.get_dep_chain()
    if chain:
        for role in reversed(chain):
            paths.append(role._role_path)

    # add path of task itself, unless it is already in the list
    own_dir = os.path.dirname(self.get_path())
    if own_dir not in paths:
        paths.append(own_dir)

    return paths
def all_parents_static(self):
    """Return True when every ancestor in the parent chain reports static loading."""
    parent = self._parent
    if not parent:
        return True
    return parent.all_parents_static()
def get_first_parent_include(self):
    """Return the nearest ancestor that is a TaskInclude, or None."""
    # local import to avoid an import loop
    from ansible.playbook.task_include import TaskInclude

    parent = self._parent
    if not parent:
        return None
    if isinstance(parent, TaskInclude):
        return parent
    return parent.get_first_parent_include()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,137 |
task contains deprecated call to be removed in 2.12
|
##### SUMMARY
task contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/playbook/task.py:267:20: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/task.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74137
|
https://github.com/ansible/ansible/pull/74154
|
7443e9d659e48d8a4096d4f64d4610f38188d405
|
45ab6fddf30aeb696ea22014bdc6b3da1919d598
| 2021-04-05T20:33:59Z |
python
| 2021-04-08T19:09:45Z |
test/sanity/ignore.txt
|
docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst no-smart-quotes
examples/play.yml shebang
examples/scripts/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/my_test.py shebang # example module but not in a normal module location
examples/scripts/my_test_facts.py shebang # example module but not in a normal module location
examples/scripts/my_test_info.py shebang # example module but not in a normal module location
examples/scripts/upgrade_to_ps3.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/upgrade_to_ps3.ps1 pslint:PSUseApprovedVerbs
hacking/build-ansible.py shebang # only run by release engineers, Python 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.6!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.7!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-3.5!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.6!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.7!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-3.5!skip # release and docs process only, 3.6+ required
lib/ansible/cli/console.py pylint:blacklisted-name
lib/ansible/cli/galaxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/cli/scripts/ansible_cli_stub.py pylint:ansible-deprecated-version
lib/ansible/cli/scripts/ansible_cli_stub.py shebang
lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang
lib/ansible/config/base.yml no-unwanted-files
lib/ansible/executor/playbook_executor.py pylint:blacklisted-name
lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/task_queue_manager.py pylint:blacklisted-name
lib/ansible/galaxy/collection/__init__.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/collection/galaxy_api_proxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/dataclasses.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/providers.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/keyword_desc.yml no-unwanted-files
lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py pylint:blacklisted-name
lib/ansible/module_utils/compat/selinux.py import-2.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-2.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.5!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/distro/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py no-assert
lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
lib/ansible/module_utils/facts/__init__.py empty-init # breaks namespacing, deprecate and eventually remove
lib/ansible/module_utils/facts/network/linux.py pylint:blacklisted-name
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.ArgvParser.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSProvideCommentHelp # need to agree on best format for comment location
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSProvideCommentHelp
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.LinkUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/pycompat24.py no-get-exception
lib/ansible/module_utils/six/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/six/__init__.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py no-basestring
lib/ansible/module_utils/six/__init__.py no-dict-iteritems
lib/ansible/module_utils/six/__init__.py no-dict-iterkeys
lib/ansible/module_utils/six/__init__.py no-dict-itervalues
lib/ansible/module_utils/six/__init__.py pylint:self-assigning-variable
lib/ansible/module_utils/six/__init__.py replace-urlopen
lib/ansible/module_utils/urls.py pylint:blacklisted-name
lib/ansible/module_utils/urls.py replace-urlopen
lib/ansible/modules/apt.py validate-modules:parameter-invalid
lib/ansible/modules/apt_key.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/apt_repository.py validate-modules:parameter-invalid
lib/ansible/modules/assemble.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/async_status.py use-argspec-type-path
lib/ansible/modules/async_status.py validate-modules!skip
lib/ansible/modules/async_wrapper.py ansible-doc!skip # not an actual module
lib/ansible/modules/async_wrapper.py pylint:ansible-bad-function # ignore, required
lib/ansible/modules/async_wrapper.py use-argspec-type-path
lib/ansible/modules/blockinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/blockinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/command.py validate-modules:doc-default-does-not-match-spec # _uses_shell is undocumented
lib/ansible/modules/command.py validate-modules:doc-missing-type
lib/ansible/modules/command.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/command.py validate-modules:undocumented-parameter
lib/ansible/modules/copy.py pylint:blacklisted-name
lib/ansible/modules/copy.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/copy.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/copy.py validate-modules:undocumented-parameter
lib/ansible/modules/cron.py pylint:ansible-deprecated-version
lib/ansible/modules/dnf.py validate-modules:doc-required-mismatch
lib/ansible/modules/dnf.py validate-modules:parameter-invalid
lib/ansible/modules/file.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/file.py validate-modules:undocumented-parameter
lib/ansible/modules/find.py use-argspec-type-path # fix needed
lib/ansible/modules/git.py pylint:blacklisted-name
lib/ansible/modules/git.py use-argspec-type-path
lib/ansible/modules/git.py validate-modules:doc-missing-type
lib/ansible/modules/git.py validate-modules:doc-required-mismatch
lib/ansible/modules/hostname.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/iptables.py pylint:blacklisted-name
lib/ansible/modules/lineinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/package_facts.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/pip.py pylint:blacklisted-name
lib/ansible/modules/pip.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/replace.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:use-run-command-not-popen
lib/ansible/modules/stat.py validate-modules:doc-default-does-not-match-spec # get_md5 is undocumented
lib/ansible/modules/stat.py validate-modules:parameter-invalid
lib/ansible/modules/stat.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/stat.py validate-modules:undocumented-parameter
lib/ansible/modules/systemd.py validate-modules:parameter-invalid
lib/ansible/modules/systemd.py validate-modules:return-syntax-error
lib/ansible/modules/sysvinit.py validate-modules:return-syntax-error
lib/ansible/modules/unarchive.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/uri.py pylint:blacklisted-name
lib/ansible/modules/uri.py validate-modules:doc-required-mismatch
lib/ansible/modules/user.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/user.py validate-modules:doc-default-incompatible-type
lib/ansible/modules/user.py validate-modules:use-run-command-not-popen
lib/ansible/modules/yum.py pylint:blacklisted-name
lib/ansible/modules/yum.py validate-modules:parameter-invalid
lib/ansible/modules/yum_repository.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/yum_repository.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/yum_repository.py validate-modules:undocumented-parameter
lib/ansible/parsing/vault/__init__.py pylint:blacklisted-name
lib/ansible/playbook/__init__.py pylint:ansible-deprecated-version
lib/ansible/playbook/base.py pylint:blacklisted-name
lib/ansible/playbook/collectionsearch.py required-and-default-attributes # https://github.com/ansible/ansible/issues/61460
lib/ansible/playbook/conditional.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:blacklisted-name
lib/ansible/playbook/play_context.py pylint:ansible-deprecated-version
lib/ansible/playbook/role/__init__.py pylint:blacklisted-name
lib/ansible/playbook/task.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/async_status.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/normal.py action-plugin-docs # default action plugin for modules without a dedicated action plugin
lib/ansible/plugins/cache/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/cache/base.py ansible-doc!skip # not a plugin, but a stub for backwards compatibility
lib/ansible/plugins/connection/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/inventory/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/inventory/script.py pylint:ansible-deprecated-version
lib/ansible/plugins/lookup/sequence.py pylint:blacklisted-name
lib/ansible/plugins/strategy/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/strategy/__init__.py pylint:blacklisted-name
lib/ansible/plugins/strategy/linear.py pylint:blacklisted-name
lib/ansible/vars/fact_cache.py pylint:ansible-deprecated-version
lib/ansible/vars/hostvars.py pylint:blacklisted-name
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-function # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import-from # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/collections_plugin_namespace/collection_root/ansible_collections/my_ns/my_col/plugins/lookup/lookup_no_future_boilerplate.py future-import-boilerplate # testing Python 2.x implicit relative imports
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util2.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util3.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/my_module.py pylint:relative-beyond-top-level
test/integration/targets/gathering_facts/library/bogus_facts shebang
test/integration/targets/gathering_facts/library/facts_one shebang
test/integration/targets/gathering_facts/library/facts_two shebang
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps1 pslint!skip
test/integration/targets/incidental_win_reboot/templates/post_reboot.ps1 pslint!skip
test/integration/targets/json_cleanup/library/bad_json shebang
test/integration/targets/lookup_csvfile/files/crlf.csv line-endings
test/integration/targets/lookup_ini/lookup-8859-15.ini no-smart-quotes
test/integration/targets/module_precedence/lib_with_extension/a.ini shebang
test/integration/targets/module_precedence/lib_with_extension/ping.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/a.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/ping.ini shebang
test/integration/targets/module_utils/library/test.py future-import-boilerplate # allow testing of Python 2.x implicit relative imports
test/integration/targets/module_utils/module_utils/bar0/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/bar.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/yak/zebra/foo.py pylint:blacklisted-name
test/integration/targets/old_style_modules_posix/library/helloworld.sh shebang
test/integration/targets/template/files/encoding_1252_utf-8.expected no-smart-quotes
test/integration/targets/template/files/encoding_1252_windows-1252.expected no-smart-quotes
test/integration/targets/template/files/foo.dos.txt line-endings
test/integration/targets/template/templates/encoding_1252.j2 no-smart-quotes
test/integration/targets/unicode/unicode.yml no-smart-quotes
test/integration/targets/win_exec_wrapper/library/test_fail.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_exec_wrapper/tasks/main.yml no-smart-quotes # We are explicitly testing smart quote support for env vars
test/integration/targets/win_fetch/tasks/main.yml no-smart-quotes # We are explicitly testing smart quotes in the file name to fetch
test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_script/files/test_script.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_creates_file.ps1 pslint:PSAvoidUsingCmdletAliases
test/integration/targets/win_script/files/test_script_removes_file.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_script/files/test_script_with_args.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip
test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt test-constraints
test/lib/ansible_test/_data/requirements/sanity.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py use-compat-six
test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
test/lib/ansible_test/_data/setup/windows-httptester.ps1 pslint:PSCustomUseLiteralPath
test/support/integration/plugins/module_utils/aws/core.py pylint:property-with-parameters
test/support/integration/plugins/module_utils/cloud.py future-import-boilerplate
test/support/integration/plugins/module_utils/cloud.py metaclass-boilerplate
test/support/integration/plugins/module_utils/cloud.py pylint:isinstance-second-argument-not-valid-type
test/support/integration/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/integration/plugins/module_utils/database.py future-import-boilerplate
test/support/integration/plugins/module_utils/database.py metaclass-boilerplate
test/support/integration/plugins/module_utils/mysql.py future-import-boilerplate
test/support/integration/plugins/module_utils/mysql.py metaclass-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/integration/plugins/module_utils/postgres.py future-import-boilerplate
test/support/integration/plugins/module_utils/postgres.py metaclass-boilerplate
test/support/integration/plugins/modules/docker_swarm.py pylint:ansible-deprecated-version
test/support/integration/plugins/modules/lvg.py pylint:blacklisted-name
test/support/integration/plugins/modules/timezone.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py pep8:E203
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py pep8:E501
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pep8:E231
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py metaclass-boilerplate
test/support/windows-integration/plugins/modules/async_status.ps1 pslint!skip
test/support/windows-integration/plugins/modules/setup.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_copy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_dsc.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_feature.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_find.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_lineinfile.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_regedit.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_security_policy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_shell.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_wait_for.ps1 pslint!skip
test/units/executor/test_play_iterator.py pylint:blacklisted-name
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-no-version
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-version
test/units/module_utils/basic/test_run_command.py pylint:blacklisted-name
test/units/module_utils/urls/fixtures/multipart.txt line-endings # Fixture for HTTP tests that use CRLF
test/units/module_utils/urls/test_Request.py replace-urlopen
test/units/module_utils/urls/test_fetch_url.py replace-urlopen
test/units/modules/test_apt.py pylint:blacklisted-name
test/units/parsing/vault/test_vault.py pylint:blacklisted-name
test/units/playbook/role/test_role.py pylint:blacklisted-name
test/units/plugins/test_plugins.py pylint:blacklisted-name
test/units/template/test_templar.py pylint:blacklisted-name
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py pylint:relative-beyond-top-level
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py future-import-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py metaclass-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/test_collection_loader.py pylint:undefined-variable # magic runtime local var splatting
test/utils/shippable/check_matrix.py replace-urlopen
test/utils/shippable/timing.py shebang
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,141 |
connection contains deprecated call to be removed in 2.12
|
##### SUMMARY
connection contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/connection/__init__.py:238:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:245:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:252:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:259:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/connection/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74141
|
https://github.com/ansible/ansible/pull/74155
|
45ab6fddf30aeb696ea22014bdc6b3da1919d598
|
b07a78b4ba1a43465f7987420c79c240c97a67ae
| 2021-04-05T20:34:04Z |
python
| 2021-04-08T19:11:06Z |
changelogs/fragments/remove_connection_become_checks.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,141 |
connection contains deprecated call to be removed in 2.12
|
##### SUMMARY
connection contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/connection/__init__.py:238:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:245:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:252:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:259:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/connection/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74141
|
https://github.com/ansible/ansible/pull/74155
|
45ab6fddf30aeb696ea22014bdc6b3da1919d598
|
b07a78b4ba1a43465f7987420c79c240c97a67ae
| 2021-04-05T20:34:04Z |
python
| 2021-04-08T19:11:06Z |
lib/ansible/plugins/connection/__init__.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
# (c) 2015 Toshio Kuratomi <[email protected]>
# (c) 2017, Peter Sprygada <[email protected]>
# (c) 2017 Ansible Project
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import fcntl
import os
import shlex
from abc import abstractmethod, abstractproperty
from functools import wraps
from ansible import constants as C
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins import AnsiblePlugin
from ansible.utils.display import Display
from ansible.plugins.loader import connection_loader, get_shell_plugin
from ansible.utils.path import unfrackpath
# Module-level Display instance shared by all connection plugins for logging.
display = Display()

# Names exported when this module is star-imported.
__all__ = ['ConnectionBase', 'ensure_connect']

# Default chunk size (in bytes) used by connection plugins for file transfers.
BUFSIZE = 65536
def ensure_connect(func):
    """Decorator for connection methods that must run over a live connection.

    If the plugin has not yet connected (``self._connected`` is falsy),
    ``self._connect()`` is invoked first; the wrapped method is then called
    with the original arguments and its result returned unchanged.
    """
    @wraps(func)
    def _connect_first(self, *args, **kwargs):
        # Lazily establish the connection on first use.
        if not self._connected:
            self._connect()
        return func(self, *args, **kwargs)
    return _connect_first
class ConnectionBase(AnsiblePlugin):
    '''
    A base class for connections to contain common code.

    Concrete connection plugins subclass this and implement the abstract
    interface: ``transport``, ``_connect``, ``exec_command``, ``put_file``,
    ``fetch_file`` and ``close``.
    '''

    # Capability flags; concrete plugins override these to advertise behavior.
    has_pipelining = False
    has_native_async = False  # eg, winrm
    always_pipeline_modules = False  # eg, winrm
    has_tty = True  # for interacting with become plugins

    # When running over this connection type, prefer modules written in a certain language
    # as discovered by the specified file extension. An empty string as the
    # language means any language.
    module_implementation_preferences = ('',)
    allow_executable = True

    # the following control whether or not the connection supports the
    # persistent connection framework or not
    supports_persistence = False
    force_persistence = False

    default_user = None

    def __init__(self, play_context, new_stdin, shell=None, *args, **kwargs):
        super(ConnectionBase, self).__init__()

        # All these hasattrs allow subclasses to override these parameters
        if not hasattr(self, '_play_context'):
            # Backwards compat: self._play_context isn't really needed, using set_options/get_option
            self._play_context = play_context
        if not hasattr(self, '_new_stdin'):
            self._new_stdin = new_stdin
        if not hasattr(self, '_display'):
            # Backwards compat: self._display isn't really needed, just import the global display and use that.
            self._display = display
        if not hasattr(self, '_connected'):
            self._connected = False

        self.success_key = None
        self.prompt = None
        self._connected = False
        self._socket_path = None

        # helper plugins
        self._shell = shell

        # we always must have shell
        if not self._shell:
            shell_type = play_context.shell if play_context.shell else getattr(self, '_shell_type', None)
            self._shell = get_shell_plugin(shell_type=shell_type, executable=self._play_context.executable)

        self.become = None

    def set_become_plugin(self, plugin):
        """Attach the become (privilege escalation) plugin selected for this task."""
        self.become = plugin

    @property
    def connected(self):
        '''Read-only property holding whether the connection to the remote host is active or closed.'''
        return self._connected

    @property
    def socket_path(self):
        '''Read-only property holding the connection socket path for this remote host'''
        return self._socket_path

    @staticmethod
    def _split_ssh_args(argstring):
        """
        Takes a string like '-o Foo=1 -o Bar="foo bar"' and returns a
        list ['-o', 'Foo=1', '-o', 'Bar=foo bar'] that can be added to
        the argument list. The list will not contain any empty elements.
        """
        try:
            # Python 2.6.x shlex doesn't handle unicode type so we have to
            # convert args to byte string for that case.  More efficient to
            # try without conversion first but python2.6 doesn't throw an
            # exception, it merely mangles the output:
            # >>> shlex.split(u't e')
            # ['t\x00\x00\x00', '\x00\x00\x00e\x00\x00\x00']
            return [to_text(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()]
        except AttributeError:
            # In Python3, shlex.split doesn't work on a byte string.
            return [to_text(x.strip()) for x in shlex.split(argstring) if x.strip()]

    @abstractproperty
    def transport(self):
        """String used to identify this Connection class from other classes"""
        pass

    @abstractmethod
    def _connect(self):
        """Connect to the host we've been initialized with"""

    @ensure_connect
    @abstractmethod
    def exec_command(self, cmd, in_data=None, sudoable=True):
        """Run a command on the remote host.

        :arg cmd: byte string containing the command
        :kwarg in_data: If set, this data is passed to the command's stdin.
            This is used to implement pipelining.  Currently not all
            connection plugins implement pipelining.
        :kwarg sudoable: Tell the connection plugin if we're executing
            a command via a privilege escalation mechanism.  This may affect
            how the connection plugin returns data.  Note that not all
            connections can handle privilege escalation.
        :returns: a tuple of (return code, stdout, stderr)  The return code is
            an int while stdout and stderr are both byte strings.

        When a command is executed, it goes through multiple commands to get
        there.  It looks approximately like this::

            [LocalShell] ConnectionCommand [UsersLoginShell (*)] ANSIBLE_SHELL_EXECUTABLE [(BecomeCommand ANSIBLE_SHELL_EXECUTABLE)] Command

        :LocalShell: Is optional.  It is run locally to invoke the
            ``Connection Command``.  In most instances, the
            ``ConnectionCommand`` can be invoked directly instead.  The ssh
            connection plugin which can have values that need expanding
            locally specified via ssh_args is the sole known exception to
            this.  Shell metacharacters in the command itself should be
            processed on the remote machine, not on the local machine so no
            shell is needed on the local machine.  (Example, ``/bin/sh``)
        :ConnectionCommand: This is the command that connects us to the remote
            machine to run the rest of the command.  ``ansible_user``,
            ``ansible_ssh_host`` and so forth are fed to this piece of the
            command to connect to the correct host (Examples ``ssh``,
            ``chroot``)
        :UsersLoginShell: This shell may or may not be created depending on
            the ConnectionCommand used by the connection plugin.  This is the
            shell that the ``ansible_user`` has configured as their login
            shell.  In traditional UNIX parlance, this is the last field of
            a user's ``/etc/passwd`` entry   We do not specifically try to run
            the ``UsersLoginShell`` when we connect.  Instead it is implicit
            in the actions that the ``ConnectionCommand`` takes when it
            connects to a remote machine.  ``ansible_shell_type`` may be set
            to inform ansible of differences in how the ``UsersLoginShell``
            handles things like quoting if a shell has different semantics
            than the Bourne shell.
        :ANSIBLE_SHELL_EXECUTABLE: This is the shell set via the inventory var
            ``ansible_shell_executable`` or via
            ``constants.DEFAULT_EXECUTABLE`` if the inventory var is not set.
            We explicitly invoke this shell so that we have predictable
            quoting rules at this point.  ``ANSIBLE_SHELL_EXECUTABLE`` is only
            settable by the user because some sudo setups may only allow
            invoking a specific shell.  (For instance, ``/bin/bash`` may be
            allowed but ``/bin/sh``, our default, may not).  We invoke this
            twice, once after the ``ConnectionCommand`` and once after the
            ``BecomeCommand``.  After the ConnectionCommand, this is run by
            the ``UsersLoginShell``.  After the ``BecomeCommand`` we specify
            that the ``ANSIBLE_SHELL_EXECUTABLE`` is being invoked directly.
        :BecomeCommand ANSIBLE_SHELL_EXECUTABLE: Is the command that performs
            privilege escalation.  Setting this up is performed by the action
            plugin prior to running ``exec_command``.  So we just get passed
            :param:`cmd` which has the BecomeCommand already added.
            (Examples: sudo, su)  If we have a BecomeCommand then we will
            invoke a ANSIBLE_SHELL_EXECUTABLE shell inside of it so that we
            have a consistent view of quoting.
        :Command: Is the command we're actually trying to run remotely.
            (Examples: mkdir -p $HOME/.ansible, python $HOME/.ansible/tmp-script-file)
        """
        pass

    @ensure_connect
    @abstractmethod
    def put_file(self, in_path, out_path):
        """Transfer a file from local to remote"""
        pass

    @ensure_connect
    @abstractmethod
    def fetch_file(self, in_path, out_path):
        """Fetch a file from remote to local; callers are expected to have pre-created the directory chain for out_path"""
        pass

    @abstractmethod
    def close(self):
        """Terminate the connection"""
        pass

    def connection_lock(self):
        """Take an exclusive advisory lock on the play context's connection lock fd."""
        f = self._play_context.connection_lockfd
        display.vvvv('CONNECTION: pid %d waiting for lock on %d' % (os.getpid(), f), host=self._play_context.remote_addr)
        fcntl.lockf(f, fcntl.LOCK_EX)
        display.vvvv('CONNECTION: pid %d acquired lock on %d' % (os.getpid(), f), host=self._play_context.remote_addr)

    def connection_unlock(self):
        """Release the advisory lock taken by :meth:`connection_lock`."""
        f = self._play_context.connection_lockfd
        fcntl.lockf(f, fcntl.LOCK_UN)
        display.vvvv('CONNECTION: pid %d released lock on %d' % (os.getpid(), f), host=self._play_context.remote_addr)

    def reset(self):
        # Subclasses that support resetting a persistent connection override this.
        display.warning("Reset is not implemented for this connection")

    # NOTE: these password functions are all become specific, the name is
    # confusing as it does not handle 'protocol passwords'
    # DEPRECATED:
    # These are kept for backwards compatibility
    # Use the methods provided by the become plugins instead
    def check_become_success(self, b_output):
        """Deprecated shim; delegates to the become plugin's ``check_success``."""
        display.deprecated(
            "Connection.check_become_success is deprecated, calling code should be using become plugins instead",
            version="2.12", collection_name='ansible.builtin'
        )
        return self.become.check_success(b_output)

    def check_password_prompt(self, b_output):
        """Deprecated shim; delegates to the become plugin's ``check_password_prompt``."""
        display.deprecated(
            "Connection.check_password_prompt is deprecated, calling code should be using become plugins instead",
            version="2.12", collection_name='ansible.builtin'
        )
        return self.become.check_password_prompt(b_output)

    def check_incorrect_password(self, b_output):
        """Deprecated shim; delegates to the become plugin's ``check_incorrect_password``."""
        display.deprecated(
            "Connection.check_incorrect_password is deprecated, calling code should be using become plugins instead",
            version="2.12", collection_name='ansible.builtin'
        )
        return self.become.check_incorrect_password(b_output)

    def check_missing_password(self, b_output):
        """Deprecated shim; delegates to the become plugin's ``check_missing_password``."""
        display.deprecated(
            "Connection.check_missing_password is deprecated, calling code should be using become plugins instead",
            version="2.12", collection_name='ansible.builtin'
        )
        return self.become.check_missing_password(b_output)
class NetworkConnectionBase(ConnectionBase):
    """
    A base class for network-style connections.

    These connections run over a persistent local process; most operations
    are delegated to an internal 'local' connection or to a sub-plugin
    (terminal/cliconf/netconf) stored in ``self._sub_plugin``.
    """

    force_persistence = True
    # Do not use _remote_is_local in other connections
    _remote_is_local = True

    def __init__(self, play_context, new_stdin, *args, **kwargs):
        super(NetworkConnectionBase, self).__init__(play_context, new_stdin, *args, **kwargs)
        # Queued (level, message) tuples pushed back to the controller process.
        self._messages = []
        self._conn_closed = False

        self._network_os = self._play_context.network_os

        # Local connection used for executing commands and transferring files.
        self._local = connection_loader.get('local', play_context, '/dev/null')
        self._local.set_options()

        self._sub_plugin = {}
        self._cached_variables = (None, None, None)

        # reconstruct the socket_path and set instance values accordingly
        self._ansible_playbook_pid = kwargs.get('ansible_playbook_pid')
        self._update_connection_state()

    def __getattr__(self, name):
        """Fall back to the active sub-plugin for unknown non-private attributes."""
        try:
            return self.__dict__[name]
        except KeyError:
            if not name.startswith('_'):
                plugin = self._sub_plugin.get('obj')
                if plugin:
                    method = getattr(plugin, name, None)
                    if method is not None:
                        return method
            raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, name))

    def exec_command(self, cmd, in_data=None, sudoable=True):
        # Commands run on the controller itself via the local connection.
        return self._local.exec_command(cmd, in_data, sudoable)

    def queue_message(self, level, message):
        """
        Adds a message to the queue of messages waiting to be pushed back to the controller process.

        :arg level: A string which can either be the name of a method in display, or 'log'. When
            the messages are returned to task_executor, a value of log will correspond to
            ``display.display(message, log_only=True)``, while another value will call ``display.[level](message)``
        """
        self._messages.append((level, message))

    def pop_messages(self):
        """Return all queued messages and clear the queue."""
        messages, self._messages = self._messages, []
        return messages

    def put_file(self, in_path, out_path):
        """Transfer a file from local to remote"""
        return self._local.put_file(in_path, out_path)

    def fetch_file(self, in_path, out_path):
        """Fetch a file from remote to local"""
        return self._local.fetch_file(in_path, out_path)

    def reset(self):
        '''
        Reset the connection
        '''
        if self._socket_path:
            self.queue_message('vvvv', 'resetting persistent connection for socket_path %s' % self._socket_path)
            self.close()
        self.queue_message('vvvv', 'reset call on connection instance')

    def close(self):
        # Mark the connection closed; the persistent process is torn down elsewhere.
        self._conn_closed = True
        if self._connected:
            self._connected = False

    def set_options(self, task_keys=None, var_options=None, direct=None):
        """Apply plugin options, warn about verbose persistent logging, and forward options to the sub-plugin."""
        super(NetworkConnectionBase, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)
        if self.get_option('persistent_log_messages'):
            warning = "Persistent connection logging is enabled for %s. This will log ALL interactions" % self._play_context.remote_addr
            logpath = getattr(C, 'DEFAULT_LOG_PATH')
            if logpath is not None:
                warning += " to %s" % logpath
            self.queue_message('warning', "%s and WILL NOT redact sensitive configuration like passwords. USE WITH CAUTION!" % warning)

        if self._sub_plugin.get('obj') and self._sub_plugin.get('type') != 'external':
            try:
                self._sub_plugin['obj'].set_options(task_keys=task_keys, var_options=var_options, direct=direct)
            except AttributeError:
                # Sub-plugin does not support set_options; ignore.
                pass

    def _update_connection_state(self):
        '''
        Reconstruct the connection socket_path and check if it exists

        If the socket path exists then the connection is active and set
        both the _socket_path value to the path and the _connected value
        to True. If the socket path doesn't exist, leave the socket path
        value to None and the _connected value to False
        '''
        ssh = connection_loader.get('ssh', class_only=True)
        control_path = ssh._create_control_path(
            self._play_context.remote_addr, self._play_context.port,
            self._play_context.remote_user, self._play_context.connection,
            self._ansible_playbook_pid
        )

        tmp_path = unfrackpath(C.PERSISTENT_CONTROL_PATH_DIR)
        socket_path = unfrackpath(control_path % dict(directory=tmp_path))

        if os.path.exists(socket_path):
            self._connected = True
            self._socket_path = socket_path

    def _log_messages(self, message):
        """Queue *message* for logging if persistent-connection logging is enabled."""
        if self.get_option('persistent_log_messages'):
            self.queue_message('log', message)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,141 |
connection contains deprecated call to be removed in 2.12
|
##### SUMMARY
connection contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/connection/__init__.py:238:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:245:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:252:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:259:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/connection/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74141
|
https://github.com/ansible/ansible/pull/74155
|
45ab6fddf30aeb696ea22014bdc6b3da1919d598
|
b07a78b4ba1a43465f7987420c79c240c97a67ae
| 2021-04-05T20:34:04Z |
python
| 2021-04-08T19:11:06Z |
test/sanity/ignore.txt
|
docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst no-smart-quotes
examples/play.yml shebang
examples/scripts/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/my_test.py shebang # example module but not in a normal module location
examples/scripts/my_test_facts.py shebang # example module but not in a normal module location
examples/scripts/my_test_info.py shebang # example module but not in a normal module location
examples/scripts/upgrade_to_ps3.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/upgrade_to_ps3.ps1 pslint:PSUseApprovedVerbs
hacking/build-ansible.py shebang # only run by release engineers, Python 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.6!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.7!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-3.5!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.6!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.7!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-3.5!skip # release and docs process only, 3.6+ required
lib/ansible/cli/console.py pylint:blacklisted-name
lib/ansible/cli/galaxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/cli/scripts/ansible_cli_stub.py pylint:ansible-deprecated-version
lib/ansible/cli/scripts/ansible_cli_stub.py shebang
lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang
lib/ansible/config/base.yml no-unwanted-files
lib/ansible/executor/playbook_executor.py pylint:blacklisted-name
lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/task_queue_manager.py pylint:blacklisted-name
lib/ansible/galaxy/collection/__init__.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/collection/galaxy_api_proxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/dataclasses.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/providers.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/keyword_desc.yml no-unwanted-files
lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py pylint:blacklisted-name
lib/ansible/module_utils/compat/selinux.py import-2.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-2.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.5!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/distro/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py no-assert
lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
lib/ansible/module_utils/facts/__init__.py empty-init # breaks namespacing, deprecate and eventually remove
lib/ansible/module_utils/facts/network/linux.py pylint:blacklisted-name
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.ArgvParser.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSProvideCommentHelp # need to agree on best format for comment location
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSProvideCommentHelp
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.LinkUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/pycompat24.py no-get-exception
lib/ansible/module_utils/six/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/six/__init__.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py no-basestring
lib/ansible/module_utils/six/__init__.py no-dict-iteritems
lib/ansible/module_utils/six/__init__.py no-dict-iterkeys
lib/ansible/module_utils/six/__init__.py no-dict-itervalues
lib/ansible/module_utils/six/__init__.py pylint:self-assigning-variable
lib/ansible/module_utils/six/__init__.py replace-urlopen
lib/ansible/module_utils/urls.py pylint:blacklisted-name
lib/ansible/module_utils/urls.py replace-urlopen
lib/ansible/modules/apt.py validate-modules:parameter-invalid
lib/ansible/modules/apt_key.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/apt_repository.py validate-modules:parameter-invalid
lib/ansible/modules/assemble.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/async_status.py use-argspec-type-path
lib/ansible/modules/async_status.py validate-modules!skip
lib/ansible/modules/async_wrapper.py ansible-doc!skip # not an actual module
lib/ansible/modules/async_wrapper.py pylint:ansible-bad-function # ignore, required
lib/ansible/modules/async_wrapper.py use-argspec-type-path
lib/ansible/modules/blockinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/blockinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/command.py validate-modules:doc-default-does-not-match-spec # _uses_shell is undocumented
lib/ansible/modules/command.py validate-modules:doc-missing-type
lib/ansible/modules/command.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/command.py validate-modules:undocumented-parameter
lib/ansible/modules/copy.py pylint:blacklisted-name
lib/ansible/modules/copy.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/copy.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/copy.py validate-modules:undocumented-parameter
lib/ansible/modules/cron.py pylint:ansible-deprecated-version
lib/ansible/modules/dnf.py validate-modules:doc-required-mismatch
lib/ansible/modules/dnf.py validate-modules:parameter-invalid
lib/ansible/modules/file.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/file.py validate-modules:undocumented-parameter
lib/ansible/modules/find.py use-argspec-type-path # fix needed
lib/ansible/modules/git.py pylint:blacklisted-name
lib/ansible/modules/git.py use-argspec-type-path
lib/ansible/modules/git.py validate-modules:doc-missing-type
lib/ansible/modules/git.py validate-modules:doc-required-mismatch
lib/ansible/modules/hostname.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/iptables.py pylint:blacklisted-name
lib/ansible/modules/lineinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/package_facts.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/pip.py pylint:blacklisted-name
lib/ansible/modules/pip.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/replace.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:use-run-command-not-popen
lib/ansible/modules/stat.py validate-modules:doc-default-does-not-match-spec # get_md5 is undocumented
lib/ansible/modules/stat.py validate-modules:parameter-invalid
lib/ansible/modules/stat.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/stat.py validate-modules:undocumented-parameter
lib/ansible/modules/systemd.py validate-modules:parameter-invalid
lib/ansible/modules/systemd.py validate-modules:return-syntax-error
lib/ansible/modules/sysvinit.py validate-modules:return-syntax-error
lib/ansible/modules/unarchive.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/uri.py pylint:blacklisted-name
lib/ansible/modules/uri.py validate-modules:doc-required-mismatch
lib/ansible/modules/user.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/user.py validate-modules:doc-default-incompatible-type
lib/ansible/modules/user.py validate-modules:use-run-command-not-popen
lib/ansible/modules/yum.py pylint:blacklisted-name
lib/ansible/modules/yum.py validate-modules:parameter-invalid
lib/ansible/modules/yum_repository.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/yum_repository.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/yum_repository.py validate-modules:undocumented-parameter
lib/ansible/parsing/vault/__init__.py pylint:blacklisted-name
lib/ansible/playbook/__init__.py pylint:ansible-deprecated-version
lib/ansible/playbook/base.py pylint:blacklisted-name
lib/ansible/playbook/collectionsearch.py required-and-default-attributes # https://github.com/ansible/ansible/issues/61460
lib/ansible/playbook/conditional.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:blacklisted-name
lib/ansible/playbook/play_context.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/async_status.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/normal.py action-plugin-docs # default action plugin for modules without a dedicated action plugin
lib/ansible/plugins/cache/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/cache/base.py ansible-doc!skip # not a plugin, but a stub for backwards compatibility
lib/ansible/plugins/connection/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/inventory/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/inventory/script.py pylint:ansible-deprecated-version
lib/ansible/plugins/lookup/sequence.py pylint:blacklisted-name
lib/ansible/plugins/strategy/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/strategy/__init__.py pylint:blacklisted-name
lib/ansible/plugins/strategy/linear.py pylint:blacklisted-name
lib/ansible/vars/fact_cache.py pylint:ansible-deprecated-version
lib/ansible/vars/hostvars.py pylint:blacklisted-name
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-function # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import-from # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/collections_plugin_namespace/collection_root/ansible_collections/my_ns/my_col/plugins/lookup/lookup_no_future_boilerplate.py future-import-boilerplate # testing Python 2.x implicit relative imports
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util2.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util3.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/my_module.py pylint:relative-beyond-top-level
test/integration/targets/gathering_facts/library/bogus_facts shebang
test/integration/targets/gathering_facts/library/facts_one shebang
test/integration/targets/gathering_facts/library/facts_two shebang
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps1 pslint!skip
test/integration/targets/incidental_win_reboot/templates/post_reboot.ps1 pslint!skip
test/integration/targets/json_cleanup/library/bad_json shebang
test/integration/targets/lookup_csvfile/files/crlf.csv line-endings
test/integration/targets/lookup_ini/lookup-8859-15.ini no-smart-quotes
test/integration/targets/module_precedence/lib_with_extension/a.ini shebang
test/integration/targets/module_precedence/lib_with_extension/ping.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/a.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/ping.ini shebang
test/integration/targets/module_utils/library/test.py future-import-boilerplate # allow testing of Python 2.x implicit relative imports
test/integration/targets/module_utils/module_utils/bar0/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/bar.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/yak/zebra/foo.py pylint:blacklisted-name
test/integration/targets/old_style_modules_posix/library/helloworld.sh shebang
test/integration/targets/template/files/encoding_1252_utf-8.expected no-smart-quotes
test/integration/targets/template/files/encoding_1252_windows-1252.expected no-smart-quotes
test/integration/targets/template/files/foo.dos.txt line-endings
test/integration/targets/template/templates/encoding_1252.j2 no-smart-quotes
test/integration/targets/unicode/unicode.yml no-smart-quotes
test/integration/targets/win_exec_wrapper/library/test_fail.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_exec_wrapper/tasks/main.yml no-smart-quotes # We are explicitly testing smart quote support for env vars
test/integration/targets/win_fetch/tasks/main.yml no-smart-quotes # We are explicitly testing smart quotes in the file name to fetch
test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_script/files/test_script.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_creates_file.ps1 pslint:PSAvoidUsingCmdletAliases
test/integration/targets/win_script/files/test_script_removes_file.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_script/files/test_script_with_args.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip
test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt test-constraints
test/lib/ansible_test/_data/requirements/sanity.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py use-compat-six
test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
test/lib/ansible_test/_data/setup/windows-httptester.ps1 pslint:PSCustomUseLiteralPath
test/support/integration/plugins/module_utils/aws/core.py pylint:property-with-parameters
test/support/integration/plugins/module_utils/cloud.py future-import-boilerplate
test/support/integration/plugins/module_utils/cloud.py metaclass-boilerplate
test/support/integration/plugins/module_utils/cloud.py pylint:isinstance-second-argument-not-valid-type
test/support/integration/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/integration/plugins/module_utils/database.py future-import-boilerplate
test/support/integration/plugins/module_utils/database.py metaclass-boilerplate
test/support/integration/plugins/module_utils/mysql.py future-import-boilerplate
test/support/integration/plugins/module_utils/mysql.py metaclass-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/integration/plugins/module_utils/postgres.py future-import-boilerplate
test/support/integration/plugins/module_utils/postgres.py metaclass-boilerplate
test/support/integration/plugins/modules/docker_swarm.py pylint:ansible-deprecated-version
test/support/integration/plugins/modules/lvg.py pylint:blacklisted-name
test/support/integration/plugins/modules/timezone.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py pep8:E203
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py pep8:E501
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pep8:E231
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py metaclass-boilerplate
test/support/windows-integration/plugins/modules/async_status.ps1 pslint!skip
test/support/windows-integration/plugins/modules/setup.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_copy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_dsc.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_feature.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_find.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_lineinfile.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_regedit.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_security_policy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_shell.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_wait_for.ps1 pslint!skip
test/units/executor/test_play_iterator.py pylint:blacklisted-name
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-no-version
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-version
test/units/module_utils/basic/test_run_command.py pylint:blacklisted-name
test/units/module_utils/urls/fixtures/multipart.txt line-endings # Fixture for HTTP tests that use CRLF
test/units/module_utils/urls/test_Request.py replace-urlopen
test/units/module_utils/urls/test_fetch_url.py replace-urlopen
test/units/modules/test_apt.py pylint:blacklisted-name
test/units/parsing/vault/test_vault.py pylint:blacklisted-name
test/units/playbook/role/test_role.py pylint:blacklisted-name
test/units/plugins/test_plugins.py pylint:blacklisted-name
test/units/template/test_templar.py pylint:blacklisted-name
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py pylint:relative-beyond-top-level
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py future-import-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py metaclass-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/test_collection_loader.py pylint:undefined-variable # magic runtime local var splatting
test/utils/shippable/check_matrix.py replace-urlopen
test/utils/shippable/timing.py shebang
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,141 |
connection contains deprecated call to be removed in 2.12
|
##### SUMMARY
connection contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/connection/__init__.py:238:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:245:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:252:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:259:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/connection/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74141
|
https://github.com/ansible/ansible/pull/74155
|
45ab6fddf30aeb696ea22014bdc6b3da1919d598
|
b07a78b4ba1a43465f7987420c79c240c97a67ae
| 2021-04-05T20:34:04Z |
python
| 2021-04-08T19:11:06Z |
test/units/plugins/connection/test_connection.py
|
# (c) 2015, Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from io import StringIO
import sys
import pytest
from units.compat import mock
from units.compat import unittest
from units.compat.mock import MagicMock
from units.compat.mock import patch
from ansible.errors import AnsibleError
from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import ConnectionBase
from ansible.plugins.loader import become_loader
class TestConnectionBaseClass(unittest.TestCase):
    """Unit tests for the abstract ``ConnectionBase`` plugin base class."""

    def setUp(self):
        # A PlayContext carrying a sudo prompt plus a dummy stdin stream is the
        # minimum needed to instantiate concrete ConnectionBase subclasses.
        self.play_context = PlayContext()
        self.play_context.prompt = (
            '[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '
        )
        self.in_stream = StringIO()

    def tearDown(self):
        pass

    def test_subclass_error(self):
        """Subclasses that leave abstract methods unimplemented must raise TypeError on instantiation."""
        class ConnectionModule1(ConnectionBase):
            pass
        with self.assertRaises(TypeError):
            ConnectionModule1()  # pylint: disable=abstract-class-instantiated

        class ConnectionModule2(ConnectionBase):
            def get(self, key):
                super(ConnectionModule2, self).get(key)

        with self.assertRaises(TypeError):
            ConnectionModule2()  # pylint: disable=abstract-class-instantiated

    def test_subclass_success(self):
        """A subclass implementing every abstract member can be instantiated."""
        class ConnectionModule3(ConnectionBase):
            @property
            def transport(self):
                pass

            def _connect(self):
                pass

            def exec_command(self):
                pass

            def put_file(self):
                pass

            def fetch_file(self):
                pass

            def close(self):
                pass

        self.assertIsInstance(ConnectionModule3(self.play_context, self.in_stream), ConnectionModule3)

    def test_check_password_prompt(self):
        """check_password_prompt() must find the become prompt in assorted real-world ssh output.

        The fixtures below are captured (verbatim) samples of local sudo
        output, ssh output with and without pipelining at -vvvv verbosity,
        plain ssh output, and output polluted by a DNS resolution failure.
        """
        local = (
            b'[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: \n'
            b'BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq\n'
        )

        ssh_pipelining_vvvv = b'''
debug3: mux_master_read_cb: channel 1 packet type 0x10000002 len 251
debug2: process_mux_new_session: channel 1: request tty 0, X 1, agent 1, subsys 0, term "xterm-256color", cmd "/bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: " -u root /bin/sh -c '"'"'echo BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq; /bin/true'"'"' && sleep 0'", env 0
debug3: process_mux_new_session: got fds stdin 9, stdout 10, stderr 11
debug2: client_session2_setup: id 2
debug1: Sending command: /bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: " -u root /bin/sh -c '"'"'echo BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq; /bin/true'"'"' && sleep 0'
debug2: channel 2: request exec confirm 1
debug2: channel 2: rcvd ext data 67
[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: debug2: channel 2: written 67 to efd 11
BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq
debug3: receive packet: type 98
'''  # noqa

        ssh_nopipelining_vvvv = b'''
debug3: mux_master_read_cb: channel 1 packet type 0x10000002 len 251
debug2: process_mux_new_session: channel 1: request tty 1, X 1, agent 1, subsys 0, term "xterm-256color", cmd "/bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: " -u root /bin/sh -c '"'"'echo BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq; /bin/true'"'"' && sleep 0'", env 0
debug3: mux_client_request_session: session request sent
debug3: send packet: type 98
debug1: Sending command: /bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: " -u root /bin/sh -c '"'"'echo BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq; /bin/true'"'"' && sleep 0'
debug2: channel 2: request exec confirm 1
debug2: exec request accepted on channel 2
[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: debug3: receive packet: type 2
debug3: Received SSH2_MSG_IGNORE
debug3: Received SSH2_MSG_IGNORE
BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq
debug3: receive packet: type 98
'''  # noqa

        ssh_novvvv = (
            b'[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: \n'
            b'BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq\n'
        )

        dns_issue = (
            b'timeout waiting for privilege escalation password prompt:\n'
            b'sudo: sudo: unable to resolve host tcloud014\n'
            b'[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: \n'
            b'BECOME-SUCCESS-ouzmdnewuhucvuaabtjmweasarviygqq\n'
        )

        nothing = b''

        # Prompt text embedded in a command echo must NOT be treated as a prompt.
        in_front = b'''
debug1: Sending command: /bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: " -u root /bin/sh -c '"'"'echo
'''

        class ConnectionFoo(ConnectionBase):
            @property
            def transport(self):
                pass

            def _connect(self):
                pass

            def exec_command(self):
                pass

            def put_file(self):
                pass

            def fetch_file(self):
                pass

            def close(self):
                pass

        c = ConnectionFoo(self.play_context, self.in_stream)
        c.set_become_plugin(become_loader.get('sudo'))
        c.become.prompt = '[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '

        self.assertTrue(c.check_password_prompt(local))
        self.assertTrue(c.check_password_prompt(ssh_pipelining_vvvv))
        self.assertTrue(c.check_password_prompt(ssh_nopipelining_vvvv))
        self.assertTrue(c.check_password_prompt(ssh_novvvv))
        self.assertTrue(c.check_password_prompt(dns_issue))
        self.assertFalse(c.check_password_prompt(nothing))
        self.assertFalse(c.check_password_prompt(in_front))
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,141 |
connection contains deprecated call to be removed in 2.12
|
##### SUMMARY
connection contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/connection/__init__.py:238:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:245:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:252:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/connection/__init__.py:259:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/connection/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74141
|
https://github.com/ansible/ansible/pull/74155
|
45ab6fddf30aeb696ea22014bdc6b3da1919d598
|
b07a78b4ba1a43465f7987420c79c240c97a67ae
| 2021-04-05T20:34:04Z |
python
| 2021-04-08T19:11:06Z |
test/units/plugins/connection/test_ssh.py
|
# -*- coding: utf-8 -*-
# (c) 2015, Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from io import StringIO
import pytest
from ansible import constants as C
from ansible.errors import AnsibleAuthenticationFailure
from units.compat import unittest
from units.compat.mock import patch, MagicMock, PropertyMock
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.module_utils.compat.selectors import SelectorKey, EVENT_READ
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import ssh
from ansible.plugins.loader import connection_loader, become_loader
class TestConnectionBaseClass(unittest.TestCase):
    """Unit tests for the ssh connection plugin's basic behaviour."""

    def test_plugins_connection_ssh_module(self):
        """ssh.Connection can be instantiated directly from a PlayContext and stdin."""
        play_context = PlayContext()
        play_context.prompt = (
            '[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '
        )
        in_stream = StringIO()

        self.assertIsInstance(ssh.Connection(play_context, in_stream), ssh.Connection)

    def test_plugins_connection_ssh_basic(self):
        """_connect() returns self; _sshpass_available() honours and caches SSHPASS_AVAILABLE."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = ssh.Connection(pc, new_stdin)

        # connect just returns self, so assert that
        res = conn._connect()
        self.assertEqual(conn, res)

        # Explicit module-level flag short-circuits the probe.
        ssh.SSHPASS_AVAILABLE = False
        self.assertFalse(conn._sshpass_available())

        ssh.SSHPASS_AVAILABLE = True
        self.assertTrue(conn._sshpass_available())

        with patch('subprocess.Popen') as p:
            # None forces a real probe: a launchable sshpass means available...
            ssh.SSHPASS_AVAILABLE = None
            p.return_value = MagicMock()
            self.assertTrue(conn._sshpass_available())

            # ...and an OSError from Popen means unavailable.
            ssh.SSHPASS_AVAILABLE = None
            p.return_value = None
            p.side_effect = OSError()
            self.assertFalse(conn._sshpass_available())

        conn.close()
        self.assertFalse(conn._connected)

    def test_plugins_connection_ssh__build_command(self):
        """_build_command() runs without error for a plain ssh binary."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = connection_loader.get('ssh', pc, new_stdin)
        conn._build_command('ssh', 'ssh')

    def test_plugins_connection_ssh_exec_command(self):
        """exec_command() delegates to _build_command()/_run() with and without in_data."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = connection_loader.get('ssh', pc, new_stdin)

        conn._build_command = MagicMock()
        conn._build_command.return_value = 'ssh something something'
        conn._run = MagicMock()
        conn._run.return_value = (0, 'stdout', 'stderr')
        conn.get_option = MagicMock()
        conn.get_option.return_value = True

        res, stdout, stderr = conn.exec_command('ssh')
        res, stdout, stderr = conn.exec_command('ssh', 'this is some data')

    def test_plugins_connection_ssh__examine_output(self):
        """_examine_output() sets exactly one become flag per scenario and strips matched lines."""
        pc = PlayContext()
        new_stdin = StringIO()

        conn = connection_loader.get('ssh', pc, new_stdin)
        conn.set_become_plugin(become_loader.get('sudo'))

        conn.check_password_prompt = MagicMock()
        conn.check_become_success = MagicMock()
        conn.check_incorrect_password = MagicMock()
        conn.check_missing_password = MagicMock()

        # Simple substring matchers standing in for the become plugin's checks.
        def _check_password_prompt(line):
            if b'foo' in line:
                return True
            return False

        def _check_become_success(line):
            if b'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz' in line:
                return True
            return False

        def _check_incorrect_password(line):
            if b'incorrect password' in line:
                return True
            return False

        def _check_missing_password(line):
            if b'bad password' in line:
                return True
            return False

        conn.become.check_password_prompt = MagicMock(side_effect=_check_password_prompt)
        conn.become.check_become_success = MagicMock(side_effect=_check_become_success)
        conn.become.check_incorrect_password = MagicMock(side_effect=_check_incorrect_password)
        conn.become.check_missing_password = MagicMock(side_effect=_check_missing_password)

        # test examining output for prompt
        conn._flags = dict(
            become_prompt=False,
            become_success=False,
            become_error=False,
            become_nopasswd_error=False,
        )

        pc.prompt = True
        conn.become.prompt = True

        def get_option(option):
            if option == 'become_pass':
                return 'password'
            return None

        conn.become.get_option = get_option
        output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nfoo\nline 3\nthis should be the remainder', False)
        self.assertEqual(output, b'line 1\nline 2\nline 3\n')
        self.assertEqual(unprocessed, b'this should be the remainder')
        self.assertTrue(conn._flags['become_prompt'])
        self.assertFalse(conn._flags['become_success'])
        self.assertFalse(conn._flags['become_error'])
        self.assertFalse(conn._flags['become_nopasswd_error'])

        # test examining output for become prompt
        conn._flags = dict(
            become_prompt=False,
            become_success=False,
            become_error=False,
            become_nopasswd_error=False,
        )

        pc.prompt = False
        conn.become.prompt = False
        pc.success_key = u'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz'
        conn.become.success = u'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz'
        output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nBECOME-SUCCESS-abcdefghijklmnopqrstuvxyz\nline 3\n', False)
        self.assertEqual(output, b'line 1\nline 2\nline 3\n')
        self.assertEqual(unprocessed, b'')
        self.assertFalse(conn._flags['become_prompt'])
        self.assertTrue(conn._flags['become_success'])
        self.assertFalse(conn._flags['become_error'])
        self.assertFalse(conn._flags['become_nopasswd_error'])

        # test examining output for become failure
        conn._flags = dict(
            become_prompt=False,
            become_success=False,
            become_error=False,
            become_nopasswd_error=False,
        )

        pc.prompt = False
        conn.become.prompt = False
        pc.success_key = None
        output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nincorrect password\n', True)
        self.assertEqual(output, b'line 1\nline 2\nincorrect password\n')
        self.assertEqual(unprocessed, b'')
        self.assertFalse(conn._flags['become_prompt'])
        self.assertFalse(conn._flags['become_success'])
        self.assertTrue(conn._flags['become_error'])
        self.assertFalse(conn._flags['become_nopasswd_error'])

        # test examining output for missing password
        conn._flags = dict(
            become_prompt=False,
            become_success=False,
            become_error=False,
            become_nopasswd_error=False,
        )

        pc.prompt = False
        conn.become.prompt = False
        pc.success_key = None
        output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nbad password\n', True)
        self.assertEqual(output, b'line 1\nbad password\n')
        self.assertEqual(unprocessed, b'')
        self.assertFalse(conn._flags['become_prompt'])
        self.assertFalse(conn._flags['become_success'])
        self.assertFalse(conn._flags['become_error'])
        self.assertTrue(conn._flags['become_nopasswd_error'])

    @patch('time.sleep')
    @patch('os.path.exists')
    def test_plugins_connection_ssh_put_file(self, mock_ospe, mock_sleep):
        """put_file() picks SFTP vs SCP per scp_if_ssh and raises on failure/missing source."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = connection_loader.get('ssh', pc, new_stdin)
        conn._build_command = MagicMock()
        conn._bare_run = MagicMock()

        mock_ospe.return_value = True
        conn._build_command.return_value = 'some command to run'
        conn._bare_run.return_value = (0, '', '')
        conn.host = "some_host"

        conn.set_option('retries', 9)
        conn.set_option('ssh_transfer_method', None)  # unless set to None scp_if_ssh is ignored

        # Test with SCP_IF_SSH set to smart
        # Test when SFTP works
        conn.set_option('scp_if_ssh', 'smart')
        expected_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
        conn.put_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)

        # Test when SFTP doesn't work but SCP does
        conn._bare_run.side_effect = [(1, 'stdout', 'some errors'), (0, '', '')]
        conn.put_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)
        conn._bare_run.side_effect = None

        # test with SCP_IF_SSH enabled
        conn.set_option('scp_if_ssh', True)
        conn.put_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)

        conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)

        # test with SCPP_IF_SSH disabled
        conn.set_option('scp_if_ssh', False)
        expected_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
        conn.put_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)

        expected_in_data = b' '.join((b'put',
                                      to_bytes(shlex_quote('/path/to/in/file/with/unicode-fö〩')),
                                      to_bytes(shlex_quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
        conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)

        # test that a non-zero rc raises an error
        conn._bare_run.return_value = (1, 'stdout', 'some errors')
        self.assertRaises(AnsibleError, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')

        # test that a not-found path raises an error
        mock_ospe.return_value = False
        conn._bare_run.return_value = (0, 'stdout', '')
        self.assertRaises(AnsibleFileNotFound, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')

    @patch('time.sleep')
    def test_plugins_connection_ssh_fetch_file(self, mock_sleep):
        """fetch_file() mirrors put_file()'s SFTP/SCP selection and error handling."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = connection_loader.get('ssh', pc, new_stdin)
        conn._build_command = MagicMock()
        conn._bare_run = MagicMock()
        conn._load_name = 'ssh'

        conn._build_command.return_value = 'some command to run'
        conn._bare_run.return_value = (0, '', '')
        conn.host = "some_host"

        conn.set_option('retries', 9)
        conn.set_option('ssh_transfer_method', None)  # unless set to None scp_if_ssh is ignored

        # Test with SCP_IF_SSH set to smart
        # Test when SFTP works
        conn.set_option('scp_if_ssh', 'smart')
        expected_in_data = b' '.join((b'get', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
        conn.set_options({})
        conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)

        # Test when SFTP doesn't work but SCP does
        conn._bare_run.side_effect = [(1, 'stdout', 'some errors'), (0, '', '')]
        conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)

        # test with SCP_IF_SSH enabled
        conn._bare_run.side_effect = None
        conn.set_option('ssh_transfer_method', None)  # unless set to None scp_if_ssh is ignored
        conn.set_option('scp_if_ssh', 'True')
        conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)

        conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)

        # test with SCP_IF_SSH disabled
        conn.set_option('scp_if_ssh', False)
        expected_in_data = b' '.join((b'get', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
        conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)

        expected_in_data = b' '.join((b'get',
                                      to_bytes(shlex_quote('/path/to/in/file/with/unicode-fö〩')),
                                      to_bytes(shlex_quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
        conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)

        # test that a non-zero rc raises an error
        conn._bare_run.return_value = (1, 'stdout', 'some errors')
        self.assertRaises(AnsibleError, conn.fetch_file, '/path/to/bad/file', '/remote/path/to/file')
class MockSelector(object):
    """Stand-in for a ``selectors``-style selector whose public API is MagicMocks.

    ``register``/``unregister`` keep a running count of watched file objects in
    ``files_watched`` so tests can both drive ``select()`` via ``side_effect``
    lists and assert on registration bookkeeping.
    """

    def __init__(self):
        self.files_watched = 0
        self.register = MagicMock(side_effect=self._on_register)
        self.unregister = MagicMock(side_effect=self._on_unregister)
        self.close = MagicMock()
        self.get_map = MagicMock(side_effect=self._current_map)
        self.select = MagicMock()

    def _on_register(self, *args, **kwargs):
        # One more file object under watch.
        self.files_watched = self.files_watched + 1

    def _on_unregister(self, *args, **kwargs):
        # One file object released.
        self.files_watched = self.files_watched - 1

    def _current_map(self, *args, **kwargs):
        # Truthy while anything is still registered, mirroring get_map().
        return self.files_watched
@pytest.fixture
def mock_run_env(request, mocker):
    """Wire up an ssh Connection with fully mocked subprocess/pty/selector plumbing.

    Attaches to the requesting test class: ``pc`` (PlayContext), ``conn``
    (the ssh connection with its I/O-touching internals stubbed),
    ``mock_popen_res`` / ``mock_popen`` (the fake child process and the
    patched ``subprocess.Popen``), ``mock_selector`` (a MockSelector the
    plugin's DefaultSelector is replaced with) and ``mock_openpty``.
    Tests then steer ``_run()`` purely through ``side_effect`` lists.
    """
    pc = PlayContext()
    new_stdin = StringIO()

    conn = connection_loader.get('ssh', pc, new_stdin)
    conn.set_become_plugin(become_loader.get('sudo'))
    # Stub the pieces of _run() that would otherwise touch the real system.
    conn._send_initial_data = MagicMock()
    conn._examine_output = MagicMock()
    conn._terminate_process = MagicMock()
    conn._load_name = 'ssh'
    conn.sshpass_pipe = [MagicMock(), MagicMock()]
    request.cls.pc = pc
    request.cls.conn = conn

    # Fake Popen result: fixed fds so SelectorKey fixtures can refer to them.
    mock_popen_res = MagicMock()
    mock_popen_res.poll = MagicMock()
    mock_popen_res.wait = MagicMock()
    mock_popen_res.stdin = MagicMock()
    mock_popen_res.stdin.fileno.return_value = 1000
    mock_popen_res.stdout = MagicMock()
    mock_popen_res.stdout.fileno.return_value = 1001
    mock_popen_res.stderr = MagicMock()
    mock_popen_res.stderr.fileno.return_value = 1002
    mock_popen_res.returncode = 0
    request.cls.mock_popen_res = mock_popen_res

    mock_popen = mocker.patch('subprocess.Popen', return_value=mock_popen_res)
    request.cls.mock_popen = mock_popen

    request.cls.mock_selector = MockSelector()
    mocker.patch('ansible.module_utils.compat.selectors.DefaultSelector', lambda: request.cls.mock_selector)

    request.cls.mock_openpty = mocker.patch('pty.openpty')

    # Neutralise fd manipulation done around the sshpass pipe.
    mocker.patch('fcntl.fcntl')
    mocker.patch('os.write')
    mocker.patch('os.close')
@pytest.mark.usefixtures('mock_run_env')
class TestSSHConnectionRun(object):
    """Drive Connection._run() through mocked Popen/selector scenarios."""

    # FIXME:
    # These tests are little more than a smoketest. Need to enhance them
    # a bit to check that they're calling the relevant functions and making
    # complete coverage of the code paths
    def test_no_escalation(self):
        """Plain run with no become: stdout/stderr are collected and data is sent."""
        self.mock_popen_res.stdout.read.side_effect = [b"my_stdout\n", b"second_line"]
        self.mock_popen_res.stderr.read.side_effect = [b"my_stderr"]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True

        return_code, b_stdout, b_stderr = self.conn._run("ssh", "this is input data")
        assert return_code == 0
        assert b_stdout == b'my_stdout\nsecond_line'
        assert b_stderr == b'my_stderr'
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is True
        assert self.conn._send_initial_data.call_count == 1
        assert self.conn._send_initial_data.call_args[0][1] == 'this is input data'

    def test_with_password(self):
        """A set password triggers the sshpass path while output handling is unchanged."""
        # test with a password set to trigger the sshpass write
        self.pc.password = '12345'
        self.mock_popen_res.stdout.read.side_effect = [b"some data", b"", b""]
        self.mock_popen_res.stderr.read.side_effect = [b""]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True

        return_code, b_stdout, b_stderr = self.conn._run(["ssh", "is", "a", "cmd"], "this is more data")
        assert return_code == 0
        assert b_stdout == b'some data'
        assert b_stderr == b''
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is True
        assert self.conn._send_initial_data.call_count == 1
        assert self.conn._send_initial_data.call_args[0][1] == 'this is more data'

    # NOTE(review): parameter name 'sourice' looks like a typo for 'source';
    # harmless here since it is only used as a side_effect callback.
    def _password_with_prompt_examine_output(self, sourice, state, b_chunk, sudoable):
        """Fake _examine_output: flip the become flags as _run() walks its states."""
        if state == 'awaiting_prompt':
            self.conn._flags['become_prompt'] = True
        elif state == 'awaiting_escalation':
            self.conn._flags['become_success'] = True
        return (b'', b'')

    def test_password_with_prompt(self):
        """Prompt/Success chunks are consumed by the become handshake, leaving no output."""
        # test with password prompting enabled
        self.pc.password = None
        self.conn.become.prompt = b'Password:'
        self.conn._examine_output.side_effect = self._password_with_prompt_examine_output
        self.mock_popen_res.stdout.read.side_effect = [b"Password:", b"Success", b""]
        self.mock_popen_res.stderr.read.side_effect = [b""]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ),
             (SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True

        return_code, b_stdout, b_stderr = self.conn._run("ssh", "this is input data")
        assert return_code == 0
        assert b_stdout == b''
        assert b_stderr == b''
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is True
        assert self.conn._send_initial_data.call_count == 1
        assert self.conn._send_initial_data.call_args[0][1] == 'this is input data'

    def test_password_with_become(self):
        """Full become flow: prompt answered, success marker seen, remaining output kept."""
        # test with some become settings
        self.pc.prompt = b'Password:'
        self.conn.become.prompt = b'Password:'
        self.pc.become = True
        self.pc.success_key = 'BECOME-SUCCESS-abcdefg'
        self.conn.become._id = 'abcdefg'
        self.conn._examine_output.side_effect = self._password_with_prompt_examine_output
        self.mock_popen_res.stdout.read.side_effect = [b"Password:", b"BECOME-SUCCESS-abcdefg", b"abc"]
        self.mock_popen_res.stderr.read.side_effect = [b"123"]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True

        return_code, b_stdout, b_stderr = self.conn._run("ssh", "this is input data")
        self.mock_popen_res.stdin.flush.assert_called_once_with()
        assert return_code == 0
        assert b_stdout == b'abc'
        assert b_stderr == b'123'
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is True
        assert self.conn._send_initial_data.call_count == 1
        assert self.conn._send_initial_data.call_args[0][1] == 'this is input data'

    # NOTE(review): method name has a typo ('pasword'); renaming would be safe
    # for pytest discovery but is left untouched in this documentation pass.
    def test_pasword_without_data(self):
        """If the first Popen (with fresh ptys) fails, _run() retries without sending data."""
        # simulate no data input but Popen using new pty's fails
        self.mock_popen.return_value = None
        self.mock_popen.side_effect = [OSError(), self.mock_popen_res]

        # simulate no data input
        self.mock_openpty.return_value = (98, 99)
        self.mock_popen_res.stdout.read.side_effect = [b"some data", b"", b""]
        self.mock_popen_res.stderr.read.side_effect = [b""]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True

        return_code, b_stdout, b_stderr = self.conn._run("ssh", "")
        assert return_code == 0
        assert b_stdout == b'some data'
        assert b_stderr == b''
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is False
@pytest.mark.usefixtures('mock_run_env')
class TestSSHConnectionRetries(object):
def test_incorrect_password(self, monkeypatch):
self.conn.set_option('host_key_checking', False)
self.conn.set_option('retries', 5)
monkeypatch.setattr('time.sleep', lambda x: None)
self.mock_popen_res.stdout.read.side_effect = [b'']
self.mock_popen_res.stderr.read.side_effect = [b'Permission denied, please try again.\r\n']
type(self.mock_popen_res).returncode = PropertyMock(side_effect=[5] * 4)
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[],
]
self.mock_selector.get_map.side_effect = lambda: True
self.conn._build_command = MagicMock()
self.conn._build_command.return_value = [b'sshpass', b'-d41', b'ssh', b'-C']
exception_info = pytest.raises(AnsibleAuthenticationFailure, self.conn.exec_command, 'sshpass', 'some data')
assert exception_info.value.message == ('Invalid/incorrect username/password. Skipping remaining 5 retries to prevent account lockout: '
'Permission denied, please try again.')
assert self.mock_popen.call_count == 1
def test_retry_then_success(self, monkeypatch):
self.conn.set_option('host_key_checking', False)
self.conn.set_option('retries', 3)
monkeypatch.setattr('time.sleep', lambda x: None)
self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 3 + [0] * 4)
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[]
]
self.mock_selector.get_map.side_effect = lambda: True
self.conn._build_command = MagicMock()
self.conn._build_command.return_value = 'ssh'
return_code, b_stdout, b_stderr = self.conn.exec_command('ssh', 'some data')
assert return_code == 0
assert b_stdout == b'my_stdout\nsecond_line'
assert b_stderr == b'my_stderr'
def test_multiple_failures(self, monkeypatch):
self.conn.set_option('host_key_checking', False)
self.conn.set_option('retries', 9)
monkeypatch.setattr('time.sleep', lambda x: None)
self.mock_popen_res.stdout.read.side_effect = [b""] * 10
self.mock_popen_res.stderr.read.side_effect = [b""] * 10
type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 30)
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[],
] * 10
self.mock_selector.get_map.side_effect = lambda: True
self.conn._build_command = MagicMock()
self.conn._build_command.return_value = 'ssh'
pytest.raises(AnsibleConnectionFailure, self.conn.exec_command, 'ssh', 'some data')
assert self.mock_popen.call_count == 10
def test_abitrary_exceptions(self, monkeypatch):
self.conn.set_option('host_key_checking', False)
self.conn.set_option('retries', 9)
monkeypatch.setattr('time.sleep', lambda x: None)
self.conn._build_command = MagicMock()
self.conn._build_command.return_value = 'ssh'
self.mock_popen.side_effect = [Exception('bad')] * 10
pytest.raises(Exception, self.conn.exec_command, 'ssh', 'some data')
assert self.mock_popen.call_count == 10
def test_put_file_retries(self, monkeypatch):
self.conn.set_option('host_key_checking', False)
self.conn.set_option('retries', 3)
monkeypatch.setattr('time.sleep', lambda x: None)
monkeypatch.setattr('ansible.plugins.connection.ssh.os.path.exists', lambda x: True)
self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 4 + [0] * 4)
self.mock_selector.select.side_effect = [
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
[(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
[]
]
self.mock_selector.get_map.side_effect = lambda: True
self.conn._build_command = MagicMock()
self.conn._build_command.return_value = 'sftp'
return_code, b_stdout, b_stderr = self.conn.put_file('/path/to/in/file', '/path/to/dest/file')
assert return_code == 0
assert b_stdout == b"my_stdout\nsecond_line"
assert b_stderr == b"my_stderr"
assert self.mock_popen.call_count == 2
def test_fetch_file_retries(self, monkeypatch):
    """fetch_file retries after a transient rc-255 failure and then succeeds."""
    monkeypatch.setattr('time.sleep', lambda x: None)
    monkeypatch.setattr('ansible.plugins.connection.ssh.os.path.exists', lambda x: True)

    self.conn.set_option('host_key_checking', False)
    self.conn.set_option('retries', 3)
    self.conn._build_command = MagicMock(return_value='sftp')

    # Pass 1: empty output, rc 255 (transient failure).
    # Pass 2: real stdout/stderr, rc 0 (success).
    self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
    self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
    type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 4 + [0] * 4)

    stdout_ready = (SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)
    stderr_ready = (SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)
    self.mock_selector.select.side_effect = [
        [stdout_ready],
        [stderr_ready],
        [],
        [stdout_ready],
        [stdout_ready],
        [stderr_ready],
        [],
    ]
    self.mock_selector.get_map.side_effect = lambda: True

    return_code, b_stdout, b_stderr = self.conn.fetch_file('/path/to/in/file', '/path/to/dest/file')

    assert return_code == 0
    assert b_stdout == b"my_stdout\nsecond_line"
    assert b_stderr == b"my_stderr"
    # Failed first attempt + successful retry.
    assert self.mock_popen.call_count == 2
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,132 |
cron contains deprecated call to be removed in 2.12
|
##### SUMMARY
cron contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/modules/cron.py:626:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/modules/cron.py:631:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/modules/cron.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74132
|
https://github.com/ansible/ansible/pull/74197
|
d5ce6e6bed91aed3425062d9315f400d31881775
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
| 2021-04-05T20:33:53Z |
python
| 2021-04-08T19:50:27Z |
changelogs/fragments/cron-deprecations.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,132 |
cron contains deprecated call to be removed in 2.12
|
##### SUMMARY
cron contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/modules/cron.py:626:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/modules/cron.py:631:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/modules/cron.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74132
|
https://github.com/ansible/ansible/pull/74197
|
d5ce6e6bed91aed3425062d9315f400d31881775
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
| 2021-04-05T20:33:53Z |
python
| 2021-04-08T19:50:27Z |
docs/docsite/rst/porting_guides/porting_guide_core_2.12.rst
|
.. _porting_2.12_guide:
**************************
Ansible 2.12 Porting Guide
**************************
This section discusses the behavioral changes between Ansible 2.11 and Ansible 2.12.
It is intended to assist in updating your playbooks, plugins and other parts of your Ansible infrastructure so they will work with this version of Ansible.
We suggest you read this page along with `Ansible Changelog for 2.12 <https://github.com/ansible/ansible/blob/devel/changelogs/CHANGELOG-v2.12.rst>`_ to understand what updates you may need to make.
This document is part of a collection on porting. The complete list of porting guides can be found at :ref:`porting guides <porting_guides>`.
.. contents:: Topics
Playbook
========
No notable changes
Command Line
============
No notable changes
Deprecated
==========
No notable changes
Modules
=======
No notable changes
Modules removed
---------------
The following modules no longer exist:
* No notable changes
Deprecation notices
-------------------
No notable changes
Noteworthy module changes
-------------------------
* The ``cron`` module's ``name`` parameter is now required in all cases, and its deprecated ``reboot`` parameter has been removed. Use ``special_time: reboot`` instead of ``reboot: yes``.
Plugins
=======
No notable changes
Porting custom scripts
======================
No notable changes
Networking
==========
No notable changes
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,132 |
cron contains deprecated call to be removed in 2.12
|
##### SUMMARY
cron contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/modules/cron.py:626:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/modules/cron.py:631:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/modules/cron.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74132
|
https://github.com/ansible/ansible/pull/74197
|
d5ce6e6bed91aed3425062d9315f400d31881775
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
| 2021-04-05T20:33:53Z |
python
| 2021-04-08T19:50:27Z |
lib/ansible/modules/cron.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Dane Summers <[email protected]>
# Copyright: (c) 2013, Mike Grozak <[email protected]>
# Copyright: (c) 2013, Patrick Callahan <[email protected]>
# Copyright: (c) 2015, Evan Kaufman <[email protected]>
# Copyright: (c) 2015, Luca Berruti <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: cron
short_description: Manage cron.d and crontab entries
description:
- Use this module to manage crontab and environment variables entries. This module allows
you to create environment variables and named crontab entries, update, or delete them.
- 'When crontab jobs are managed: the module includes one line with the description of the
crontab entry C("#Ansible: <name>") corresponding to the "name" passed to the module,
which is used by future ansible/module calls to find/check the state. The "name"
parameter should be unique, and changing the "name" value will result in a new cron
task being created (or a different one being removed).'
- When environment variables are managed, no comment line is added, but, when the module
needs to find/check the state, it uses the "name" parameter to find the environment
variable definition line.
- When using symbols such as %, they must be properly escaped.
version_added: "0.9"
options:
name:
description:
- Description of a crontab entry or, if env is set, the name of environment variable.
- Required if I(state=absent).
- Note that if name is not set and I(state=present), then a
new crontab entry will always be created, regardless of existing ones.
- This parameter will always be required in future releases.
type: str
user:
description:
- The specific user whose crontab should be modified.
- When unset, this parameter defaults to the current user.
type: str
job:
description:
- The command to execute or, if env is set, the value of environment variable.
- The command should not contain line breaks.
- Required if I(state=present).
type: str
aliases: [ value ]
state:
description:
- Whether to ensure the job or environment variable is present or absent.
type: str
choices: [ absent, present ]
default: present
cron_file:
description:
- If specified, uses this file instead of an individual user's crontab.
- If this is a relative path, it is interpreted with respect to I(/etc/cron.d).
- If it is absolute, it will typically be C(/etc/crontab).
- Many linux distros expect (and some require) the filename portion to consist solely
of upper- and lower-case letters, digits, underscores, and hyphens.
- To use the I(cron_file) parameter you must specify the I(user) as well.
type: str
backup:
description:
- If set, create a backup of the crontab before it is modified.
The location of the backup is returned in the C(backup_file) variable by this module.
type: bool
default: no
minute:
description:
- Minute when the job should run (C(0-59), C(*), C(*/2), and so on).
type: str
default: "*"
hour:
description:
- Hour when the job should run (C(0-23), C(*), C(*/2), and so on).
type: str
default: "*"
day:
description:
- Day of the month the job should run (C(1-31), C(*), C(*/2), and so on).
type: str
default: "*"
aliases: [ dom ]
month:
description:
- Month of the year the job should run (C(1-12), C(*), C(*/2), and so on).
type: str
default: "*"
weekday:
description:
- Day of the week that the job should run (C(0-6) for Sunday-Saturday, C(*), and so on).
type: str
default: "*"
aliases: [ dow ]
reboot:
description:
- If the job should be run at reboot. This option is deprecated. Users should use I(special_time).
version_added: "1.0"
type: bool
default: no
special_time:
description:
- Special time specification nickname.
type: str
choices: [ annually, daily, hourly, monthly, reboot, weekly, yearly ]
version_added: "1.3"
disabled:
description:
- If the job should be disabled (commented out) in the crontab.
- Only has effect if I(state=present).
type: bool
default: no
version_added: "2.0"
env:
description:
- If set, manages a crontab's environment variable.
- New variables are added on top of crontab.
- I(name) and I(value) parameters are the name and the value of environment variable.
type: bool
default: false
version_added: "2.1"
insertafter:
description:
- Used with I(state=present) and I(env).
- If specified, the environment variable will be inserted after the declaration of specified environment variable.
type: str
version_added: "2.1"
insertbefore:
description:
- Used with I(state=present) and I(env).
- If specified, the environment variable will be inserted before the declaration of specified environment variable.
type: str
version_added: "2.1"
requirements:
- cron (or cronie on CentOS)
author:
- Dane Summers (@dsummersl)
- Mike Grozak (@rhaido)
- Patrick Callahan (@dirtyharrycallahan)
- Evan Kaufman (@EvanK)
- Luca Berruti (@lberruti)
notes:
- Supports C(check_mode).
'''
EXAMPLES = r'''
- name: Ensure a job that runs at 2 and 5 exists. Creates an entry like "0 5,2 * * ls -alh > /dev/null"
ansible.builtin.cron:
name: "check dirs"
minute: "0"
hour: "5,2"
job: "ls -alh > /dev/null"
- name: 'Ensure an old job is no longer present. Removes any job that is prefixed by "#Ansible: an old job" from the crontab'
ansible.builtin.cron:
name: "an old job"
state: absent
- name: Creates an entry like "@reboot /some/job.sh"
ansible.builtin.cron:
name: "a job for reboot"
special_time: reboot
job: "/some/job.sh"
- name: Creates an entry like "PATH=/opt/bin" on top of crontab
ansible.builtin.cron:
name: PATH
env: yes
job: /opt/bin
- name: Creates an entry like "APP_HOME=/srv/app" and insert it after PATH declaration
ansible.builtin.cron:
name: APP_HOME
env: yes
job: /srv/app
insertafter: PATH
- name: Creates a cron file under /etc/cron.d
ansible.builtin.cron:
name: yum autoupdate
weekday: "2"
minute: "0"
hour: "12"
user: root
job: "YUMINTERACTIVE=0 /usr/sbin/yum-autoupdate"
cron_file: ansible_yum-autoupdate
- name: Removes a cron file from under /etc/cron.d
ansible.builtin.cron:
name: "yum autoupdate"
cron_file: ansible_yum-autoupdate
state: absent
- name: Removes "APP_HOME" environment variable from crontab
ansible.builtin.cron:
name: APP_HOME
env: yes
state: absent
'''
RETURN = r'''#'''
import os
import platform
import pwd
import re
import sys
import tempfile
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.text.converters import to_bytes, to_native
from ansible.module_utils.six.moves import shlex_quote
class CronTabError(Exception):
    """Raised for unexpected failures while reading or removing crontab data."""
    pass
class CronTab(object):
    """
    CronTab object to write time based crontab file

    user - the user of the crontab (defaults to current user)
    cron_file - a cron file under /etc/cron.d, or an absolute path
    """

    def __init__(self, module, user=None, cron_file=None):
        self.module = module
        self.user = user
        self.root = (os.getuid() == 0)
        self.lines = None
        # Marker comment prefix that tags crontab entries managed by Ansible.
        self.ansible = "#Ansible: "
        # Raw text of the crontab as it existed on disk (used for diff output).
        self.n_existing = ''
        self.cron_cmd = self.module.get_bin_path('crontab', required=True)

        if cron_file:
            if os.path.isabs(cron_file):
                self.cron_file = cron_file
                self.b_cron_file = to_bytes(cron_file, errors='surrogate_or_strict')
            else:
                # Relative cron_file values are rooted under /etc/cron.d.
                self.cron_file = os.path.join('/etc/cron.d', cron_file)
                self.b_cron_file = os.path.join(b'/etc/cron.d', to_bytes(cron_file, errors='surrogate_or_strict'))
        else:
            self.cron_file = None

        self.read()

    def read(self):
        """Populate self.lines/self.n_existing from the cron file or the user crontab."""
        # Read in the crontab from the system
        self.lines = []
        if self.cron_file:
            # read the cronfile
            try:
                f = open(self.b_cron_file, 'rb')
                self.n_existing = to_native(f.read(), errors='surrogate_or_strict')
                self.lines = self.n_existing.splitlines()
                f.close()
            except IOError:
                # cron file does not exist
                return
            except Exception:
                raise CronTabError("Unexpected error:", sys.exc_info()[0])
        else:
            # using safely quoted shell for now, but this really should be two non-shell calls instead. FIXME
            (rc, out, err) = self.module.run_command(self._read_user_execute(), use_unsafe_shell=True)

            if rc != 0 and rc != 1:  # 1 can mean that there are no jobs.
                raise CronTabError("Unable to read crontab")

            self.n_existing = out

            lines = out.splitlines()
            count = 0
            for l in lines:
                # Strip the (up to three) auto-generated header lines some
                # crontab implementations prepend, both from self.lines and
                # from the saved raw text so diffs stay clean.
                if count > 2 or (not re.match(r'# DO NOT EDIT THIS FILE - edit the master and reinstall.', l) and
                                 not re.match(r'# \(/tmp/.*installed on.*\)', l) and
                                 not re.match(r'# \(.*version.*\)', l)):
                    self.lines.append(l)
                else:
                    pattern = re.escape(l) + '[\r\n]?'
                    self.n_existing = re.sub(pattern, '', self.n_existing, 1)
                count += 1

    def is_empty(self):
        """Return True when no crontab lines were read."""
        if len(self.lines) == 0:
            return True
        else:
            return False

    def write(self, backup_file=None):
        """
        Write the crontab to the system. Saves all information.

        When backup_file is given, only a copy is written and the method
        returns without installing anything.
        """
        if backup_file:
            fileh = open(backup_file, 'wb')
        elif self.cron_file:
            fileh = open(self.b_cron_file, 'wb')
        else:
            # User crontab: render to a temp file, then install it via crontab(1).
            filed, path = tempfile.mkstemp(prefix='crontab')
            os.chmod(path, int('0644', 8))
            fileh = os.fdopen(filed, 'wb')

        fileh.write(to_bytes(self.render()))
        fileh.close()

        # return if making a backup
        if backup_file:
            return

        # Add the entire crontab back to the user crontab
        if not self.cron_file:
            # quoting shell args for now but really this should be two non-shell calls instead. FIXME
            # NOTE: 'path' is only bound in the temp-file branch above, which is
            # exactly the branch that reaches this code (not self.cron_file).
            (rc, out, err) = self.module.run_command(self._write_execute(path), use_unsafe_shell=True)
            os.unlink(path)

            if rc != 0:
                self.module.fail_json(msg=err)

        # set SELinux permissions
        if self.module.selinux_enabled() and self.cron_file:
            self.module.set_default_selinux_context(self.cron_file, False)

    def do_comment(self, name):
        """Return the '#Ansible: <name>' marker comment for a job."""
        return "%s%s" % (self.ansible, name)

    def add_job(self, name, job):
        """Append a marker comment plus the job line."""
        # Add the comment
        self.lines.append(self.do_comment(name))

        # Add the job
        self.lines.append("%s" % (job))

    def update_job(self, name, job):
        """Replace the job tagged with 'name' by the given job line."""
        return self._update_job(name, job, self.do_add_job)

    def do_add_job(self, lines, comment, job):
        # Callback for _update_job: re-emit the marker and the new job line.
        lines.append(comment)

        lines.append("%s" % (job))

    def remove_job(self, name):
        """Remove the job tagged with 'name' (marker comment and job line)."""
        return self._update_job(name, "", self.do_remove_job)

    def do_remove_job(self, lines, comment, job):
        # Callback for _update_job: emit nothing, dropping the entry.
        return None

    def add_env(self, decl, insertafter=None, insertbefore=None):
        """Insert an env declaration, optionally relative to another variable."""
        if not (insertafter or insertbefore):
            # Default placement: top of the crontab.
            self.lines.insert(0, decl)
            return

        if insertafter:
            other_name = insertafter
        elif insertbefore:
            other_name = insertbefore

        other_decl = self.find_env(other_name)
        if len(other_decl) > 0:
            if insertafter:
                index = other_decl[0] + 1
            elif insertbefore:
                index = other_decl[0]
            self.lines.insert(index, decl)
            return

        self.module.fail_json(msg="Variable named '%s' not found." % other_name)

    def update_env(self, name, decl):
        """Replace the declaration of env variable 'name' in place."""
        return self._update_env(name, decl, self.do_add_env)

    def do_add_env(self, lines, decl):
        # Callback for _update_env: keep the (new) declaration.
        lines.append(decl)

    def remove_env(self, name):
        """Remove the declaration of env variable 'name'."""
        return self._update_env(name, '', self.do_remove_env)

    def do_remove_env(self, lines, decl):
        # Callback for _update_env: emit nothing, dropping the declaration.
        return None

    def remove_job_file(self):
        """Delete the managed cron file; True if removed, False if absent."""
        try:
            os.unlink(self.cron_file)
            return True
        except OSError:
            # cron file does not exist
            return False
        except Exception:
            raise CronTabError("Unexpected error:", sys.exc_info()[0])

    def find_job(self, name, job=None):
        """Locate a job by its Ansible marker, or by exact job text.

        Returns [comment, job_line] on a marker match, a 3-element list
        when matched by job text (marker inserted/updated as a side effect),
        or [] when not found.
        """
        # attempt to find job by 'Ansible:' header comment
        comment = None
        for l in self.lines:
            if comment is not None:
                if comment == name:
                    return [comment, l]
                else:
                    comment = None
            elif re.match(r'%s' % self.ansible, l):
                comment = re.sub(r'%s' % self.ansible, '', l)

        # failing that, attempt to find job by exact match
        if job:
            for i, l in enumerate(self.lines):
                if l == job:
                    # if no leading ansible header, insert one
                    if not re.match(r'%s' % self.ansible, self.lines[i - 1]):
                        self.lines.insert(i, self.do_comment(name))
                        return [self.lines[i], l, True]
                    # if a leading blank ansible header AND job has a name, update header
                    elif name and self.lines[i - 1] == self.do_comment(None):
                        self.lines[i - 1] = self.do_comment(name)
                        return [self.lines[i - 1], l, True]

        return []

    def find_env(self, name):
        """Return [index, line] of the 'name=' declaration, or []."""
        for index, l in enumerate(self.lines):
            if re.match(r'^%s=' % name, l):
                return [index, l]

        return []

    def get_cron_job(self, minute, hour, day, month, weekday, job, special, disabled):
        """Build the crontab line; cron.d files additionally carry the user field."""
        # normalize any leading/trailing newlines (ansible/ansible-modules-core#3791)
        job = job.strip('\r\n')

        if disabled:
            disable_prefix = '#'
        else:
            disable_prefix = ''

        if special:
            if self.cron_file:
                return "%s@%s %s %s" % (disable_prefix, special, self.user, job)
            else:
                return "%s@%s %s" % (disable_prefix, special, job)
        else:
            if self.cron_file:
                return "%s%s %s %s %s %s %s %s" % (disable_prefix, minute, hour, day, month, weekday, self.user, job)
            else:
                return "%s%s %s %s %s %s %s" % (disable_prefix, minute, hour, day, month, weekday, job)

    def get_jobnames(self):
        """List the names of all Ansible-managed jobs."""
        jobnames = []

        for l in self.lines:
            if re.match(r'%s' % self.ansible, l):
                jobnames.append(re.sub(r'%s' % self.ansible, '', l))

        return jobnames

    def get_envnames(self):
        """List the names of all environment variable declarations."""
        envnames = []

        for l in self.lines:
            if re.match(r'^\S+=', l):
                envnames.append(l.split('=')[0])

        return envnames

    def _update_job(self, name, job, addlinesfunction):
        # Rewrite self.lines, delegating what to emit for the matched entry
        # to addlinesfunction (add/replace vs. remove).
        ansiblename = self.do_comment(name)
        newlines = []
        comment = None

        for l in self.lines:
            if comment is not None:
                addlinesfunction(newlines, comment, job)
                comment = None
            elif l == ansiblename:
                comment = l
            else:
                newlines.append(l)

        self.lines = newlines

        if len(newlines) == 0:
            return True
        else:
            return False  # TODO add some more error testing

    def _update_env(self, name, decl, addenvfunction):
        # Rewrite self.lines, delegating what to emit for the matched
        # declaration to addenvfunction (replace vs. remove).
        newlines = []

        for l in self.lines:
            if re.match(r'^%s=' % name, l):
                addenvfunction(newlines, decl)
            else:
                newlines.append(l)

        self.lines = newlines

    def render(self):
        """
        Render this crontab as it would be in the crontab.
        """
        crons = []
        for cron in self.lines:
            crons.append(cron)

        result = '\n'.join(crons)
        if result:
            # Ensure exactly one trailing newline; cron requires it.
            result = result.rstrip('\r\n') + '\n'
        return result

    def _read_user_execute(self):
        """
        Returns the command line for reading a crontab
        """
        user = ''
        if self.user:
            # SunOS/AIX/HP-UX crontab implementations take the user differently
            # (no portable -u flag), hence the per-platform command shapes.
            if platform.system() == 'SunOS':
                return "su %s -c '%s -l'" % (shlex_quote(self.user), shlex_quote(self.cron_cmd))
            elif platform.system() == 'AIX':
                return "%s -l %s" % (shlex_quote(self.cron_cmd), shlex_quote(self.user))
            elif platform.system() == 'HP-UX':
                return "%s %s %s" % (self.cron_cmd, '-l', shlex_quote(self.user))
            elif pwd.getpwuid(os.getuid())[0] != self.user:
                user = '-u %s' % shlex_quote(self.user)
        return "%s %s %s" % (self.cron_cmd, user, '-l')

    def _write_execute(self, path):
        """
        Return the command line for writing a crontab
        """
        user = ''
        if self.user:
            if platform.system() in ['SunOS', 'HP-UX', 'AIX']:
                return "chown %s %s ; su '%s' -c '%s %s'" % (
                    shlex_quote(self.user), shlex_quote(path), shlex_quote(self.user), self.cron_cmd, shlex_quote(path))
            elif pwd.getpwuid(os.getuid())[0] != self.user:
                user = '-u %s' % shlex_quote(self.user)
        return "%s %s %s" % (self.cron_cmd, user, shlex_quote(path))
def main():
    """Entry point: manage crontab/cron.d entries and environment variables.

    Results are always reported via module.exit_json()/fail_json(); this
    function does not return normally.
    """
    # The following example playbooks:
    #
    # - cron: name="check dirs" hour="5,2" job="ls -alh > /dev/null"
    #
    # - name: do the job
    #   cron: name="do the job" hour="5,2" job="/some/dir/job.sh"
    #
    # - name: no job
    #   cron: name="an old job" state=absent
    #
    # - name: sets env
    #   cron: name="PATH" env=yes value="/bin:/usr/bin"
    #
    # Would produce:
    # PATH=/bin:/usr/bin
    # # Ansible: check dirs
    # * * 5,2 * * ls -alh > /dev/null
    # # Ansible: do the job
    # * * 5,2 * * /some/dir/job.sh

    module = AnsibleModule(
        argument_spec=dict(
            # 'name' was deprecated as optional since 2.12 was announced;
            # the scheduled removal makes it mandatory now.
            name=dict(type='str', required=True),
            user=dict(type='str'),
            job=dict(type='str', aliases=['value']),
            cron_file=dict(type='str'),
            state=dict(type='str', default='present', choices=['present', 'absent']),
            backup=dict(type='bool', default=False),
            minute=dict(type='str', default='*'),
            hour=dict(type='str', default='*'),
            day=dict(type='str', default='*', aliases=['dom']),
            month=dict(type='str', default='*'),
            weekday=dict(type='str', default='*', aliases=['dow']),
            # The deprecated 'reboot' parameter has been removed (was slated
            # for removal in 2.12); use special_time=reboot instead.
            special_time=dict(type='str', choices=["reboot", "yearly", "annually", "monthly", "weekly", "daily", "hourly"]),
            disabled=dict(type='bool', default=False),
            env=dict(type='bool', default=False),
            insertafter=dict(type='str'),
            insertbefore=dict(type='str'),
        ),
        supports_check_mode=True,
        mutually_exclusive=[
            ['insertafter', 'insertbefore'],
        ],
    )

    name = module.params['name']
    user = module.params['user']
    job = module.params['job']
    cron_file = module.params['cron_file']
    state = module.params['state']
    backup = module.params['backup']
    minute = module.params['minute']
    hour = module.params['hour']
    day = module.params['day']
    month = module.params['month']
    weekday = module.params['weekday']
    special_time = module.params['special_time']
    disabled = module.params['disabled']
    env = module.params['env']
    insertafter = module.params['insertafter']
    insertbefore = module.params['insertbefore']
    do_install = state == 'present'

    changed = False
    res_args = dict()
    warnings = list()

    if cron_file:
        # Some cron implementations ignore cron.d files with other characters
        # in the name, so warn (but do not fail) on suspicious filenames.
        cron_file_basename = os.path.basename(cron_file)
        if not re.search(r'^[A-Z0-9_-]+$', cron_file_basename, re.I):
            warnings.append('Filename portion of cron_file ("%s") should consist' % cron_file_basename +
                            ' solely of upper- and lower-case letters, digits, underscores, and hyphens')

    # Ensure all files generated are only writable by the owning user.  Primarily relevant for the cron_file option.
    os.umask(int('022', 8))
    crontab = CronTab(module, user, cron_file)

    module.debug('cron instantiated - name: "%s"' % name)

    if module._diff:
        diff = dict()
        diff['before'] = crontab.n_existing
        if crontab.cron_file:
            diff['before_header'] = crontab.cron_file
        else:
            if crontab.user:
                diff['before_header'] = 'crontab for user "%s"' % crontab.user
            else:
                diff['before_header'] = 'crontab'

    # --- user input validation ---

    if special_time and \
            (True in [(x != '*') for x in [minute, hour, day, month, weekday]]):
        module.fail_json(msg="You must specify time and date fields or special time.")

    # cannot support special_time on solaris
    if special_time and platform.system() == 'SunOS':
        module.fail_json(msg="Solaris does not support special_time=... or @reboot")

    if cron_file and do_install:
        if not user:
            module.fail_json(msg="To use cron_file=... parameter you must specify user=... as well")

    if job is None and do_install:
        module.fail_json(msg="You must specify 'job' to install a new cron job or variable")

    if (insertafter or insertbefore) and not env and do_install:
        module.fail_json(msg="Insertafter and insertbefore parameters are valid only with env=yes")

    # if requested make a backup before making a change
    if backup and not module.check_mode:
        (backuph, backup_file) = tempfile.mkstemp(prefix='crontab')
        crontab.write(backup_file)

    if crontab.cron_file and not do_install:
        # state=absent with cron_file removes the whole file.
        if module._diff:
            diff['after'] = ''
            diff['after_header'] = '/dev/null'
        else:
            diff = dict()
        if module.check_mode:
            changed = os.path.isfile(crontab.cron_file)
        else:
            changed = crontab.remove_job_file()
        module.exit_json(changed=changed, cron_file=cron_file, state=state, diff=diff)

    if env:
        if ' ' in name:
            module.fail_json(msg="Invalid name for environment variable")
        decl = '%s="%s"' % (name, job)
        old_decl = crontab.find_env(name)

        if do_install:
            if len(old_decl) == 0:
                crontab.add_env(decl, insertafter, insertbefore)
                changed = True
            if len(old_decl) > 0 and old_decl[1] != decl:
                crontab.update_env(name, decl)
                changed = True
        else:
            if len(old_decl) > 0:
                crontab.remove_env(name)
                changed = True
    else:
        if do_install:
            for char in ['\r', '\n']:
                if char in job.strip('\r\n'):
                    warnings.append('Job should not contain line breaks')
                    break

            job = crontab.get_cron_job(minute, hour, day, month, weekday, job, special_time, disabled)
            old_job = crontab.find_job(name, job)

            if len(old_job) == 0:
                crontab.add_job(name, job)
                changed = True
            if len(old_job) > 0 and old_job[1] != job:
                crontab.update_job(name, job)
                changed = True
            if len(old_job) > 2:
                crontab.update_job(name, job)
                changed = True
        else:
            old_job = crontab.find_job(name)

            if len(old_job) > 0:
                crontab.remove_job(name)
                changed = True

    # no changes to env/job, but existing crontab needs a terminating newline
    if not changed and crontab.n_existing != '':
        if not (crontab.n_existing.endswith('\r') or crontab.n_existing.endswith('\n')):
            changed = True

    res_args = dict(
        jobs=crontab.get_jobnames(),
        envs=crontab.get_envnames(),
        warnings=warnings,
        changed=changed
    )

    if changed:
        if not module.check_mode:
            crontab.write()
        if module._diff:
            diff['after'] = crontab.render()
            if crontab.cron_file:
                diff['after_header'] = crontab.cron_file
            else:
                if crontab.user:
                    diff['after_header'] = 'crontab for user "%s"' % crontab.user
                else:
                    diff['after_header'] = 'crontab'

            res_args['diff'] = diff

    # retain the backup only if crontab or cron file have changed
    if backup and not module.check_mode:
        if changed:
            res_args['backup_file'] = backup_file
        else:
            os.unlink(backup_file)

    if cron_file:
        res_args['cron_file'] = cron_file

    module.exit_json(**res_args)

    # --- should never get here
    module.exit_json(msg="Unable to execute cron task.")


if __name__ == '__main__':
    main()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,132 |
cron contains deprecated call to be removed in 2.12
|
##### SUMMARY
cron contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/modules/cron.py:626:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/modules/cron.py:631:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/modules/cron.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74132
|
https://github.com/ansible/ansible/pull/74197
|
d5ce6e6bed91aed3425062d9315f400d31881775
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
| 2021-04-05T20:33:53Z |
python
| 2021-04-08T19:50:27Z |
test/integration/targets/cron/tasks/main.yml
|
- name: add cron task (check mode enabled, cron task not already created)
cron:
name: test cron task
job: 'date > {{ remote_dir }}/cron_canary1'
check_mode: yes
register: check_mode_enabled_state_present
- assert:
that: check_mode_enabled_state_present is changed
- name: add cron task (check mode disabled, task hasn't already been created)
cron:
name: test cron task
job: 'date > {{ remote_dir }}/cron_canary1'
register: add_cron_task
- assert:
that: add_cron_task is changed
- name: add cron task (check mode enabled, cron task already exists)
cron:
name: test cron task
job: 'date > {{ remote_dir }}/cron_canary1'
check_mode: yes
register: check_mode_enabled_state_present_cron_task_already_exists
- assert:
that: check_mode_enabled_state_present_cron_task_already_exists is not changed
- name: add cron task (check mode disabled, cron task already created)
cron:
name: test cron task
job: 'date > {{ remote_dir }}/cron_canary1'
register: cron_task_already_created
- assert:
that: cron_task_already_created is not changed
- block:
- name: wait for canary creation
wait_for:
path: '{{ remote_dir }}/cron_canary1'
timeout: '{{ 20 if faketime_pkg else 70 }}'
register: wait_canary
always:
- name: display some logs in case of failure
command: 'journalctl -u {{ cron_service }}'
when: wait_canary is failed and ansible_service_mgr == 'systemd'
- debug:
msg: 'elapsed time waiting for canary: {{ wait_canary.elapsed }}'
- name: Check check_mode
cron:
name: test cron task
job: 'date > {{ remote_dir }}/cron_canary1'
state: absent
check_mode: yes
register: check_check_mode
- assert:
that: check_check_mode is changed
- name: Remove a cron task
cron:
name: test cron task
job: 'date > {{ remote_dir }}/cron_canary1'
state: absent
register: remove_task
- assert:
that: remove_task is changed
- name: 'cron task missing: check idempotence (check mode enabled, state=absent)'
cron:
name: test cron task
job: 'date > {{ remote_dir }}/cron_canary1'
state: absent
register: check_mode_enabled_remove_task_idempotence
- assert:
that: check_mode_enabled_remove_task_idempotence is not changed
- name: 'cron task missing: check idempotence (check mode disabled, state=absent)'
cron:
name: test cron task
job: 'date > {{ remote_dir }}/cron_canary1'
state: absent
register: remove_task_idempotence
- assert:
that: remove_task_idempotence is not changed
- name: Check that removing a cron task with cron_file and without specifying an user is allowed (#58493)
cron:
cron_file: unexistent_cron_file
state: absent
register: remove_cron_file
- assert:
that: remove_cron_file is not changed
- name: Non regression test - cron file should not be empty after adding var (#71207)
when: ansible_distribution != 'Alpine'
block:
- name: Cron file creation
cron:
cron_file: cron_filename
name: "simple cron job"
job: 'echo "_o/"'
user: root
- name: Add var to the cron file
cron:
cron_file: cron_filename
env: yes
name: FOO
value: bar
user: root
- name: "Ensure cron_file still contains job string"
replace:
path: /etc/cron.d/cron_filename
regexp: "_o/"
replace: "OK"
register: find_chars
failed_when: (find_chars is not changed) or (find_chars is failed)
# BusyBox does not have /etc/cron.d
- name: Removing a cron file when the name is specified is allowed (#57471)
when: ansible_distribution != 'Alpine'
block:
- name: Cron file creation
cron:
cron_file: cron_filename
name: "integration test cron"
job: 'ls'
user: root
- name: Cron file deletion
cron:
cron_file: cron_filename
name: "integration test cron"
state: absent
- name: Check successful file deletion
stat:
path: /etc/cron.d/cron_filename
register: cron_file_stats
- assert:
that: not cron_file_stats.stat.exists
- name: Allow non-ascii chars in job (#69492)
when: ansible_distribution != 'Alpine'
block:
- name: Cron file creation
cron:
cron_file: cron_filename
name: "cron job that contain non-ascii chars in job (これは日本語です; This is Japanese)"
job: 'echo "うどんは好きだがお化け👻は苦手である。"'
user: root
- name: "Ensure cron_file contains job string"
replace:
path: /etc/cron.d/cron_filename
regexp: "うどんは好きだがお化け👻は苦手である。"
replace: "それは機密情報🔓です。"
register: find_chars
failed_when: (find_chars is not changed) or (find_chars is failed)
- name: Cron file deletion
cron:
cron_file: cron_filename
name: "cron job that contain non-ascii chars in job (これは日本語です; This is Japanese)"
state: absent
- name: Check file successful deletion
stat:
path: /etc/cron.d/cron_filename
register: cron_file_stats
- assert:
that: not cron_file_stats.stat.exists
- name: Allow non-ascii chars in cron_file (#69492)
when: ansible_distribution != 'Alpine'
block:
- name: Cron file creation with non-ascii filename (これは日本語です; This is Japanese)
cron:
cron_file: 'なせば大抵なんとかなる👊'
name: "integration test cron"
job: 'echo "Hello, ansible!"'
user: root
- name: Check file exists
stat:
path: "/etc/cron.d/なせば大抵なんとかなる👊"
register: cron_file_stats
- assert:
that: cron_file_stats.stat.exists
- name: Cron file deletion
cron:
cron_file: 'なせば大抵なんとかなる👊'
name: "integration test cron"
state: absent
- name: Check file successful deletion
stat:
path: "/etc/cron.d/なせば大抵なんとかなる👊"
register: cron_file_stats
- assert:
that: not cron_file_stats.stat.exists
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,132 |
cron contains deprecated call to be removed in 2.12
|
##### SUMMARY
cron contains a call to Display.deprecated or AnsibleModule.deprecate that is scheduled for removal
```
lib/ansible/modules/cron.py:626:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/modules/cron.py:631:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/modules/cron.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74132
|
https://github.com/ansible/ansible/pull/74197
|
d5ce6e6bed91aed3425062d9315f400d31881775
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
| 2021-04-05T20:33:53Z |
python
| 2021-04-08T19:50:27Z |
test/sanity/ignore.txt
|
docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst no-smart-quotes
examples/play.yml shebang
examples/scripts/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/my_test.py shebang # example module but not in a normal module location
examples/scripts/my_test_facts.py shebang # example module but not in a normal module location
examples/scripts/my_test_info.py shebang # example module but not in a normal module location
examples/scripts/upgrade_to_ps3.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/upgrade_to_ps3.ps1 pslint:PSUseApprovedVerbs
hacking/build-ansible.py shebang # only run by release engineers, Python 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.6!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.7!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-3.5!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.6!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.7!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-3.5!skip # release and docs process only, 3.6+ required
lib/ansible/cli/console.py pylint:blacklisted-name
lib/ansible/cli/galaxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/cli/scripts/ansible_cli_stub.py pylint:ansible-deprecated-version
lib/ansible/cli/scripts/ansible_cli_stub.py shebang
lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang
lib/ansible/config/base.yml no-unwanted-files
lib/ansible/executor/playbook_executor.py pylint:blacklisted-name
lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/task_queue_manager.py pylint:blacklisted-name
lib/ansible/galaxy/collection/__init__.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/collection/galaxy_api_proxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/dataclasses.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/providers.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/keyword_desc.yml no-unwanted-files
lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py pylint:blacklisted-name
lib/ansible/module_utils/compat/selinux.py import-2.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-2.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.5!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/distro/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py no-assert
lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
lib/ansible/module_utils/facts/__init__.py empty-init # breaks namespacing, deprecate and eventually remove
lib/ansible/module_utils/facts/network/linux.py pylint:blacklisted-name
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.ArgvParser.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSProvideCommentHelp # need to agree on best format for comment location
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSProvideCommentHelp
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.LinkUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/pycompat24.py no-get-exception
lib/ansible/module_utils/six/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/six/__init__.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py no-basestring
lib/ansible/module_utils/six/__init__.py no-dict-iteritems
lib/ansible/module_utils/six/__init__.py no-dict-iterkeys
lib/ansible/module_utils/six/__init__.py no-dict-itervalues
lib/ansible/module_utils/six/__init__.py pylint:self-assigning-variable
lib/ansible/module_utils/six/__init__.py replace-urlopen
lib/ansible/module_utils/urls.py pylint:blacklisted-name
lib/ansible/module_utils/urls.py replace-urlopen
lib/ansible/modules/apt.py validate-modules:parameter-invalid
lib/ansible/modules/apt_key.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/apt_repository.py validate-modules:parameter-invalid
lib/ansible/modules/assemble.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/async_status.py use-argspec-type-path
lib/ansible/modules/async_status.py validate-modules!skip
lib/ansible/modules/async_wrapper.py ansible-doc!skip # not an actual module
lib/ansible/modules/async_wrapper.py pylint:ansible-bad-function # ignore, required
lib/ansible/modules/async_wrapper.py use-argspec-type-path
lib/ansible/modules/blockinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/blockinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/command.py validate-modules:doc-default-does-not-match-spec # _uses_shell is undocumented
lib/ansible/modules/command.py validate-modules:doc-missing-type
lib/ansible/modules/command.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/command.py validate-modules:undocumented-parameter
lib/ansible/modules/copy.py pylint:blacklisted-name
lib/ansible/modules/copy.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/copy.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/copy.py validate-modules:undocumented-parameter
lib/ansible/modules/cron.py pylint:ansible-deprecated-version
lib/ansible/modules/dnf.py validate-modules:doc-required-mismatch
lib/ansible/modules/dnf.py validate-modules:parameter-invalid
lib/ansible/modules/file.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/file.py validate-modules:undocumented-parameter
lib/ansible/modules/find.py use-argspec-type-path # fix needed
lib/ansible/modules/git.py pylint:blacklisted-name
lib/ansible/modules/git.py use-argspec-type-path
lib/ansible/modules/git.py validate-modules:doc-missing-type
lib/ansible/modules/git.py validate-modules:doc-required-mismatch
lib/ansible/modules/hostname.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/iptables.py pylint:blacklisted-name
lib/ansible/modules/lineinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/package_facts.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/pip.py pylint:blacklisted-name
lib/ansible/modules/pip.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/replace.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:use-run-command-not-popen
lib/ansible/modules/stat.py validate-modules:doc-default-does-not-match-spec # get_md5 is undocumented
lib/ansible/modules/stat.py validate-modules:parameter-invalid
lib/ansible/modules/stat.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/stat.py validate-modules:undocumented-parameter
lib/ansible/modules/systemd.py validate-modules:parameter-invalid
lib/ansible/modules/systemd.py validate-modules:return-syntax-error
lib/ansible/modules/sysvinit.py validate-modules:return-syntax-error
lib/ansible/modules/unarchive.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/uri.py pylint:blacklisted-name
lib/ansible/modules/uri.py validate-modules:doc-required-mismatch
lib/ansible/modules/user.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/user.py validate-modules:doc-default-incompatible-type
lib/ansible/modules/user.py validate-modules:use-run-command-not-popen
lib/ansible/modules/yum.py pylint:blacklisted-name
lib/ansible/modules/yum.py validate-modules:parameter-invalid
lib/ansible/modules/yum_repository.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/yum_repository.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/yum_repository.py validate-modules:undocumented-parameter
lib/ansible/parsing/vault/__init__.py pylint:blacklisted-name
lib/ansible/playbook/__init__.py pylint:ansible-deprecated-version
lib/ansible/playbook/base.py pylint:blacklisted-name
lib/ansible/playbook/collectionsearch.py required-and-default-attributes # https://github.com/ansible/ansible/issues/61460
lib/ansible/playbook/conditional.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:blacklisted-name
lib/ansible/playbook/play_context.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/async_status.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/normal.py action-plugin-docs # default action plugin for modules without a dedicated action plugin
lib/ansible/plugins/cache/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/cache/base.py ansible-doc!skip # not a plugin, but a stub for backwards compatibility
lib/ansible/plugins/inventory/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/inventory/script.py pylint:ansible-deprecated-version
lib/ansible/plugins/lookup/sequence.py pylint:blacklisted-name
lib/ansible/plugins/strategy/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/strategy/__init__.py pylint:blacklisted-name
lib/ansible/plugins/strategy/linear.py pylint:blacklisted-name
lib/ansible/vars/fact_cache.py pylint:ansible-deprecated-version
lib/ansible/vars/hostvars.py pylint:blacklisted-name
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-function # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import-from # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/collections_plugin_namespace/collection_root/ansible_collections/my_ns/my_col/plugins/lookup/lookup_no_future_boilerplate.py future-import-boilerplate # testing Python 2.x implicit relative imports
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util2.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util3.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/my_module.py pylint:relative-beyond-top-level
test/integration/targets/gathering_facts/library/bogus_facts shebang
test/integration/targets/gathering_facts/library/facts_one shebang
test/integration/targets/gathering_facts/library/facts_two shebang
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps1 pslint!skip
test/integration/targets/incidental_win_reboot/templates/post_reboot.ps1 pslint!skip
test/integration/targets/json_cleanup/library/bad_json shebang
test/integration/targets/lookup_csvfile/files/crlf.csv line-endings
test/integration/targets/lookup_ini/lookup-8859-15.ini no-smart-quotes
test/integration/targets/module_precedence/lib_with_extension/a.ini shebang
test/integration/targets/module_precedence/lib_with_extension/ping.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/a.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/ping.ini shebang
test/integration/targets/module_utils/library/test.py future-import-boilerplate # allow testing of Python 2.x implicit relative imports
test/integration/targets/module_utils/module_utils/bar0/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/bar.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/yak/zebra/foo.py pylint:blacklisted-name
test/integration/targets/old_style_modules_posix/library/helloworld.sh shebang
test/integration/targets/template/files/encoding_1252_utf-8.expected no-smart-quotes
test/integration/targets/template/files/encoding_1252_windows-1252.expected no-smart-quotes
test/integration/targets/template/files/foo.dos.txt line-endings
test/integration/targets/template/templates/encoding_1252.j2 no-smart-quotes
test/integration/targets/unicode/unicode.yml no-smart-quotes
test/integration/targets/win_exec_wrapper/library/test_fail.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_exec_wrapper/tasks/main.yml no-smart-quotes # We are explicitly testing smart quote support for env vars
test/integration/targets/win_fetch/tasks/main.yml no-smart-quotes # We are explictly testing smart quotes in the file name to fetch
test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_script/files/test_script.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_creates_file.ps1 pslint:PSAvoidUsingCmdletAliases
test/integration/targets/win_script/files/test_script_removes_file.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_script/files/test_script_with_args.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip
test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt test-constraints
test/lib/ansible_test/_data/requirements/sanity.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py use-compat-six
test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
test/lib/ansible_test/_data/setup/windows-httptester.ps1 pslint:PSCustomUseLiteralPath
test/support/integration/plugins/module_utils/aws/core.py pylint:property-with-parameters
test/support/integration/plugins/module_utils/cloud.py future-import-boilerplate
test/support/integration/plugins/module_utils/cloud.py metaclass-boilerplate
test/support/integration/plugins/module_utils/cloud.py pylint:isinstance-second-argument-not-valid-type
test/support/integration/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/integration/plugins/module_utils/database.py future-import-boilerplate
test/support/integration/plugins/module_utils/database.py metaclass-boilerplate
test/support/integration/plugins/module_utils/mysql.py future-import-boilerplate
test/support/integration/plugins/module_utils/mysql.py metaclass-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/integration/plugins/module_utils/postgres.py future-import-boilerplate
test/support/integration/plugins/module_utils/postgres.py metaclass-boilerplate
test/support/integration/plugins/modules/docker_swarm.py pylint:ansible-deprecated-version
test/support/integration/plugins/modules/lvg.py pylint:blacklisted-name
test/support/integration/plugins/modules/timezone.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py pep8:E203
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py pep8:E501
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pep8:E231
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py metaclass-boilerplate
test/support/windows-integration/plugins/modules/async_status.ps1 pslint!skip
test/support/windows-integration/plugins/modules/setup.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_copy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_dsc.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_feature.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_find.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_lineinfile.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_regedit.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_security_policy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_shell.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_wait_for.ps1 pslint!skip
test/units/executor/test_play_iterator.py pylint:blacklisted-name
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-no-version
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-version
test/units/module_utils/basic/test_run_command.py pylint:blacklisted-name
test/units/module_utils/urls/fixtures/multipart.txt line-endings # Fixture for HTTP tests that use CRLF
test/units/module_utils/urls/test_Request.py replace-urlopen
test/units/module_utils/urls/test_fetch_url.py replace-urlopen
test/units/modules/test_apt.py pylint:blacklisted-name
test/units/parsing/vault/test_vault.py pylint:blacklisted-name
test/units/playbook/role/test_role.py pylint:blacklisted-name
test/units/plugins/test_plugins.py pylint:blacklisted-name
test/units/template/test_templar.py pylint:blacklisted-name
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py pylint:relative-beyond-top-level
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py future-import-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py metaclass-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/test_collection_loader.py pylint:undefined-variable # magic runtime local var splatting
test/utils/shippable/check_matrix.py replace-urlopen
test/utils/shippable/timing.py shebang
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,120 |
Python 2.6 unit and sanity test failures during SNI brownouts
|
### Summary
I know there is some ongoing work to mitigate these problems, but since I'm not aware which parts have already been addressed and which not, I want to share some common failures here which I've seen during the last days:
1. When running `ansible-test sanity --color -v --junit --coverage-check --changed --docker --base-branch origin/main --skip-test pylint --skip-test ansible-doc --skip-test validate-modules --allow-disabled` in CI, the Python 2.6 `import` test fails:
```
02:05 ERROR: Command "/root/ansible_collections/community/general/tests/output/.tmp/sanity/import/minimal-py26/bin/python /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
02:05 >>> Standard Error
02:05 Could not find a version that satisfies the requirement coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2)) (from versions: )
02:05 No matching distribution found for coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
02:05 >>> Standard Output
02:05 Collecting coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
```
(See https://app.shippable.com/github/ansible-collections/community.general/runs/8257/1/console for the full log)
2. When running unit tests for Python 2.6 with `ansible-test units -v --color --docker --coverage` with `unittest2 ; python_version < '2.7'` in tests/unit/requirements.txt, it fails:
```
Run command: /usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt
Collecting unittest2 (from -r tests/unit/requirements.txt (line 1))
Could not find a version that satisfies the requirement unittest2 (from -r tests/unit/requirements.txt (line 1)) (from versions: )
No matching distribution found for unittest2 (from -r tests/unit/requirements.txt (line 1))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
```
(See https://github.com/ansible-collections/community.hrobot/runs/2264772861?check_suite_focus=true)
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
devel
```
### Configuration
...
### OS / Environment
...
### Steps to Reproduce
...
### Expected Results
...
### Actual Results
...
|
https://github.com/ansible/ansible/issues/74120
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-04-05T08:22:07Z |
python
| 2021-04-08T22:47:23Z |
changelogs/fragments/ansible-test-pypi-test-container.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,120 |
Python 2.6 unit and sanity test failures during SNI brownouts
|
### Summary
I know there is some ongoing work to mitigate these problems, but since I'm not aware which parts have already been addressed and which not, I want to share some common failures here which I've seen during the last days:
1. When running `ansible-test sanity --color -v --junit --coverage-check --changed --docker --base-branch origin/main --skip-test pylint --skip-test ansible-doc --skip-test validate-modules --allow-disabled` in CI, the Python 2.6 `import` test fails:
```
02:05 ERROR: Command "/root/ansible_collections/community/general/tests/output/.tmp/sanity/import/minimal-py26/bin/python /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
02:05 >>> Standard Error
02:05 Could not find a version that satisfies the requirement coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2)) (from versions: )
02:05 No matching distribution found for coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
02:05 >>> Standard Output
02:05 Collecting coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
```
(See https://app.shippable.com/github/ansible-collections/community.general/runs/8257/1/console for the full log)
2. When running unit tests for Python 2.6 with `ansible-test units -v --color --docker --coverage` with `unittest2 ; python_version < '2.7'` in tests/unit/requirements.txt, it fails:
```
Run command: /usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt
Collecting unittest2 (from -r tests/unit/requirements.txt (line 1))
Could not find a version that satisfies the requirement unittest2 (from -r tests/unit/requirements.txt (line 1)) (from versions: )
No matching distribution found for unittest2 (from -r tests/unit/requirements.txt (line 1))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
```
(See https://github.com/ansible-collections/community.hrobot/runs/2264772861?check_suite_focus=true)
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
devel
```
### Configuration
...
### OS / Environment
...
### Steps to Reproduce
...
### Expected Results
...
### Actual Results
...
|
https://github.com/ansible/ansible/issues/74120
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-04-05T08:22:07Z |
python
| 2021-04-08T22:47:23Z |
test/lib/ansible_test/_data/quiet_pip.py
|
"""Custom entry-point for pip that filters out unwanted logging and warnings."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import logging
import re
import runpy
import warnings
BUILTIN_FILTERER_FILTER = logging.Filterer.filter

LOGGING_MESSAGE_FILTER = re.compile("^("
                                    ".*Running pip install with root privileges is generally not a good idea.*|"  # custom Fedora patch [1]
                                    "DEPRECATION: Python 2.7 will reach the end of its life .*|"  # pip 19.2.3
                                    "Ignoring .*: markers .* don't match your environment|"
                                    "Requirement already satisfied.*"
                                    ")$")

# [1] https://src.fedoraproject.org/rpms/python-pip/blob/master/f/emit-a-warning-when-running-with-root-privileges.patch

WARNING_MESSAGE_FILTERS = (
    # DEPRECATION: Python 2.6 is no longer supported by the Python core team, please upgrade your Python.
    # A future version of pip will drop support for Python 2.6
    'Python 2.6 is no longer supported by the Python core team, ',

    # {path}/python2.6/lib/python2.6/site-packages/pip/_vendor/urllib3/util/ssl_.py:137: InsecurePlatformWarning:
    # A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail.
    # You can upgrade to a newer version of Python to solve this.
    # For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings
    'A true SSLContext object is not available. ',

    # {path}/python2.6/lib/python2.6/site-packages/pip/_vendor/urllib3/util/ssl_.py:339: SNIMissingWarning:
    # An HTTPS request has been made, but the SNI (Subject Name Indication) extension to TLS is not available on this platform.
    # This may cause the server to present an incorrect TLS certificate, which can cause validation failures.
    # You can upgrade to a newer version of Python to solve this.
    # For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings
    'An HTTPS request has been made, but the SNI ',

    # DEPRECATION: Python 2.7 reached the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 is no longer maintained.
    # pip 21.0 will drop support for Python 2.7 in January 2021.
    # More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support
    'DEPRECATION: Python 2.7 reached the end of its life ',

    # DEPRECATION: Python 3.5 reached the end of its life on September 13th, 2020. Please upgrade your Python as Python 3.5 is no longer maintained.
    # pip 21.0 will drop support for Python 3.5 in January 2021. pip 21.0 will remove support for this functionality.
    'DEPRECATION: Python 3.5 reached the end of its life ',
)


def custom_filterer_filter(self, record):
    """Suppress log records whose rendered message matches LOGGING_MESSAGE_FILTER.

    Installed in place of ``logging.Filterer.filter``; any record that is not
    matched is handed to the original implementation saved in
    ``BUILTIN_FILTERER_FILTER`` so normal filtering still applies.
    """
    message = record.getMessage()

    if LOGGING_MESSAGE_FILTER.search(message) is None:
        return BUILTIN_FILTERER_FILTER(self, record)

    return 0
def main():
    """Main program entry point."""
    # Filtering logging output globally avoids having to intercept stdout/stderr.
    # It also avoids problems with loss of color output and mixing up the order of stdout/stderr messages.
    logging.Filterer.filter = custom_filterer_filter

    # The warning filters must be installed in code rather than via the environment or command line:
    # Python 2.6 does not support the PYTHONWARNINGS environment variable (it does support the -W option).
    # Python 2.7 cannot use the -W option to match warning text after a colon, which makes it impossible to match specific warning messages.
    for pattern in WARNING_MESSAGE_FILTERS:
        warnings.filterwarnings('ignore', pattern)

    # Hand control to pip as if it had been invoked directly.
    runpy.run_module('pip.__main__', run_name='__main__', alter_sys=True)


if __name__ == '__main__':
    main()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,120 |
Python 2.6 unit and sanity test failures during SNI brownouts
|
### Summary
I know there is some ongoing work to mitigate these problems, but since I'm not aware which parts have already been addressed and which not, I want to share some common failures here which I've seen during the last days:
1. When running `ansible-test sanity --color -v --junit --coverage-check --changed --docker --base-branch origin/main --skip-test pylint --skip-test ansible-doc --skip-test validate-modules --allow-disabled` in CI, the Python 2.6 `import` test fails:
```
02:05 ERROR: Command "/root/ansible_collections/community/general/tests/output/.tmp/sanity/import/minimal-py26/bin/python /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
02:05 >>> Standard Error
02:05 Could not find a version that satisfies the requirement coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2)) (from versions: )
02:05 No matching distribution found for coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
02:05 >>> Standard Output
02:05 Collecting coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
```
(See https://app.shippable.com/github/ansible-collections/community.general/runs/8257/1/console for the full log)
2. When running unit tests for Python 2.6 with `ansible-test units -v --color --docker --coverage` with `unittest2 ; python_version < '2.7'` in tests/unit/requirements.txt, it fails:
```
Run command: /usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt
Collecting unittest2 (from -r tests/unit/requirements.txt (line 1))
Could not find a version that satisfies the requirement unittest2 (from -r tests/unit/requirements.txt (line 1)) (from versions: )
No matching distribution found for unittest2 (from -r tests/unit/requirements.txt (line 1))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
```
(See https://github.com/ansible-collections/community.hrobot/runs/2264772861?check_suite_focus=true)
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
devel
```
### Configuration
...
### OS / Environment
...
### Steps to Reproduce
...
### Expected Results
...
### Actual Results
...
|
https://github.com/ansible/ansible/issues/74120
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-04-05T08:22:07Z |
python
| 2021-04-08T22:47:23Z |
test/lib/ansible_test/_internal/cli.py
|
"""Test runner for all Ansible tests."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import errno
import os
import sys
# This import should occur as early as possible.
# It must occur before subprocess has been imported anywhere in the current process.
from .init import (
CURRENT_RLIMIT_NOFILE,
)
from . import types as t
from .util import (
ApplicationError,
display,
raw_command,
generate_pip_command,
read_lines_without_comments,
MAXFD,
ANSIBLE_TEST_DATA_ROOT,
)
from .delegation import (
check_delegation_args,
delegate,
)
from .executor import (
command_posix_integration,
command_network_integration,
command_windows_integration,
command_shell,
SUPPORTED_PYTHON_VERSIONS,
ApplicationWarning,
Delegate,
generate_pip_install,
check_startup,
)
from .config import (
PosixIntegrationConfig,
WindowsIntegrationConfig,
NetworkIntegrationConfig,
SanityConfig,
UnitsConfig,
ShellConfig,
)
from .env import (
EnvConfig,
command_env,
configure_timeout,
)
from .sanity import (
command_sanity,
sanity_init,
sanity_get_tests,
)
from .units import (
command_units,
)
from .target import (
find_target_completion,
walk_posix_integration_targets,
walk_network_integration_targets,
walk_windows_integration_targets,
walk_units_targets,
walk_sanity_targets,
)
from .cloud import (
initialize_cloud_plugins,
)
from .core_ci import (
AnsibleCoreCI,
)
from .data import (
data_context,
)
from .util_common import (
get_docker_completion,
get_network_completion,
get_remote_completion,
CommonConfig,
)
from .coverage.combine import (
command_coverage_combine,
)
from .coverage.erase import (
command_coverage_erase,
)
from .coverage.html import (
command_coverage_html,
)
from .coverage.report import (
command_coverage_report,
CoverageReportConfig,
)
from .coverage.xml import (
command_coverage_xml,
)
from .coverage.analyze.targets.generate import (
command_coverage_analyze_targets_generate,
CoverageAnalyzeTargetsGenerateConfig,
)
from .coverage.analyze.targets.expand import (
command_coverage_analyze_targets_expand,
CoverageAnalyzeTargetsExpandConfig,
)
from .coverage.analyze.targets.filter import (
command_coverage_analyze_targets_filter,
CoverageAnalyzeTargetsFilterConfig,
)
from .coverage.analyze.targets.combine import (
command_coverage_analyze_targets_combine,
CoverageAnalyzeTargetsCombineConfig,
)
from .coverage.analyze.targets.missing import (
command_coverage_analyze_targets_missing,
CoverageAnalyzeTargetsMissingConfig,
)
from .coverage import (
COVERAGE_GROUPS,
CoverageConfig,
)
if t.TYPE_CHECKING:
import argparse as argparse_module
def main():
    """Main program function.

    Sets up the test environment, parses the command line, runs the selected
    command function and maps the outcome to a process exit code:
    0 for an application warning, 1 for an application error, 2 for a
    keyboard interrupt and 3 for a broken pipe (EPIPE) while writing output.
    """
    try:
        # All subsequent relative paths are resolved against the content root.
        os.chdir(data_context().content.root)
        initialize_cloud_plugins()
        sanity_init()
        args = parse_args()
        config = args.config(args)  # type: CommonConfig
        # Propagate output-related settings to the global display object.
        display.verbosity = config.verbosity
        display.truncate = config.truncate
        display.redact = config.redact
        display.color = config.color
        display.info_stderr = config.info_stderr
        check_startup()
        check_delegation_args(config)
        configure_timeout(config)

        display.info('RLIMIT_NOFILE: %s' % (CURRENT_RLIMIT_NOFILE,), verbosity=2)
        display.info('MAXFD: %d' % MAXFD, verbosity=2)

        try:
            # The command function may raise Delegate to request re-execution
            # in a delegated environment instead of completing directly.
            args.func(config)
            delegate_args = None
        except Delegate as ex:
            # save delegation args for use once we exit the exception handler
            delegate_args = (ex.exclude, ex.require, ex.integration_targets)

        if delegate_args:
            # delegate_args is a 3-tuple (always truthy) when Delegate was raised, otherwise None
            # noinspection PyTypeChecker
            delegate(config, *delegate_args)

        display.review_warnings()
    except ApplicationWarning as ex:
        display.warning(u'%s' % ex)
        sys.exit(0)
    except ApplicationError as ex:
        display.error(u'%s' % ex)
        sys.exit(1)
    except KeyboardInterrupt:
        sys.exit(2)
    except IOError as ex:
        # Exit quietly on a broken pipe; re-raise any other I/O error.
        if ex.errno == errno.EPIPE:
            sys.exit(3)
        raise
def parse_args():
    """Parse command line arguments.

    Builds the full ansible-test parser tree (shared parent parsers plus one
    sub-parser per command) and returns the parsed namespace, with ``color``
    normalized to a boolean and ``verbosity`` forced to at least 1 when
    ``--explain`` is given.
    """
    # Bootstrap argparse for Python versions where it is not in the standard library,
    # but only when the user explicitly asked for requirements to be installed.
    try:
        import argparse
    except ImportError:
        if '--requirements' not in sys.argv:
            raise
        # install argparse without using constraints since pip may be too old to support them
        # not using the ansible-test requirements file since this install is for sys.executable rather than the delegated python (which may be different)
        # argparse has no special requirements, so upgrading pip is not required here
        raw_command(generate_pip_install(generate_pip_command(sys.executable), '', packages=['argparse'], use_constraints=False))
        import argparse

    # argcomplete is optional; the epilog tells the user how to enable tab completion.
    try:
        import argcomplete
    except ImportError:
        argcomplete = None

    if argcomplete:
        epilog = 'Tab completion available using the "argcomplete" python package.'
    else:
        epilog = 'Install the "argcomplete" python package to enable tab completion.'

    def key_value_type(value):  # type: (str) -> t.Tuple[str, str]
        """Wrapper around key_value."""
        # Binds the locally imported argparse module for use as an argparse type= callable.
        return key_value(argparse, value)

    parser = argparse.ArgumentParser(epilog=epilog)

    # Parent parser with options shared by every command (used via parents=[common]).
    common = argparse.ArgumentParser(add_help=False)

    common.add_argument('-e', '--explain',
                        action='store_true',
                        help='explain commands that would be executed')

    common.add_argument('-v', '--verbose',
                        dest='verbosity',
                        action='count',
                        default=0,
                        help='display more output')

    common.add_argument('--color',
                        metavar='COLOR',
                        nargs='?',
                        help='generate color output: %(choices)s',
                        choices=('yes', 'no', 'auto'),
                        const='yes',
                        default='auto')

    common.add_argument('--debug',
                        action='store_true',
                        help='run ansible commands in debug mode')

    # noinspection PyTypeChecker
    common.add_argument('--truncate',
                        dest='truncate',
                        metavar='COLUMNS',
                        type=int,
                        default=display.columns,
                        help='truncate some long output (0=disabled) (default: auto)')

    # NOTE(review): --redact and --no-redact share dest='redact' with conflicting
    # defaults; argparse applies the default of the first registered action, so
    # redaction appears to be on by default — confirm before relying on this.
    common.add_argument('--redact',
                        dest='redact',
                        action='store_true',
                        default=True,
                        help='redact sensitive values in output')

    common.add_argument('--no-redact',
                        dest='redact',
                        action='store_false',
                        default=False,
                        help='show sensitive values in output')

    common.add_argument('--check-python',
                        choices=SUPPORTED_PYTHON_VERSIONS,
                        help=argparse.SUPPRESS)

    # Parent parser for all test-running commands: target selection, coverage and change detection.
    test = argparse.ArgumentParser(add_help=False, parents=[common])

    test.add_argument('include',
                      metavar='TARGET',
                      nargs='*',
                      help='test the specified target').completer = complete_target

    test.add_argument('--include',
                      metavar='TARGET',
                      action='append',
                      help='include the specified target').completer = complete_target

    test.add_argument('--exclude',
                      metavar='TARGET',
                      action='append',
                      help='exclude the specified target').completer = complete_target

    test.add_argument('--require',
                      metavar='TARGET',
                      action='append',
                      help='require the specified target').completer = complete_target

    test.add_argument('--coverage',
                      action='store_true',
                      help='analyze code coverage when running tests')

    test.add_argument('--coverage-label',
                      default='',
                      help='label to include in coverage output file names')

    test.add_argument('--coverage-check',
                      action='store_true',
                      help='only verify code coverage can be enabled')

    test.add_argument('--metadata',
                      help=argparse.SUPPRESS)

    test.add_argument('--base-branch',
                      help='base branch used for change detection')

    add_changes(test, argparse)
    add_environments(test)

    # Parent parser for the three integration commands (posix/network/windows).
    integration = argparse.ArgumentParser(add_help=False, parents=[test])

    integration.add_argument('--python',
                             metavar='VERSION',
                             choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
                             help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))

    integration.add_argument('--start-at',
                             metavar='TARGET',
                             help='start at the specified target').completer = complete_target

    integration.add_argument('--start-at-task',
                             metavar='TASK',
                             help='start at the specified task')

    integration.add_argument('--tags',
                             metavar='TAGS',
                             help='only run plays and tasks tagged with these values')

    integration.add_argument('--skip-tags',
                             metavar='TAGS',
                             help='only run plays and tasks whose tags do not match these values')

    integration.add_argument('--diff',
                             action='store_true',
                             help='show diff output')

    integration.add_argument('--allow-destructive',
                             action='store_true',
                             help='allow destructive tests')

    integration.add_argument('--allow-root',
                             action='store_true',
                             help='allow tests requiring root when not root')

    integration.add_argument('--allow-disabled',
                             action='store_true',
                             help='allow tests which have been marked as disabled')

    integration.add_argument('--allow-unstable',
                             action='store_true',
                             help='allow tests which have been marked as unstable')

    integration.add_argument('--allow-unstable-changed',
                             action='store_true',
                             help='allow tests which have been marked as unstable when focused changes are detected')

    integration.add_argument('--allow-unsupported',
                             action='store_true',
                             help='allow tests which have been marked as unsupported')

    integration.add_argument('--retry-on-error',
                             action='store_true',
                             help='retry failed test with increased verbosity')

    integration.add_argument('--continue-on-error',
                             action='store_true',
                             help='continue after failed test')

    integration.add_argument('--debug-strategy',
                             action='store_true',
                             help='run test playbooks using the debug strategy')

    integration.add_argument('--changed-all-target',
                             metavar='TARGET',
                             default='all',
                             help='target to run when all tests are needed')

    integration.add_argument('--changed-all-mode',
                             metavar='MODE',
                             choices=('default', 'include', 'exclude'),
                             help='include/exclude behavior with --changed-all-target: %(choices)s')

    integration.add_argument('--list-targets',
                             action='store_true',
                             help='list matching targets instead of running tests')

    integration.add_argument('--no-temp-workdir',
                             action='store_true',
                             help='do not run tests from a temporary directory (use only for verifying broken tests)')

    integration.add_argument('--no-temp-unicode',
                             action='store_true',
                             help='avoid unicode characters in temporary directory (use only for verifying broken tests)')

    subparsers = parser.add_subparsers(metavar='COMMAND')
    subparsers.required = True  # work-around for python 3 bug which makes subparsers optional

    # Sub-command: integration (POSIX).
    posix_integration = subparsers.add_parser('integration',
                                              parents=[integration],
                                              help='posix integration tests')

    posix_integration.set_defaults(func=command_posix_integration,
                                   targets=walk_posix_integration_targets,
                                   config=PosixIntegrationConfig)

    add_extra_docker_options(posix_integration)
    add_httptester_options(posix_integration, argparse)

    # Sub-command: network-integration.
    network_integration = subparsers.add_parser('network-integration',
                                                parents=[integration],
                                                help='network integration tests')

    network_integration.set_defaults(func=command_network_integration,
                                     targets=walk_network_integration_targets,
                                     config=NetworkIntegrationConfig)

    add_extra_docker_options(network_integration, integration=False)

    network_integration.add_argument('--platform',
                                     metavar='PLATFORM',
                                     action='append',
                                     help='network platform/version').completer = complete_network_platform

    network_integration.add_argument('--platform-collection',
                                     type=key_value_type,
                                     metavar='PLATFORM=COLLECTION',
                                     action='append',
                                     help='collection used to test platform').completer = complete_network_platform_collection

    network_integration.add_argument('--platform-connection',
                                     type=key_value_type,
                                     metavar='PLATFORM=CONNECTION',
                                     action='append',
                                     help='connection used to test platform').completer = complete_network_platform_connection

    network_integration.add_argument('--inventory',
                                     metavar='PATH',
                                     help='path to inventory used for tests')

    network_integration.add_argument('--testcase',
                                     metavar='TESTCASE',
                                     help='limit a test to a specified testcase').completer = complete_network_testcase

    # Sub-command: windows-integration.
    windows_integration = subparsers.add_parser('windows-integration',
                                                parents=[integration],
                                                help='windows integration tests')

    windows_integration.set_defaults(func=command_windows_integration,
                                     targets=walk_windows_integration_targets,
                                     config=WindowsIntegrationConfig)

    add_extra_docker_options(windows_integration, integration=False)
    add_httptester_options(windows_integration, argparse)

    windows_integration.add_argument('--windows',
                                     metavar='VERSION',
                                     action='append',
                                     help='windows version').completer = complete_windows

    windows_integration.add_argument('--inventory',
                                     metavar='PATH',
                                     help='path to inventory used for tests')

    # Sub-command: units.
    units = subparsers.add_parser('units',
                                  parents=[test],
                                  help='unit tests')

    units.set_defaults(func=command_units,
                       targets=walk_units_targets,
                       config=UnitsConfig)

    units.add_argument('--python',
                       metavar='VERSION',
                       choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
                       help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))

    units.add_argument('--collect-only',
                       action='store_true',
                       help='collect tests but do not execute them')

    # noinspection PyTypeChecker
    units.add_argument('--num-workers',
                       type=int,
                       help='number of workers to use (default: auto)')

    units.add_argument('--requirements-mode',
                       choices=('only', 'skip'),
                       help=argparse.SUPPRESS)

    add_extra_docker_options(units, integration=False)

    # Sub-command: sanity.
    sanity = subparsers.add_parser('sanity',
                                   parents=[test],
                                   help='sanity tests')

    sanity.set_defaults(func=command_sanity,
                        targets=walk_sanity_targets,
                        config=SanityConfig)

    # The comprehension variable `test` below shadows the `test` parser only
    # within the comprehension's own scope (Python 3 scoping).
    sanity.add_argument('--test',
                        metavar='TEST',
                        action='append',
                        choices=[test.name for test in sanity_get_tests()],
                        help='tests to run').completer = complete_sanity_test

    sanity.add_argument('--skip-test',
                        metavar='TEST',
                        action='append',
                        choices=[test.name for test in sanity_get_tests()],
                        help='tests to skip').completer = complete_sanity_test

    sanity.add_argument('--allow-disabled',
                        action='store_true',
                        help='allow tests to run which are disabled by default')

    sanity.add_argument('--list-tests',
                        action='store_true',
                        help='list available tests')

    sanity.add_argument('--python',
                        metavar='VERSION',
                        choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
                        help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))

    sanity.add_argument('--enable-optional-errors',
                        action='store_true',
                        help='enable optional errors')

    add_lint(sanity)
    add_extra_docker_options(sanity, integration=False)

    # Sub-command: shell.
    shell = subparsers.add_parser('shell',
                                  parents=[common],
                                  help='open an interactive shell')

    shell.add_argument('--python',
                       metavar='VERSION',
                       choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
                       help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))

    shell.set_defaults(func=command_shell,
                       config=ShellConfig)

    shell.add_argument('--raw',
                       action='store_true',
                       help='direct to shell with no setup')

    add_environments(shell)
    add_extra_docker_options(shell)
    add_httptester_options(shell, argparse)

    # Sub-command tree: coverage (combine/erase/report/html/xml plus analyze).
    coverage_common = argparse.ArgumentParser(add_help=False, parents=[common])
    add_environments(coverage_common, isolated_delegation=False)

    coverage = subparsers.add_parser('coverage',
                                     help='code coverage management and reporting')

    coverage_subparsers = coverage.add_subparsers(metavar='COMMAND')
    coverage_subparsers.required = True  # work-around for python 3 bug which makes subparsers optional

    add_coverage_analyze(coverage_subparsers, coverage_common)

    coverage_combine = coverage_subparsers.add_parser('combine',
                                                      parents=[coverage_common],
                                                      help='combine coverage data and rewrite remote paths')

    coverage_combine.set_defaults(func=command_coverage_combine,
                                  config=CoverageConfig)

    coverage_combine.add_argument('--export',
                                  help='directory to export combined coverage files to')

    add_extra_coverage_options(coverage_combine)

    coverage_erase = coverage_subparsers.add_parser('erase',
                                                    parents=[coverage_common],
                                                    help='erase coverage data files')

    coverage_erase.set_defaults(func=command_coverage_erase,
                                config=CoverageConfig)

    coverage_report = coverage_subparsers.add_parser('report',
                                                     parents=[coverage_common],
                                                     help='generate console coverage report')

    coverage_report.set_defaults(func=command_coverage_report,
                                 config=CoverageReportConfig)

    coverage_report.add_argument('--show-missing',
                                 action='store_true',
                                 help='show line numbers of statements not executed')

    coverage_report.add_argument('--include',
                                 metavar='PAT1,PAT2,...',
                                 help='include only files whose paths match one of these '
                                      'patterns. Accepts shell-style wildcards, which must be '
                                      'quoted.')

    coverage_report.add_argument('--omit',
                                 metavar='PAT1,PAT2,...',
                                 help='omit files whose paths match one of these patterns. '
                                      'Accepts shell-style wildcards, which must be quoted.')

    add_extra_coverage_options(coverage_report)

    coverage_html = coverage_subparsers.add_parser('html',
                                                   parents=[coverage_common],
                                                   help='generate html coverage report')

    coverage_html.set_defaults(func=command_coverage_html,
                               config=CoverageConfig)

    add_extra_coverage_options(coverage_html)

    coverage_xml = coverage_subparsers.add_parser('xml',
                                                  parents=[coverage_common],
                                                  help='generate xml coverage report')

    coverage_xml.set_defaults(func=command_coverage_xml,
                              config=CoverageConfig)

    add_extra_coverage_options(coverage_xml)

    # Sub-command: env.
    env = subparsers.add_parser('env',
                                parents=[common],
                                help='show information about the test environment')

    env.set_defaults(func=command_env,
                     config=EnvConfig)

    env.add_argument('--show',
                     action='store_true',
                     help='show environment on stdout')

    env.add_argument('--dump',
                     action='store_true',
                     help='dump environment to disk')

    env.add_argument('--list-files',
                     action='store_true',
                     help='list files on stdout')

    # noinspection PyTypeChecker
    env.add_argument('--timeout',
                     type=int,
                     metavar='MINUTES',
                     help='timeout for future ansible-test commands (0 clears)')

    if argcomplete:
        argcomplete.autocomplete(parser, always_complete_options=False, validator=lambda i, k: True)

    args = parser.parse_args()

    # --explain implies at least verbosity 1 so the explained commands are shown.
    if args.explain and not args.verbosity:
        args.verbosity = 1

    # Normalize --color to a boolean; 'auto' follows whether stdout is a TTY.
    if args.color == 'yes':
        args.color = True
    elif args.color == 'no':
        args.color = False
    else:
        args.color = sys.stdout.isatty()

    return args
def key_value(argparse, value):  # type: (argparse_module, str) -> t.Tuple[str, str]
    """Parse an argparse argument of the form "key=value" and return the (key, value) pair.

    Raises an argparse type error when the argument does not contain exactly one '=' separator.
    """
    pieces = value.split('=')

    if len(pieces) == 2:
        return pieces[0], pieces[1]

    raise argparse.ArgumentTypeError('"%s" must be in the format "key=value"' % value)
# noinspection PyProtectedMember,PyUnresolvedReferences
def add_coverage_analyze(coverage_subparsers, coverage_common):  # type: (argparse_module._SubParsersAction, argparse_module.ArgumentParser) -> None
    """Add the `coverage analyze` subcommand.

    Registers the `analyze` parser under the `coverage` command, with a nested
    `targets` group providing the generate/expand/filter/combine/missing operations.
    """
    analyze = coverage_subparsers.add_parser(
        'analyze',
        help='analyze collected coverage data',
    )

    analyze_subparsers = analyze.add_subparsers(metavar='COMMAND')
    analyze_subparsers.required = True  # work-around for python 3 bug which makes subparsers optional

    targets = analyze_subparsers.add_parser(
        'targets',
        help='analyze integration test target coverage',
    )

    targets_subparsers = targets.add_subparsers(metavar='COMMAND')
    targets_subparsers.required = True  # work-around for python 3 bug which makes subparsers optional

    # `coverage analyze targets generate` -- aggregate raw coverage into a per-target file
    targets_generate = targets_subparsers.add_parser(
        'generate',
        parents=[coverage_common],
        help='aggregate coverage by integration test target',
    )

    targets_generate.set_defaults(
        func=command_coverage_analyze_targets_generate,
        config=CoverageAnalyzeTargetsGenerateConfig,
    )

    targets_generate.add_argument(
        'input_dir',
        nargs='?',
        help='directory to read coverage from',
    )

    targets_generate.add_argument(
        'output_file',
        help='output file for aggregated coverage',
    )

    # `coverage analyze targets expand` -- turn integer target indexes back into names
    targets_expand = targets_subparsers.add_parser(
        'expand',
        parents=[coverage_common],
        help='expand target names from integers in aggregated coverage',
    )

    targets_expand.set_defaults(
        func=command_coverage_analyze_targets_expand,
        config=CoverageAnalyzeTargetsExpandConfig,
    )

    targets_expand.add_argument(
        'input_file',
        help='input file to read aggregated coverage from',
    )

    targets_expand.add_argument(
        'output_file',
        help='output file to write expanded coverage to',
    )

    # `coverage analyze targets filter` -- keep/drop targets and paths by name or regex
    targets_filter = targets_subparsers.add_parser(
        'filter',
        parents=[coverage_common],
        help='filter aggregated coverage data',
    )

    targets_filter.set_defaults(
        func=command_coverage_analyze_targets_filter,
        config=CoverageAnalyzeTargetsFilterConfig,
    )

    targets_filter.add_argument(
        'input_file',
        help='input file to read aggregated coverage from',
    )

    targets_filter.add_argument(
        'output_file',
        help='output file to write expanded coverage to',
    )

    targets_filter.add_argument(
        '--include-target',
        dest='include_targets',
        action='append',
        help='include the specified targets',
    )

    targets_filter.add_argument(
        '--exclude-target',
        dest='exclude_targets',
        action='append',
        help='exclude the specified targets',
    )

    targets_filter.add_argument(
        '--include-path',
        help='include paths matching the given regex',
    )

    targets_filter.add_argument(
        '--exclude-path',
        help='exclude paths matching the given regex',
    )

    # `coverage analyze targets combine` -- merge several aggregated files into one
    targets_combine = targets_subparsers.add_parser(
        'combine',
        parents=[coverage_common],
        help='combine multiple aggregated coverage files',
    )

    targets_combine.set_defaults(
        func=command_coverage_analyze_targets_combine,
        config=CoverageAnalyzeTargetsCombineConfig,
    )

    targets_combine.add_argument(
        'input_file',
        nargs='+',
        help='input file to read aggregated coverage from',
    )

    targets_combine.add_argument(
        'output_file',
        help='output file to write aggregated coverage to',
    )

    # `coverage analyze targets missing` -- report coverage present in one file but not another
    targets_missing = targets_subparsers.add_parser(
        'missing',
        parents=[coverage_common],
        help='identify coverage in one file missing in another',
    )

    targets_missing.set_defaults(
        func=command_coverage_analyze_targets_missing,
        config=CoverageAnalyzeTargetsMissingConfig,
    )

    targets_missing.add_argument(
        'from_file',
        help='input file containing aggregated coverage',
    )

    targets_missing.add_argument(
        'to_file',
        help='input file containing aggregated coverage',
    )

    targets_missing.add_argument(
        'output_file',
        help='output file to write aggregated coverage to',
    )

    targets_missing.add_argument(
        '--only-gaps',
        action='store_true',
        help='report only arcs/lines not hit by any target',
    )

    targets_missing.add_argument(
        '--only-exists',
        action='store_true',
        help='limit results to files that exist',
    )
def add_lint(parser):
    """Add the common lint/reporting options shared by test commands.

    :type parser: argparse.ArgumentParser
    """
    for option, description in (
            ('--lint', 'write lint output to stdout, everything else stderr'),
            ('--junit', 'write test failures to junit xml files'),
            ('--failure-ok', 'exit successfully on failed tests after saving results'),
    ):
        parser.add_argument(option, action='store_true', help=description)
def add_changes(parser, argparse):
    """Add the change detection options shared by test commands.

    :type parser: argparse.ArgumentParser
    :type argparse: argparse
    """
    parser.add_argument('--changed', action='store_true', help='limit targets based on changes')

    changes = parser.add_argument_group(title='change detection arguments')

    changes.add_argument('--tracked', action='store_true', help=argparse.SUPPRESS)
    changes.add_argument('--untracked', action='store_true', help='include untracked files')

    # each --ignore-* flag clears a category which is included by default (store_false)
    for option, destination, description in (
            ('--ignore-committed', 'committed', 'exclude committed files'),
            ('--ignore-staged', 'staged', 'exclude staged files'),
            ('--ignore-unstaged', 'unstaged', 'exclude unstaged files'),
    ):
        changes.add_argument(option, dest=destination, action='store_false', help=description)

    # hidden from help output
    changes.add_argument('--changed-from', metavar='PATH', help=argparse.SUPPRESS)
    changes.add_argument('--changed-path', metavar='PATH', action='append', help=argparse.SUPPRESS)
def add_environments(parser, isolated_delegation=True):
    """Add the environment selection options shared by most commands.

    :type parser: argparse.ArgumentParser
    :type isolated_delegation: bool
    """
    parser.add_argument('--requirements', action='store_true',
                        help='install command requirements')

    parser.add_argument('--python-interpreter', metavar='PATH', default=None,
                        help='path to the docker or remote python interpreter')

    parser.add_argument('--no-pip-check', dest='pip_check', default=True, action='store_false',
                        help='do not run "pip check" to verify requirements')

    environments = parser.add_mutually_exclusive_group()

    environments.add_argument('--local', action='store_true',
                              help='run from the local environment')

    environments.add_argument('--venv', action='store_true',
                              help='run from ansible-test managed virtual environments')

    venv = parser.add_argument_group(title='venv arguments')

    venv.add_argument('--venv-system-site-packages', action='store_true',
                      help='enable system site packages')

    if not isolated_delegation:
        # no docker/remote delegation for this command -- install inert defaults so later code can read them
        environments.set_defaults(
            docker=None,
            remote=None,
            remote_stage=None,
            remote_provider=None,
            remote_terminate=None,
            remote_endpoint=None,
            python_interpreter=None,
        )

        return

    docker_option = environments.add_argument('--docker', metavar='IMAGE', nargs='?', default=None, const='default',
                                              help='run from a docker container')
    docker_option.completer = complete_docker

    # the shell command also offers windows images for bare --remote completion
    remote_completer = complete_remote_shell if parser.prog.endswith(' shell') else complete_remote

    remote_option = environments.add_argument('--remote', metavar='PLATFORM', default=None,
                                              help='run from a remote instance')
    remote_option.completer = remote_completer

    remote = parser.add_argument_group(title='remote arguments')

    stage_option = remote.add_argument('--remote-stage', metavar='STAGE', default='prod',
                                       help='remote stage to use: prod, dev')
    stage_option.completer = complete_remote_stage

    remote.add_argument('--remote-provider', metavar='PROVIDER', default='default',
                        choices=['default'] + sorted(AnsibleCoreCI.PROVIDERS.keys()),
                        help='remote provider to use: %(choices)s')

    remote.add_argument('--remote-endpoint', metavar='ENDPOINT', default=None,
                        help='remote provisioning endpoint to use (default: auto)')

    remote.add_argument('--remote-terminate', metavar='WHEN', default='never',
                        choices=['never', 'always', 'success'],
                        help='terminate remote instance: %(choices)s (default: %(default)s)')
def add_extra_coverage_options(parser):
    """Add the reporting options shared by the coverage report generating commands.

    :type parser: argparse.ArgumentParser
    """
    group_by_help = 'group output by: %s' % ', '.join(COVERAGE_GROUPS)

    parser.add_argument('--group-by', metavar='GROUP', action='append', choices=COVERAGE_GROUPS, help=group_by_help)

    parser.add_argument('--all', action='store_true',
                        help='include all python/powershell source files')

    parser.add_argument('--stub', action='store_true',
                        help='generate empty report of all python/powershell source files')
def add_httptester_options(parser, argparse):
    """Add the httptester container options shared by test commands.

    :type parser: argparse.ArgumentParser
    :type argparse: argparse
    """
    group = parser.add_mutually_exclusive_group()

    group.add_argument('--httptester', metavar='IMAGE',
                       default='quay.io/ansible/http-test-container:1.3.0',
                       help='docker image to use for the httptester container')

    # disabling stores an empty string, which later code treats as "no httptester"
    group.add_argument('--disable-httptester', dest='httptester', action='store_const', const='',
                       help='do not use the httptester container')

    # internal use only -- hidden from help output
    parser.add_argument('--inject-httptester', action='store_true', help=argparse.SUPPRESS)
    parser.add_argument('--httptester-krb5-password', help=argparse.SUPPRESS)
def add_extra_docker_options(parser, integration=True):
    """Add the docker container options shared by the docker-capable commands.

    :type parser: argparse.ArgumentParser
    :type integration: bool
    """
    docker = parser.add_argument_group(title='docker arguments')

    docker.add_argument('--docker-no-pull', dest='docker_pull', action='store_false',
                        help='do not explicitly pull the latest docker images')

    # --docker-keep-git is only offered when testing ansible-core itself
    if data_context().content.is_ansible:
        docker.add_argument('--docker-keep-git', action='store_true',
                            help='transfer git related files into the docker container')
    else:
        docker.set_defaults(
            docker_keep_git=False,
        )

    docker.add_argument('--docker-seccomp', metavar='SC', default=None,
                        choices=('default', 'unconfined'),
                        help='set seccomp confinement for the test container: %(choices)s')

    docker.add_argument('--docker-terminate', metavar='WHEN', default='always',
                        choices=['never', 'always', 'success'],
                        help='terminate docker container: %(choices)s (default: %(default)s)')

    if not integration:
        return

    # the remaining options apply only to integration test commands
    docker.add_argument('--docker-privileged', action='store_true',
                        help='run docker container in privileged mode')

    docker.add_argument('--docker-network',
                        help='run using the specified docker network')

    # noinspection PyTypeChecker
    docker.add_argument('--docker-memory', type=int,
                        help='memory limit for docker in bytes')
# noinspection PyUnusedLocal
def complete_remote_stage(prefix, parsed_args, **_):  # pylint: disable=unused-argument
    """Return the remote stage names matching the given prefix.

    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    stages = ('prod', 'dev')
    return [candidate for candidate in stages if candidate.startswith(prefix)]
def complete_target(prefix, parsed_args, **_):
    """Return completions for the positional target arguments of the current command.

    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    # delegate to the shared target completion logic, scoped to the targets already parsed
    return find_target_completion(parsed_args.targets, prefix)
# noinspection PyUnusedLocal
def complete_remote(prefix, parsed_args, **_):
    """Return the remote platform names matching the given prefix.

    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    del parsed_args

    return [name for name in sorted(get_remote_completion().keys()) if name.startswith(prefix)]
# noinspection PyUnusedLocal
def complete_remote_shell(prefix, parsed_args, **_):
    """Return the remote platform names (including windows) matching the given prefix.

    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    del parsed_args

    names = sorted(get_remote_completion().keys())

    # the shell command can also target windows instances
    # 2008 doesn't support SSH so we do not add it to the list of valid images
    windows_completion_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', 'windows.txt')
    windows_versions = read_lines_without_comments(windows_completion_path, remove_blank_lines=True)
    names.extend('windows/%s' % version for version in windows_versions if version != '2008')

    return [name for name in names if name.startswith(prefix)]
# noinspection PyUnusedLocal
def complete_docker(prefix, parsed_args, **_):
    """Return the docker image names matching the given prefix.

    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    del parsed_args

    return [name for name in sorted(get_docker_completion().keys()) if name.startswith(prefix)]
def complete_windows(prefix, parsed_args, **_):
    """Return the windows versions matching the given prefix which have not already been selected.

    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    completion_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', 'windows.txt')
    versions = read_lines_without_comments(completion_path, remove_blank_lines=True)
    already_selected = parsed_args.windows or []

    return [version for version in versions if version.startswith(prefix) and version not in already_selected]
def complete_network_platform(prefix, parsed_args, **_):
    """Return the network platforms matching the given prefix which have not already been selected.

    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    already_selected = parsed_args.platform or []

    return [name for name in sorted(get_network_completion()) if name.startswith(prefix) and name not in already_selected]
def complete_network_platform_collection(prefix, parsed_args, **_):
    """Return "platform=" completions for the --platform-collection option.

    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    # only complete the platform portion (before any '=' the user already typed)
    platform_prefix = prefix.split('=')[0]
    already_used = [entry[0] for entry in parsed_args.platform_collection or []]
    platforms = sorted(set(image.split('/')[0] for image in get_network_completion()))

    return [platform + '=' for platform in platforms if platform.startswith(platform_prefix) and platform not in already_used]
def complete_network_platform_connection(prefix, parsed_args, **_):
    """Return "platform=" completions for the --platform-connection option.

    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    # only complete the platform portion (before any '=' the user already typed)
    platform_prefix = prefix.split('=')[0]
    already_used = [entry[0] for entry in parsed_args.platform_connection or []]
    platforms = sorted(set(image.split('/')[0] for image in get_network_completion()))

    return [platform + '=' for platform in platforms if platform.startswith(platform_prefix) and platform not in already_used]
def complete_network_testcase(prefix, parsed_args, **_):
    """Return the testcase names for the single selected module which match the given prefix.

    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    # since testcases are module specific, don't autocomplete if more than one module is specified
    if len(parsed_args.include) != 1:
        return []

    test_dir = os.path.join(data_context().content.integration_targets_path, parsed_args.include[0], 'tests')

    matches = []

    for connection_dir in data_context().content.get_dirs(test_dir):
        for name in (os.path.basename(path) for path in data_context().content.get_files(connection_dir)):
            if name.startswith(prefix):
                matches.append(name.split('.')[0])

    return matches
# noinspection PyUnusedLocal
def complete_sanity_test(prefix, parsed_args, **_):
    """Return the sanity test names matching the given prefix.

    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    del parsed_args

    return [name for name in sorted(test.name for test in sanity_get_tests()) if name.startswith(prefix)]
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,120 |
Python 2.6 unit and sanity test failures during SNI brownouts
|
### Summary
I know there is some ongoing work to mitigate these problems, but since I'm not aware which parts have already been addressed and which not, I want to share some common failures here which I've seen during the last days:
1. When running `ansible-test sanity --color -v --junit --coverage-check --changed --docker --base-branch origin/main --skip-test pylint --skip-test ansible-doc --skip-test validate-modules --allow-disabled` in CI, the Python 2.6 `import` test fails:
```
02:05 ERROR: Command "/root/ansible_collections/community/general/tests/output/.tmp/sanity/import/minimal-py26/bin/python /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
02:05 >>> Standard Error
02:05 Could not find a version that satisfies the requirement coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2)) (from versions: )
02:05 No matching distribution found for coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
02:05 >>> Standard Output
02:05 Collecting coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
```
(See https://app.shippable.com/github/ansible-collections/community.general/runs/8257/1/console for the full log)
2. When running unit tests for Python 2.6 with `ansible-test units -v --color --docker --coverage` with `unittest2 ; python_version < '2.7'` in tests/unit/requirements.txt, it fails:
```
Run command: /usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt
Collecting unittest2 (from -r tests/unit/requirements.txt (line 1))
Could not find a version that satisfies the requirement unittest2 (from -r tests/unit/requirements.txt (line 1)) (from versions: )
No matching distribution found for unittest2 (from -r tests/unit/requirements.txt (line 1))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
```
(See https://github.com/ansible-collections/community.hrobot/runs/2264772861?check_suite_focus=true)
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
devel
```
### Configuration
...
### OS / Environment
...
### Steps to Reproduce
...
### Expected Results
...
### Actual Results
...
|
https://github.com/ansible/ansible/issues/74120
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-04-05T08:22:07Z |
python
| 2021-04-08T22:47:23Z |
test/lib/ansible_test/_internal/config.py
|
"""Configuration classes."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import sys
from . import types as t
from .util import (
find_python,
generate_password,
generate_pip_command,
ApplicationError,
)
from .util_common import (
docker_qualify_image,
get_docker_completion,
get_remote_completion,
CommonConfig,
)
from .metadata import (
Metadata,
)
from .data import (
data_context,
)
try:
    # TypeVar bound to IntegrationConfig lets generic helpers preserve the concrete subclass type.
    # noinspection PyTypeChecker
    TIntegrationConfig = t.TypeVar('TIntegrationConfig', bound='IntegrationConfig')
except AttributeError:
    # the local typing shim ("types as t") may not expose TypeVar on some interpreters -- TODO confirm
    TIntegrationConfig = None  # pylint: disable=invalid-name
class ParsedRemote:
    """The parsed form of a "remote" string: an optional arch plus a platform and version."""
    def __init__(self, arch, platform, version):  # type: (t.Optional[str], str, str) -> None
        self.arch = arch
        self.platform = platform
        self.version = version

    @staticmethod
    def parse(value):  # type: (str) -> t.Optional['ParsedRemote']
        """Return a ParsedRemote from the given value or None if the syntax is invalid."""
        fields = value.split('/')

        # "arch/platform/version"
        if len(fields) == 3:
            return ParsedRemote(fields[0], fields[1], fields[2])

        # "platform/version" (no arch)
        if len(fields) == 2:
            return ParsedRemote(None, fields[0], fields[1])

        return None
class EnvironmentConfig(CommonConfig):
    """Configuration common to all commands which execute in an environment."""
    def __init__(self, args, command):
        """
        :type args: any
        :type command: str
        """
        super(EnvironmentConfig, self).__init__(args, command)

        self.local = args.local is True
        self.venv = args.venv
        self.venv_system_site_packages = args.venv_system_site_packages

        # optional attributes are checked with "in args" since some commands register fewer options
        self.python = args.python if 'python' in args else None  # type: str

        # normalized image name for use; the raw value is kept for completion-table lookups
        self.docker = docker_qualify_image(args.docker)  # type: str
        self.docker_raw = args.docker  # type: str
        self.remote = args.remote  # type: str

        if self.remote:
            self.parsed_remote = ParsedRemote.parse(self.remote)

            if not self.parsed_remote or not self.parsed_remote.platform or not self.parsed_remote.version:
                raise ApplicationError('Unrecognized remote "%s" syntax. Use "platform/version" or "arch/platform/version".' % self.remote)
        else:
            self.parsed_remote = None

        self.docker_privileged = args.docker_privileged if 'docker_privileged' in args else False  # type: bool
        self.docker_pull = args.docker_pull if 'docker_pull' in args else False  # type: bool
        self.docker_keep_git = args.docker_keep_git if 'docker_keep_git' in args else False  # type: bool
        self.docker_seccomp = args.docker_seccomp if 'docker_seccomp' in args else None  # type: str
        self.docker_memory = args.docker_memory if 'docker_memory' in args else None
        self.docker_terminate = args.docker_terminate if 'docker_terminate' in args else None  # type: str
        self.docker_network = args.docker_network if 'docker_network' in args else None  # type: str

        if self.docker_seccomp is None:
            # fall back to the per-image seccomp setting from the docker completion table
            self.docker_seccomp = get_docker_completion().get(self.docker_raw, {}).get('seccomp', 'default')

        self.remote_stage = args.remote_stage  # type: str
        self.remote_provider = args.remote_provider  # type: str
        self.remote_endpoint = args.remote_endpoint  # type: t.Optional[str]
        self.remote_terminate = args.remote_terminate  # type: str

        # 'default' placeholders are normalized to None so later code only checks for explicit values
        if self.remote_provider == 'default':
            self.remote_provider = None

        self.requirements = args.requirements  # type: bool

        if self.python == 'default':
            self.python = None

        actual_major_minor = '.'.join(str(i) for i in sys.version_info[:2])

        self.python_version = self.python or actual_major_minor
        self.python_interpreter = args.python_interpreter

        self.pip_check = args.pip_check

        # any of docker/remote/venv means the command will be delegated to another environment
        self.delegate = self.docker or self.remote or self.venv
        self.delegate_args = []  # type: t.List[str]

        if self.delegate:
            self.requirements = True

        self.inject_httptester = args.inject_httptester if 'inject_httptester' in args else False  # type: bool
        self.httptester = docker_qualify_image(args.httptester if 'httptester' in args else '')  # type: str
        krb5_password = args.httptester_krb5_password if 'httptester_krb5_password' in args else ''
        self.httptester_krb5_password = krb5_password or generate_password()  # type: str

        # per-environment completion entries can disable features
        if self.get_delegated_completion().get('httptester', 'enabled') == 'disabled':
            self.httptester = False

        if self.get_delegated_completion().get('pip-check', 'enabled') == 'disabled':
            self.pip_check = False

        if args.check_python and args.check_python != actual_major_minor:
            raise ApplicationError('Running under Python %s instead of Python %s as expected.' % (actual_major_minor, args.check_python))

        if self.docker_keep_git:
            def git_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
                """Add files from the content root .git directory to the payload file list."""
                for dirpath, _dirnames, filenames in os.walk(os.path.join(data_context().content.root, '.git')):
                    paths = [os.path.join(dirpath, filename) for filename in filenames]
                    files.extend((path, os.path.relpath(path, data_context().content.root)) for path in paths)

            data_context().register_payload_callback(git_callback)

    @property
    def python_executable(self):
        """
        :rtype: str
        """
        return find_python(self.python_version)

    @property
    def pip_command(self):
        """
        :rtype: list[str]
        """
        return generate_pip_command(self.python_executable)

    def get_delegated_completion(self):
        """Returns a dictionary of settings specific to the selected delegation system, if any. Otherwise returns an empty dictionary.
        :rtype: dict[str, str]
        """
        if self.docker:
            return get_docker_completion().get(self.docker_raw, {})

        if self.remote:
            return get_remote_completion().get(self.remote, {})

        return {}
class TestConfig(EnvironmentConfig):
    """Configuration common to all test commands."""
    def __init__(self, args, command):
        """
        :type args: any
        :type command: str
        """
        super(TestConfig, self).__init__(args, command)

        self.coverage = args.coverage  # type: bool
        self.coverage_label = args.coverage_label  # type: str
        self.coverage_check = args.coverage_check  # type: bool
        self.coverage_config_base_path = None  # type: t.Optional[str]
        self.include = args.include or []  # type: t.List[str]
        self.exclude = args.exclude or []  # type: t.List[str]
        self.require = args.require or []  # type: t.List[str]

        # change detection options (see add_changes in the cli code)
        self.changed = args.changed  # type: bool
        self.tracked = args.tracked  # type: bool
        self.untracked = args.untracked  # type: bool
        self.committed = args.committed  # type: bool
        self.staged = args.staged  # type: bool
        self.unstaged = args.unstaged  # type: bool
        self.changed_from = args.changed_from  # type: str
        self.changed_path = args.changed_path  # type: t.List[str]
        self.base_branch = args.base_branch  # type: str

        # lint/reporting options are only registered by some commands
        self.lint = args.lint if 'lint' in args else False  # type: bool
        self.junit = args.junit if 'junit' in args else False  # type: bool
        self.failure_ok = args.failure_ok if 'failure_ok' in args else False  # type: bool

        self.metadata = Metadata.from_file(args.metadata) if args.metadata else Metadata()
        self.metadata_path = None

        # --coverage-check implies coverage collection
        if self.coverage_check:
            self.coverage = True

        def metadata_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
            """Add the metadata file to the payload file list."""
            config = self

            if self.metadata_path:
                files.append((os.path.abspath(config.metadata_path), config.metadata_path))

        data_context().register_payload_callback(metadata_callback)
class ShellConfig(EnvironmentConfig):
    """Configuration for the shell command."""
    def __init__(self, args):
        """
        :type args: any
        """
        super(ShellConfig, self).__init__(args, 'shell')

        self.raw = args.raw  # type: bool

        if self.raw:
            # raw shells do not get the httptester container
            self.httptester = False
class SanityConfig(TestConfig):
    """Configuration for the sanity command."""
    def __init__(self, args):
        """
        :type args: any
        """
        super(SanityConfig, self).__init__(args, 'sanity')

        self.test = args.test  # type: t.List[str]
        self.skip_test = args.skip_test  # type: t.List[str]
        self.list_tests = args.list_tests  # type: bool
        self.allow_disabled = args.allow_disabled  # type: bool
        self.enable_optional_errors = args.enable_optional_errors  # type: bool

        # when --lint is used, stdout is reserved for lint results, so send informational output to stderr
        self.info_stderr = self.lint
class IntegrationConfig(TestConfig):
    """Configuration for the integration command."""
    def __init__(self, args, command):
        """
        :type args: any
        :type command: str
        """
        super(IntegrationConfig, self).__init__(args, command)

        self.start_at = args.start_at  # type: str
        self.start_at_task = args.start_at_task  # type: str
        self.allow_destructive = args.allow_destructive  # type: bool
        self.allow_root = args.allow_root  # type: bool
        self.allow_disabled = args.allow_disabled  # type: bool
        self.allow_unstable = args.allow_unstable  # type: bool
        self.allow_unstable_changed = args.allow_unstable_changed  # type: bool
        self.allow_unsupported = args.allow_unsupported  # type: bool
        self.retry_on_error = args.retry_on_error  # type: bool
        self.continue_on_error = args.continue_on_error  # type: bool
        self.debug_strategy = args.debug_strategy  # type: bool
        self.changed_all_target = args.changed_all_target  # type: str
        self.changed_all_mode = args.changed_all_mode  # type: str
        self.list_targets = args.list_targets  # type: bool
        self.tags = args.tags
        self.skip_tags = args.skip_tags
        self.diff = args.diff
        self.no_temp_workdir = args.no_temp_workdir
        self.no_temp_unicode = args.no_temp_unicode

        # the delegation environment can force-disable unicode temp paths via its completion entry
        if self.get_delegated_completion().get('temp-unicode', 'enabled') == 'disabled':
            self.no_temp_unicode = True

        if self.list_targets:
            # listing targets should not execute anything; results go to stdout so info goes to stderr
            self.explain = True
            self.info_stderr = True

    def get_ansible_config(self):  # type: () -> str
        """Return the path to the Ansible config for the given config."""
        ansible_config_relative_path = os.path.join(data_context().content.integration_path, '%s.cfg' % self.command)
        ansible_config_path = os.path.join(data_context().content.root, ansible_config_relative_path)

        if not os.path.exists(ansible_config_path):
            # use the default empty configuration unless one has been provided
            ansible_config_path = super(IntegrationConfig, self).get_ansible_config()

        return ansible_config_path
class PosixIntegrationConfig(IntegrationConfig):
    """Configuration for the posix integration command."""
    def __init__(self, args):
        """
        :type args: any
        """
        # no posix specific options -- only the command name differs from the base class
        super(PosixIntegrationConfig, self).__init__(args, 'integration')
class WindowsIntegrationConfig(IntegrationConfig):
    """Configuration for the windows integration command."""
    def __init__(self, args):
        """
        :type args: any
        """
        super(WindowsIntegrationConfig, self).__init__(args, 'windows-integration')

        self.windows = args.windows  # type: t.List[str]
        self.inventory = args.inventory  # type: str

        if self.windows:
            # explicitly requesting windows instances implies destructive tests are permitted
            self.allow_destructive = True
class NetworkIntegrationConfig(IntegrationConfig):
    """Configuration for the network integration command."""
    def __init__(self, args):
        """
        :type args: any
        """
        super(NetworkIntegrationConfig, self).__init__(args, 'network-integration')

        self.platform = args.platform  # type: t.List[str]
        # --platform-collection/--platform-connection are parsed as (key, value) pairs; convert to dicts
        self.platform_collection = dict(args.platform_collection or [])  # type: t.Dict[str, str]
        self.platform_connection = dict(args.platform_connection or [])  # type: t.Dict[str, str]
        self.inventory = args.inventory  # type: str
        self.testcase = args.testcase  # type: str
class UnitsConfig(TestConfig):
    """Configuration for the units command."""
    def __init__(self, args):
        """
        :type args: any
        """
        super(UnitsConfig, self).__init__(args, 'units')

        self.collect_only = args.collect_only  # type: bool
        self.num_workers = args.num_workers  # type: int

        self.requirements_mode = args.requirements_mode if 'requirements_mode' in args else ''

        # 'only' forces requirements installation; 'skip' suppresses it; anything else leaves the default
        if self.requirements_mode == 'only':
            self.requirements = True
        elif self.requirements_mode == 'skip':
            self.requirements = False
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,120 |
Python 2.6 unit and sanity test failures during SNI brownouts
|
### Summary
I know there is some ongoing work to mitigate these problems, but since I'm not aware which parts have already been addressed and which not, I want to share some common failures here which I've seen during the last days:
1. When running `ansible-test sanity --color -v --junit --coverage-check --changed --docker --base-branch origin/main --skip-test pylint --skip-test ansible-doc --skip-test validate-modules --allow-disabled` in CI, the Python 2.6 `import` test fails:
```
02:05 ERROR: Command "/root/ansible_collections/community/general/tests/output/.tmp/sanity/import/minimal-py26/bin/python /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
02:05 >>> Standard Error
02:05 Could not find a version that satisfies the requirement coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2)) (from versions: )
02:05 No matching distribution found for coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
02:05 >>> Standard Output
02:05 Collecting coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
```
(See https://app.shippable.com/github/ansible-collections/community.general/runs/8257/1/console for the full log)
2. When running unit tests for Python 2.6 with `ansible-test units -v --color --docker --coverage` with `unittest2 ; python_version < '2.7'` in tests/unit/requirements.txt, it fails:
```
Run command: /usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt
Collecting unittest2 (from -r tests/unit/requirements.txt (line 1))
Could not find a version that satisfies the requirement unittest2 (from -r tests/unit/requirements.txt (line 1)) (from versions: )
No matching distribution found for unittest2 (from -r tests/unit/requirements.txt (line 1))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
```
(See https://github.com/ansible-collections/community.hrobot/runs/2264772861?check_suite_focus=true)
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
devel
```
### Configuration
...
### OS / Environment
...
### Steps to Reproduce
...
### Expected Results
...
### Actual Results
...
|
https://github.com/ansible/ansible/issues/74120
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-04-05T08:22:07Z |
python
| 2021-04-08T22:47:23Z |
test/lib/ansible_test/_internal/delegation.py
|
"""Delegate test execution to another environment."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import sys
import tempfile
from . import types as t
from .io import (
make_dirs,
read_text_file,
)
from .executor import (
SUPPORTED_PYTHON_VERSIONS,
HTTPTESTER_HOSTS,
create_shell_command,
run_httptester,
start_httptester,
get_python_interpreter,
get_python_version,
)
from .config import (
TestConfig,
EnvironmentConfig,
IntegrationConfig,
WindowsIntegrationConfig,
NetworkIntegrationConfig,
ShellConfig,
SanityConfig,
UnitsConfig,
)
from .core_ci import (
AnsibleCoreCI,
SshKey,
)
from .manage_ci import (
ManagePosixCI,
ManageWindowsCI,
get_ssh_key_setup,
)
from .util import (
ApplicationError,
common_environment,
display,
ANSIBLE_BIN_PATH,
ANSIBLE_TEST_DATA_ROOT,
ANSIBLE_LIB_ROOT,
ANSIBLE_TEST_ROOT,
tempdir,
)
from .util_common import (
run_command,
ResultType,
create_interpreter_wrapper,
get_docker_completion,
get_remote_completion,
)
from .docker_util import (
docker_exec,
docker_get,
docker_pull,
docker_put,
docker_rm,
docker_run,
docker_available,
docker_network_disconnect,
get_docker_networks,
get_docker_preferred_network_name,
get_docker_hostname,
is_docker_user_defined_network,
)
from .cloud import (
get_cloud_providers,
)
from .target import (
IntegrationTarget,
)
from .data import (
data_context,
)
from .payload import (
create_payload,
)
from .venv import (
create_virtual_environment,
)
from .ci import (
get_ci_provider,
)
def check_delegation_args(args):
    """Validate delegation-related arguments before any delegation work begins.

    :type args: CommonConfig
    """
    # Only EnvironmentConfig instances carry delegation settings; anything else has nothing to check.
    if isinstance(args, EnvironmentConfig):
        if args.docker:
            # Resolving the python version up front surfaces configuration errors early.
            get_python_version(args, get_docker_completion(), args.docker_raw)
        elif args.remote:
            get_python_version(args, get_remote_completion(), args.remote)
def delegate(args, exclude, require, integration_targets):
    """Delegate execution based on the provided configuration, managing test metadata when applicable.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    :rtype: bool
    """
    if not isinstance(args, TestConfig):
        # No metadata handling is needed for non-test commands.
        return delegate_command(args, exclude, require, integration_targets)

    args.metadata.ci_provider = get_ci_provider().code
    make_dirs(ResultType.TMP.path)

    with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=ResultType.TMP.path) as metadata_fd:
        args.metadata_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(metadata_fd.name))
        args.metadata.to_file(args.metadata_path)
        try:
            return delegate_command(args, exclude, require, integration_targets)
        finally:
            # The temporary file disappears with the context manager, so stop referencing it.
            args.metadata_path = None
def delegate_command(args, exclude, require, integration_targets):
    """Dispatch to the first applicable delegation backend.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    :rtype: bool
    """
    # Backends are tried in priority order: venv, then docker, then remote.
    backends = (
        (args.venv, delegate_venv),
        (args.docker, delegate_docker),
        (args.remote, delegate_remote),
    )

    for selected, backend in backends:
        if selected:
            backend(args, exclude, require, integration_targets)
            return True

    return False
def delegate_venv(args,  # type: EnvironmentConfig
                  exclude,  # type: t.List[str]
                  require,  # type: t.List[str]
                  integration_targets,  # type: t.Tuple[IntegrationTarget, ...]
                  ):  # type: (...) -> None
    """Delegate ansible-test execution to a virtual environment using venv or virtualenv."""
    if args.python:
        versions = (args.python_version,)
    else:
        versions = SUPPORTED_PYTHON_VERSIONS
    if args.httptester:
        # httptester requires containers, which the venv backend cannot provide; warn for affected targets.
        needs_httptester = sorted(target.name for target in integration_targets if 'needs/httptester/' in target.aliases)
        if needs_httptester:
            display.warning('Use --docker or --remote to enable httptester for tests marked "needs/httptester": %s' % ', '.join(needs_httptester))
    if args.venv_system_site_packages:
        # Distinct suffix so site-packages venvs are never reused as isolated venvs (and vice versa).
        suffix = '-ssp'
    else:
        suffix = ''
    venvs = dict((version, os.path.join(ResultType.TMP.path, 'delegation', 'python%s%s' % (version, suffix))) for version in versions)
    # Keep only the venvs that could actually be created on this system.
    venvs = dict((version, path) for version, path in venvs.items() if create_virtual_environment(args, version, path, args.venv_system_site_packages))
    if not venvs:
        raise ApplicationError('No usable virtual environment support found.')
    options = {
        '--venv': 0,
        '--venv-system-site-packages': 0,
    }
    with tempdir() as inject_path:
        # Provide a versioned interpreter wrapper for each venv so child processes can pick by version.
        for version, path in venvs.items():
            create_interpreter_wrapper(os.path.join(path, 'bin', 'python'), os.path.join(inject_path, 'python%s' % version))
        python_interpreter = os.path.join(inject_path, 'python%s' % args.python_version)
        cmd = generate_command(args, python_interpreter, ANSIBLE_BIN_PATH, data_context().content.root, options, exclude, require)
        if isinstance(args, TestConfig):
            if args.coverage and not args.coverage_label:
                cmd += ['--coverage-label', 'venv']
        env = common_environment()
        with tempdir() as library_path:
            # expose ansible and ansible_test to the virtual environment (only required when running from an install)
            os.symlink(ANSIBLE_LIB_ROOT, os.path.join(library_path, 'ansible'))
            os.symlink(ANSIBLE_TEST_ROOT, os.path.join(library_path, 'ansible_test'))
            env.update(
                PATH=inject_path + os.path.pathsep + env['PATH'],
                PYTHONPATH=library_path,
            )
            run_command(args, cmd, env=env)
def delegate_docker(args, exclude, require, integration_targets):
    """Delegate ansible-test execution to a docker container.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    """
    test_image = args.docker
    privileged = args.docker_privileged
    if isinstance(args, ShellConfig):
        use_httptester = args.httptester
    else:
        # Only start httptester when at least one selected target actually requires it.
        use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in integration_targets)
    if use_httptester:
        docker_pull(args, args.httptester)
    docker_pull(args, test_image)
    httptester_id = None
    test_id = None
    success = False
    # Options (and their value counts) to strip when regenerating the command for the container.
    options = {
        '--docker': 1,
        '--docker-privileged': 0,
        '--docker-util': 1,
    }
    python_interpreter = get_python_interpreter(args, get_docker_completion(), args.docker_raw)
    pwd = '/root'
    ansible_root = os.path.join(pwd, 'ansible')
    if data_context().content.collection:
        content_root = os.path.join(pwd, data_context().content.collection.directory)
    else:
        content_root = ansible_root
    remote_results_root = os.path.join(content_root, data_context().content.results_path)
    cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)
    if isinstance(args, TestConfig):
        if args.coverage and not args.coverage_label:
            # Sanitize the image name so it can be used as a coverage label.
            image_label = args.docker_raw
            image_label = re.sub('[^a-zA-Z0-9]+', '-', image_label)
            cmd += ['--coverage-label', 'docker-%s' % image_label]
    if isinstance(args, IntegrationConfig):
        if not args.allow_destructive:
            cmd.append('--allow-destructive')
    cmd_options = []
    if isinstance(args, ShellConfig) or (isinstance(args, IntegrationConfig) and args.debug_strategy):
        # Interactive shells and debug sessions require an attached TTY.
        cmd_options.append('-it')
    with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as local_source_fd:
        try:
            create_payload(args, local_source_fd.name)
            if use_httptester:
                httptester_id = run_httptester(args)
            else:
                httptester_id = None
            test_options = [
                '--detach',
                '--volume', '/sys/fs/cgroup:/sys/fs/cgroup:ro',
                '--privileged=%s' % str(privileged).lower(),
            ]
            if args.docker_memory:
                test_options.extend([
                    '--memory=%d' % args.docker_memory,
                    '--memory-swap=%d' % args.docker_memory,
                ])
            docker_socket = '/var/run/docker.sock'
            if args.docker_seccomp != 'default':
                test_options += ['--security-opt', 'seccomp=%s' % args.docker_seccomp]
            if get_docker_hostname() != 'localhost' or os.path.exists(docker_socket):
                # Expose the docker socket so tests inside the container can use docker themselves.
                test_options += ['--volume', '%s:%s' % (docker_socket, docker_socket)]
            if httptester_id:
                test_options += ['--env', 'HTTPTESTER=1', '--env', 'KRB5_PASSWORD=%s' % args.httptester_krb5_password]
                network = get_docker_preferred_network_name(args)
                if not is_docker_user_defined_network(network):
                    # legacy links are required when using the default bridge network instead of user-defined networks
                    for host in HTTPTESTER_HOSTS:
                        test_options += ['--link', '%s:%s' % (httptester_id, host)]
            if isinstance(args, IntegrationConfig):
                cloud_platforms = get_cloud_providers(args)
                for cloud_platform in cloud_platforms:
                    test_options += cloud_platform.get_docker_run_options()
            test_id = docker_run(args, test_image, options=test_options)[0]
            if args.explain:
                test_id = 'test_id'
            else:
                test_id = test_id.strip()
            setup_sh = read_text_file(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'docker.sh'))
            ssh_keys_sh = get_ssh_key_setup(SshKey(args))
            setup_sh += ssh_keys_sh
            # The setup script's shebang (minus the leading '#!') determines which shell runs it.
            shell = setup_sh.splitlines()[0][2:]
            docker_exec(args, test_id, [shell], data=setup_sh)
            # write temporary files to /root since /tmp isn't ready immediately on container start
            docker_put(args, test_id, local_source_fd.name, '/root/test.tgz')
            docker_exec(args, test_id, ['tar', 'oxzf', '/root/test.tgz', '-C', '/root'])
            # docker images are only expected to have a single python version available
            if isinstance(args, UnitsConfig) and not args.python:
                cmd += ['--python', 'default']
            # run unit tests unprivileged to prevent stray writes to the source tree
            # also disconnect from the network once requirements have been installed
            if isinstance(args, UnitsConfig):
                writable_dirs = [
                    os.path.join(content_root, ResultType.JUNIT.relative_path),
                    os.path.join(content_root, ResultType.COVERAGE.relative_path),
                ]
                docker_exec(args, test_id, ['mkdir', '-p'] + writable_dirs)
                docker_exec(args, test_id, ['chmod', '777'] + writable_dirs)
                docker_exec(args, test_id, ['chmod', '755', '/root'])
                docker_exec(args, test_id, ['chmod', '644', os.path.join(content_root, args.metadata_path)])
                docker_exec(args, test_id, ['useradd', 'pytest', '--create-home'])
                # Install requirements while the network is still available, then disconnect.
                docker_exec(args, test_id, cmd + ['--requirements-mode', 'only'], options=cmd_options)
                networks = get_docker_networks(args, test_id)
                if networks is not None:
                    for network in networks:
                        docker_network_disconnect(args, test_id, network)
                else:
                    display.warning('Network disconnection is not supported (this is normal under podman). '
                                    'Tests will not be isolated from the network. Network-related tests may misbehave.')
                cmd += ['--requirements-mode', 'skip']
                cmd_options += ['--user', 'pytest']
            try:
                docker_exec(args, test_id, cmd, options=cmd_options)
                # docker_exec will throw SubprocessError if not successful
                # If we make it here, all the prep work earlier and the docker_exec line above were all successful.
                success = True
            finally:
                # Always retrieve results, even when the test run failed.
                local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))
                remote_test_root = os.path.dirname(remote_results_root)
                remote_results_name = os.path.basename(remote_results_root)
                remote_temp_file = os.path.join('/root', remote_results_name + '.tgz')
                make_dirs(local_test_root)  # make sure directory exists for collections which have no tests
                with tempfile.NamedTemporaryFile(prefix='ansible-result-', suffix='.tgz') as local_result_fd:
                    docker_exec(args, test_id, ['tar', 'czf', remote_temp_file, '--exclude', ResultType.TMP.name, '-C', remote_test_root, remote_results_name])
                    docker_get(args, test_id, remote_temp_file, local_result_fd.name)
                    run_command(args, ['tar', 'oxzf', local_result_fd.name, '-C', local_test_root])
        finally:
            if httptester_id:
                docker_rm(args, httptester_id)
            if test_id:
                if args.docker_terminate == 'always' or (args.docker_terminate == 'success' and success):
                    docker_rm(args, test_id)
def delegate_remote(args, exclude, require, integration_targets):
    """Delegate ansible-test execution to a remote instance provisioned via Ansible Core CI.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    """
    remote = args.parsed_remote
    core_ci = AnsibleCoreCI(args, remote.platform, remote.version, stage=args.remote_stage, provider=args.remote_provider, arch=remote.arch)
    success = False
    raw = False
    if isinstance(args, ShellConfig):
        use_httptester = args.httptester
        raw = args.raw
    else:
        # Only start httptester when at least one selected target actually requires it.
        use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in integration_targets)
    if use_httptester and not docker_available():
        display.warning('Assuming --disable-httptester since `docker` is not available.')
        use_httptester = False
    httptester_id = None
    ssh_options = []
    content_root = None
    try:
        core_ci.start()
        if use_httptester:
            httptester_id, ssh_options = start_httptester(args)
        core_ci.wait()
        python_version = get_python_version(args, get_remote_completion(), args.remote)
        if remote.platform == 'windows':
            # Windows doesn't need the ansible-test fluff, just run the SSH command
            manage = ManageWindowsCI(core_ci)
            manage.setup(python_version)
            cmd = ['powershell.exe']
        elif raw:
            manage = ManagePosixCI(core_ci)
            manage.setup(python_version)
            cmd = create_shell_command(['sh'])
        else:
            manage = ManagePosixCI(core_ci)
            pwd = manage.setup(python_version)
            # Options (and their value counts) to strip when regenerating the command for the remote.
            options = {
                '--remote': 1,
            }
            python_interpreter = get_python_interpreter(args, get_remote_completion(), args.remote)
            ansible_root = os.path.join(pwd, 'ansible')
            if data_context().content.collection:
                content_root = os.path.join(pwd, data_context().content.collection.directory)
            else:
                content_root = ansible_root
            cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)
            if httptester_id:
                cmd += ['--inject-httptester', '--httptester-krb5-password', args.httptester_krb5_password]
            if isinstance(args, TestConfig):
                if args.coverage and not args.coverage_label:
                    cmd += ['--coverage-label', 'remote-%s-%s' % (remote.platform, remote.version)]
            if isinstance(args, IntegrationConfig):
                if not args.allow_destructive:
                    cmd.append('--allow-destructive')
            # remote instances are only expected to have a single python version available
            if isinstance(args, UnitsConfig) and not args.python:
                cmd += ['--python', 'default']
        if isinstance(args, IntegrationConfig):
            cloud_platforms = get_cloud_providers(args)
            for cloud_platform in cloud_platforms:
                ssh_options += cloud_platform.get_remote_ssh_options()
        try:
            manage.ssh(cmd, ssh_options)
            success = True
        finally:
            # Always attempt to download results, except for Windows and raw shell sessions.
            download = False
            if remote.platform != 'windows':
                download = True
            if isinstance(args, ShellConfig):
                if args.raw:
                    download = False
            if download and content_root:
                local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))
                remote_results_root = os.path.join(content_root, data_context().content.results_path)
                remote_results_name = os.path.basename(remote_results_root)
                remote_temp_path = os.path.join('/tmp', remote_results_name)
                # AIX cp and GNU cp provide different options, no way could be found to have a common
                # pattern and achieve the same goal
                cp_opts = '-hr' if remote.platform in ['aix', 'ibmi'] else '-a'
                manage.ssh('rm -rf {0} && mkdir {0} && cp {1} {2}/* {0}/ && chmod -R a+r {0}'.format(remote_temp_path, cp_opts, remote_results_root))
                manage.download(remote_temp_path, local_test_root)
    finally:
        if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
            core_ci.stop()
        if httptester_id:
            docker_rm(args, httptester_id)
def generate_command(args, python_interpreter, ansible_bin_path, content_root, options, exclude, require):
    """Generate the ansible-test command to execute in the delegated environment.

    :type args: EnvironmentConfig
    :type python_interpreter: str | None
    :type ansible_bin_path: str
    :type content_root: str
    :type options: dict[str, int]
    :type exclude: list[str]
    :type require: list[str]
    :rtype: list[str]
    """
    # --color takes one value; register it so filter_options strips the original setting.
    options['--color'] = 1
    cmd = [os.path.join(ansible_bin_path, 'ansible-test')]
    if python_interpreter:
        cmd = [python_interpreter] + cmd
    # Force the encoding used during delegation.
    # This is only needed because ansible-test relies on Python's file system encoding.
    # Environments that do not have the locale configured are thus unable to work with unicode file paths.
    # Examples include FreeBSD and some Linux containers.
    env_vars = dict(
        LC_ALL='en_US.UTF-8',
        ANSIBLE_TEST_CONTENT_ROOT=content_root,
    )
    env_args = ['%s=%s' % (key, env_vars[key]) for key in sorted(env_vars)]
    cmd = ['/usr/bin/env'] + env_args + cmd
    # Re-use the original command line, minus the options being replaced for delegation.
    cmd += list(filter_options(args, sys.argv[1:], options, exclude, require))
    cmd += ['--color', 'yes' if args.color else 'no']
    if args.requirements:
        cmd += ['--requirements']
    if isinstance(args, ShellConfig):
        cmd = create_shell_command(cmd)
    elif isinstance(args, SanityConfig):
        base_branch = args.base_branch or get_ci_provider().get_base_branch()
        if base_branch:
            cmd += ['--base-branch', base_branch]
    return cmd
def filter_options(args, argv, options, exclude, require):
    """Yield the original command-line arguments minus the filtered options, then append delegation-specific arguments.

    :type args: EnvironmentConfig
    :type argv: list[str]
    :type options: dict[str, int]
    :type exclude: list[str]
    :type require: list[str]
    :rtype: collections.Iterable[str]
    """
    # `options` maps each option name to the number of values it consumes; copy to avoid mutating the caller's dict.
    options = options.copy()
    options['--requirements'] = 0
    options['--truncate'] = 1
    options['--redact'] = 0
    options['--no-redact'] = 0
    if isinstance(args, TestConfig):
        options.update({
            '--changed': 0,
            '--tracked': 0,
            '--untracked': 0,
            '--ignore-committed': 0,
            '--ignore-staged': 0,
            '--ignore-unstaged': 0,
            '--changed-from': 1,
            '--changed-path': 1,
            '--metadata': 1,
            '--exclude': 1,
            '--require': 1,
        })
    elif isinstance(args, SanityConfig):
        options.update({
            '--base-branch': 1,
        })
    if isinstance(args, IntegrationConfig):
        options.update({
            '--no-temp-unicode': 0,
            '--no-pip-check': 0,
        })
    if isinstance(args, (NetworkIntegrationConfig, WindowsIntegrationConfig)):
        options.update({
            '--inventory': 1,
        })
    # `remaining` counts how many value arguments of a filtered option still need to be skipped.
    remaining = 0
    for arg in argv:
        if not arg.startswith('-') and remaining:
            remaining -= 1
            continue
        remaining = 0
        # Handle both '--opt value' and '--opt=value' forms.
        parts = arg.split('=', 1)
        key = parts[0]
        if key in options:
            remaining = options[key] - len(parts) + 1
            continue
        yield arg
    for arg in args.delegate_args:
        yield arg
    for target in exclude:
        yield '--exclude'
        yield target
    for target in require:
        yield '--require'
        yield target
    # Re-emit the filtered options with the values appropriate for the delegated environment.
    if isinstance(args, TestConfig):
        if args.metadata_path:
            yield '--metadata'
            yield args.metadata_path
        yield '--truncate'
        yield '%d' % args.truncate
        if args.redact:
            yield '--redact'
        else:
            yield '--no-redact'
    if isinstance(args, IntegrationConfig):
        if args.no_temp_unicode:
            yield '--no-temp-unicode'
        if not args.pip_check:
            yield '--no-pip-check'
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,120 |
Python 2.6 unit and sanity test failures during SNI brownouts
|
### Summary
I know there is some ongoing work to mitigate these problems, but since I'm not aware which parts have already been addressed and which have not, I want to share some common failures I've seen over the last few days:
1. When running `ansible-test sanity --color -v --junit --coverage-check --changed --docker --base-branch origin/main --skip-test pylint --skip-test ansible-doc --skip-test validate-modules --allow-disabled` in CI, the Python 2.6 `import` test fails:
```
02:05 ERROR: Command "/root/ansible_collections/community/general/tests/output/.tmp/sanity/import/minimal-py26/bin/python /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
02:05 >>> Standard Error
02:05 Could not find a version that satisfies the requirement coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2)) (from versions: )
02:05 No matching distribution found for coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
02:05 >>> Standard Output
02:05 Collecting coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
```
(See https://app.shippable.com/github/ansible-collections/community.general/runs/8257/1/console for the full log)
2. When running unit tests for Python 2.6 with `ansible-test units -v --color --docker --coverage` with `unittest2 ; python_version < '2.7'` in tests/unit/requirements.txt, it fails:
```
Run command: /usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt
Collecting unittest2 (from -r tests/unit/requirements.txt (line 1))
Could not find a version that satisfies the requirement unittest2 (from -r tests/unit/requirements.txt (line 1)) (from versions: )
No matching distribution found for unittest2 (from -r tests/unit/requirements.txt (line 1))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
```
(See https://github.com/ansible-collections/community.hrobot/runs/2264772861?check_suite_focus=true)
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
devel
```
### Configuration
...
### OS / Environment
...
### Steps to Reproduce
...
### Expected Results
...
### Actual Results
...
|
https://github.com/ansible/ansible/issues/74120
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-04-05T08:22:07Z |
python
| 2021-04-08T22:47:23Z |
test/lib/ansible_test/_internal/docker_util.py
|
"""Functions for accessing docker via the docker cli."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
import time
from . import types as t
from .io import (
open_binary_file,
read_text_file,
)
from .util import (
ApplicationError,
common_environment,
display,
find_executable,
SubprocessError,
)
from .http import (
urlparse,
)
from .util_common import (
run_command,
)
from .config import (
EnvironmentConfig,
)
BUFFER_SIZE = 256 * 256
def docker_available():
    """Return a truthy value when the `docker` executable can be located on the PATH.

    :rtype: bool
    """
    # find_executable returns the path (truthy) when found, or None when required=False and missing.
    docker_path = find_executable('docker', required=False)
    return docker_path
def get_docker_hostname():  # type: () -> str
    """Return the hostname of the Docker service."""
    try:
        # Result is cached as an attribute on the function object after the first call.
        return get_docker_hostname.hostname
    except AttributeError:
        pass
    docker_host = os.environ.get('DOCKER_HOST')
    if docker_host and docker_host.startswith('tcp://'):
        try:
            # Extract the host portion of the tcp://host:port URL.
            hostname = urlparse(docker_host)[1].split(':')[0]
            display.info('Detected Docker host: %s' % hostname, verbosity=1)
        except ValueError:
            hostname = 'localhost'
            display.warning('Could not parse DOCKER_HOST environment variable "%s", falling back to localhost.' % docker_host)
    else:
        hostname = 'localhost'
        display.info('Assuming Docker is available on localhost.', verbosity=1)
    get_docker_hostname.hostname = hostname
    return hostname
def get_docker_container_id():
    """Return the current container ID when running inside a Docker-like container, otherwise None.

    :rtype: str | None
    """
    try:
        # Result is cached as an attribute on the function object after the first call.
        return get_docker_container_id.container_id
    except AttributeError:
        pass
    path = '/proc/self/cpuset'
    container_id = None
    if os.path.exists(path):
        # File content varies based on the environment:
        #   No Container: /
        #   Docker: /docker/c86f3732b5ba3d28bb83b6e14af767ab96abbc52de31313dcb1176a62d91a507
        #   Azure Pipelines (Docker): /azpl_job/0f2edfed602dd6ec9f2e42c867f4d5ee640ebf4c058e6d3196d4393bb8fd0891
        #   Podman: /../../../../../..
        contents = read_text_file(path)
        cgroup_path, cgroup_name = os.path.split(contents.strip())
        if cgroup_path in ('/docker', '/azpl_job'):
            container_id = cgroup_name
    get_docker_container_id.container_id = container_id
    if container_id:
        display.info('Detected execution in Docker container: %s' % container_id, verbosity=1)
    return container_id
def get_docker_container_ip(args, container_id):
    """Return the IP address of the given container on its preferred network.

    :type args: EnvironmentConfig
    :type container_id: str
    :rtype: str
    """
    inspection = docker_inspect(args, container_id)
    settings = inspection[0]['NetworkSettings']
    networks = settings.get('Networks')

    if networks:
        preferred_network = get_docker_preferred_network_name(args)
        address = networks[preferred_network]['IPAddress']
    else:
        # podman doesn't provide Networks, fall back to using IPAddress
        address = settings['IPAddress']

    if not address:
        raise ApplicationError('Cannot retrieve IP address for container: %s' % container_id)

    return address
def get_docker_network_name(args, container_id):  # type: (EnvironmentConfig, str) -> str
    """Return the single network name of the specified container.

    Raises an exception if zero or more than one network is found.
    """
    networks = get_docker_networks(args, container_id)

    if not networks:
        raise ApplicationError('No network found for Docker container: %s.' % container_id)

    if len(networks) > 1:
        raise ApplicationError('Found multiple networks for Docker container %s instead of only one: %s' % (container_id, ', '.join(networks)))

    # Exactly one network remains at this point.
    network_name, = networks
    return network_name
def get_docker_preferred_network_name(args):  # type: (EnvironmentConfig) -> str
    """
    Return the preferred network name for use with Docker. The selection logic is:
    - the network selected by the user with `--docker-network`
    - the network of the currently running docker container (if any)
    - the default docker network (returns None)
    """
    if args.docker_network:
        return args.docker_network

    current_container_id = get_docker_container_id()

    if current_container_id:
        # Make sure any additional containers we launch use the same network as the current container we're running in.
        # This is needed when ansible-test is running in a container that is not connected to Docker's default network.
        return get_docker_network_name(args, current_container_id)

    return None
def is_docker_user_defined_network(network):  # type: (str) -> bool
    """Return True if the network being used is a user-defined network.

    The default bridge network and an unset network (None or empty string) are not user-defined.
    """
    # Coerce to bool so the annotated return type holds: the bare `and` expression
    # would otherwise return None or '' for falsy inputs instead of False.
    return bool(network) and network != 'bridge'
def get_docker_networks(args, container_id):
    """Return the sorted network names for a container, or None when the runtime omits them.

    :param args: EnvironmentConfig
    :param container_id: str
    :rtype: list[str]
    """
    inspection = docker_inspect(args, container_id)
    # podman doesn't return Networks - just silently return None if it's missing...
    networks = inspection[0]['NetworkSettings'].get('Networks')
    return sorted(networks) if networks is not None else None
def docker_pull(args, image):
    """Pull a docker image unless it is already present or pulling is disabled, retrying on failure.

    :type args: EnvironmentConfig
    :type image: str
    """
    # An image referenced by tag or digest that already exists locally never needs a pull.
    pinned = '@' in image or ':' in image
    if pinned and docker_images(args, image):
        display.info('Skipping docker pull of existing image with tag or digest: %s' % image, verbosity=2)
        return

    if not args.docker_pull:
        display.warning('Skipping docker pull for "%s". Image may be out-of-date.' % image)
        return

    # Up to nine attempts, pausing between each to ride out transient registry failures.
    for _attempt in range(9):
        try:
            docker_command(args, ['pull', image])
            return
        except SubprocessError:
            display.warning('Failed to pull docker image "%s". Waiting a few seconds before trying again.' % image)
            time.sleep(3)

    raise ApplicationError('Failed to pull docker image "%s".' % image)
def docker_put(args, container_id, src, dst):
    """Copy a local file into a running container.

    :type args: EnvironmentConfig
    :type container_id: str
    :type src: str
    :type dst: str
    """
    # avoid 'docker cp' due to a bug which causes 'docker rm' to fail
    dd_cmd = ['dd', 'of=%s' % dst, 'bs=%s' % BUFFER_SIZE]
    with open_binary_file(src) as src_fd:
        docker_exec(args, container_id, dd_cmd, options=['-i'], stdin=src_fd, capture=True)
def docker_get(args, container_id, src, dst):
    """Copy a file out of a running container to a local path.

    :type args: EnvironmentConfig
    :type container_id: str
    :type src: str
    :type dst: str
    """
    # avoid 'docker cp' due to a bug which causes 'docker rm' to fail
    dd_cmd = ['dd', 'if=%s' % src, 'bs=%s' % BUFFER_SIZE]
    with open_binary_file(dst, 'wb') as dst_fd:
        docker_exec(args, container_id, dd_cmd, options=['-i'], stdout=dst_fd, capture=True)
def docker_run(args, image, options, cmd=None, create_only=False):
    """Run (or only create) a container from the given image, retrying on transient failures.

    :type args: EnvironmentConfig
    :type image: str
    :type options: list[str] | None
    :type cmd: list[str] | None
    :type create_only: bool
    :rtype: str | None, str | None
    """
    if not options:
        options = []
    if not cmd:
        cmd = []
    if create_only:
        command = 'create'
    else:
        command = 'run'
    network = get_docker_preferred_network_name(args)
    if is_docker_user_defined_network(network):
        # Only when the network is not the default bridge network.
        # Using this with the default bridge network results in an error when using --link: links are only supported for user-defined networks
        options.extend(['--network', network])
    # Two attempts, pausing between them to ride out transient daemon errors.
    for _iteration in range(1, 3):
        try:
            return docker_command(args, [command] + options + [image] + cmd, capture=True)
        except SubprocessError as ex:
            display.error(ex)
            display.warning('Failed to run docker image "%s". Waiting a few seconds before trying again.' % image)
            time.sleep(3)
    raise ApplicationError('Failed to run docker image "%s".' % image)
def docker_start(args, container_id, options):  # type: (EnvironmentConfig, str, t.List[str]) -> (t.Optional[str], t.Optional[str])
    """Start an existing docker container by name or ID, retrying once on transient failures."""
    options = options or []

    # Two attempts, pausing between them to ride out transient daemon errors.
    for _attempt in range(1, 3):
        try:
            return docker_command(args, ['start'] + options + [container_id], capture=True)
        except SubprocessError as ex:
            display.error(ex)
            display.warning('Failed to start docker container "%s". Waiting a few seconds before trying again.' % container_id)
            time.sleep(3)

    raise ApplicationError('Failed to run docker container "%s".' % container_id)
def docker_images(args, image):
    """Return the parsed `docker images` entries matching the given image, handling docker and podman output formats.

    :param args: CommonConfig
    :param image: str
    :rtype: list[dict[str, any]]
    """
    try:
        stdout, _dummy = docker_command(args, ['images', image, '--format', '{{json .}}'], capture=True, always=True)
    except SubprocessError as ex:
        if 'no such image' in ex.stderr:
            return []  # podman does not handle this gracefully, exits 125
        if 'function "json" not defined' in ex.stderr:
            # podman > 2 && < 2.2.0 breaks with --format {{json .}}, and requires --format json
            # So we try this as a fallback. If it fails again, we just raise the exception and bail.
            stdout, _dummy = docker_command(args, ['images', image, '--format', 'json'], capture=True, always=True)
        else:
            raise ex
    if stdout.startswith('['):
        # modern podman outputs a pretty-printed json list. Just load the whole thing.
        return json.loads(stdout)
    # docker outputs one json object per line (jsonl)
    return [json.loads(line) for line in stdout.splitlines()]
def docker_rm(args, container_id):
    """Force-remove a container, ignoring the case where it no longer exists.

    :type args: EnvironmentConfig
    :type container_id: str
    """
    try:
        docker_command(args, ['rm', '-f', container_id], capture=True)
    except SubprocessError as ex:
        # podman exits 1 when the container is missing; treat that as already removed.
        if 'no such container' not in ex.stderr:
            raise ex
def docker_inspect(args, container_id):
    """Return the parsed `docker inspect` output for the given container.

    :type args: EnvironmentConfig
    :type container_id: str
    :rtype: list[dict]
    """
    if args.explain:
        return []
    try:
        stdout = docker_command(args, ['inspect', container_id], capture=True)[0]
        return json.loads(stdout)
    except SubprocessError as ex:
        if 'no such image' in ex.stderr:
            return []  # podman does not handle this gracefully, exits 125
        try:
            # Some runtimes write valid JSON to stdout even when exiting non-zero.
            return json.loads(ex.stdout)
        except Exception:
            raise ex
def docker_network_disconnect(args, container_id, network):
    """Disconnect a container from the named docker network.

    :param args: EnvironmentConfig
    :param container_id: str
    :param network: str
    """
    disconnect_cmd = ['network', 'disconnect', network, container_id]
    docker_command(args, disconnect_cmd, capture=True)
def docker_network_inspect(args, network):
    """Return the parsed `docker network inspect` output for the named network.

    :type args: EnvironmentConfig
    :type network: str
    :rtype: list[dict]
    """
    if args.explain:
        return []

    try:
        stdout = docker_command(args, ['network', 'inspect', network], capture=True)[0]
    except SubprocessError as ex:
        # Some runtimes write valid JSON to stdout even when exiting non-zero.
        try:
            return json.loads(ex.stdout)
        except Exception:
            raise ex

    return json.loads(stdout)
def docker_exec(args, container_id, cmd, options=None, capture=False, stdin=None, stdout=None, data=None):
    """Run a command inside an existing container via `docker exec`.

    :type args: EnvironmentConfig
    :type container_id: str
    :type cmd: list[str]
    :type options: list[str] | None
    :type capture: bool
    :type stdin: BinaryIO | None
    :type stdout: BinaryIO | None
    :type data: str | None
    :rtype: str | None, str | None
    """
    if not options:
        options = []

    if data:
        # Interactive mode is required for the data to reach the container over stdin.
        options.append('-i')

    full_cmd = ['exec'] + options + [container_id] + cmd
    return docker_command(args, full_cmd, capture=capture, stdin=stdin, stdout=stdout, data=data)
def docker_info(args):
    """Return the parsed output of `docker info`.

    :param args: CommonConfig
    :rtype: dict[str, any]
    """
    stdout = docker_command(args, ['info', '--format', '{{json .}}'], capture=True, always=True)[0]
    return json.loads(stdout)
def docker_version(args):
    """Return the parsed output of `docker version`.

    :param args: CommonConfig
    :rtype: dict[str, any]
    """
    stdout = docker_command(args, ['version', '--format', '{{json .}}'], capture=True, always=True)[0]
    return json.loads(stdout)
def docker_command(args, cmd, capture=False, stdin=None, stdout=None, always=False, data=None):
    """Run the given docker CLI arguments, returning the run_command result.

    :type args: CommonConfig
    :type cmd: list[str]
    :type capture: bool
    :type stdin: file | None
    :type stdout: file | None
    :type always: bool
    :type data: str | None
    :rtype: str | None, str | None
    """
    return run_command(args, ['docker'] + cmd, env=docker_environment(), capture=capture, stdin=stdin, stdout=stdout, always=always, data=data)
def docker_environment():
    """Return the common environment augmented with all DOCKER_* variables from the caller's environment.

    :rtype: dict[str, str]
    """
    env = common_environment()
    docker_vars = {name: value for name, value in os.environ.items() if name.startswith('DOCKER_')}
    env.update(docker_vars)
    return env
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,120 |
Python 2.6 unit and sanity test failures during SNI brownouts
|
### Summary
I know there is some ongoing work to mitigate these problems, but since I'm not aware of which parts have already been addressed and which have not, I want to share some common failures I've seen during the last few days:
1. When running `ansible-test sanity --color -v --junit --coverage-check --changed --docker --base-branch origin/main --skip-test pylint --skip-test ansible-doc --skip-test validate-modules --allow-disabled` in CI, the Python 2.6 `import` test fails:
```
02:05 ERROR: Command "/root/ansible_collections/community/general/tests/output/.tmp/sanity/import/minimal-py26/bin/python /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
02:05 >>> Standard Error
02:05 Could not find a version that satisfies the requirement coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2)) (from versions: )
02:05 No matching distribution found for coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
02:05 >>> Standard Output
02:05 Collecting coverage<5.0.0,>=4.5.1 (from -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt (line 2))
```
(See https://app.shippable.com/github/ansible-collections/community.general/runs/8257/1/console for the full log)
2. When running unit tests for Python 2.6 with `ansible-test units -v --color --docker --coverage` with `unittest2 ; python_version < '2.7'` in tests/unit/requirements.txt, it fails:
```
Run command: /usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt
Collecting unittest2 (from -r tests/unit/requirements.txt (line 1))
Could not find a version that satisfies the requirement unittest2 (from -r tests/unit/requirements.txt (line 1)) (from versions: )
No matching distribution found for unittest2 (from -r tests/unit/requirements.txt (line 1))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt coverage -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 1.
```
(See https://github.com/ansible-collections/community.hrobot/runs/2264772861?check_suite_focus=true)
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
devel
```
### Configuration
...
### OS / Environment
...
### Steps to Reproduce
...
### Expected Results
...
### Actual Results
...
|
https://github.com/ansible/ansible/issues/74120
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-04-05T08:22:07Z |
python
| 2021-04-08T22:47:23Z |
test/lib/ansible_test/_internal/executor.py
|
"""Execute Ansible tests."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
import datetime
import re
import time
import textwrap
import functools
import difflib
import filecmp
import random
import string
import shutil
from . import types as t
from .thread import (
WrappedThread,
)
from .core_ci import (
AnsibleCoreCI,
SshKey,
)
from .manage_ci import (
ManageWindowsCI,
ManageNetworkCI,
get_network_settings,
)
from .cloud import (
cloud_filter,
cloud_init,
get_cloud_environment,
get_cloud_platforms,
CloudEnvironmentConfig,
)
from .io import (
make_dirs,
open_text_file,
read_text_file,
write_text_file,
)
from .util import (
ApplicationWarning,
ApplicationError,
SubprocessError,
display,
remove_tree,
find_executable,
raw_command,
get_available_port,
generate_pip_command,
find_python,
cmd_quote,
ANSIBLE_LIB_ROOT,
ANSIBLE_TEST_DATA_ROOT,
ANSIBLE_TEST_CONFIG_ROOT,
get_ansible_version,
tempdir,
open_zipfile,
SUPPORTED_PYTHON_VERSIONS,
str_to_version,
version_to_str,
get_hash,
)
from .util_common import (
get_docker_completion,
get_remote_completion,
get_python_path,
intercept_command,
named_temporary_file,
run_command,
write_json_test_results,
ResultType,
handle_layout_messages,
)
from .docker_util import (
docker_pull,
docker_run,
docker_available,
docker_rm,
get_docker_container_id,
get_docker_container_ip,
get_docker_hostname,
get_docker_preferred_network_name,
is_docker_user_defined_network,
)
from .ansible_util import (
ansible_environment,
check_pyyaml,
)
from .target import (
IntegrationTarget,
walk_internal_targets,
walk_posix_integration_targets,
walk_network_integration_targets,
walk_windows_integration_targets,
TIntegrationTarget,
)
from .ci import (
get_ci_provider,
)
from .classification import (
categorize_changes,
)
from .config import (
TestConfig,
EnvironmentConfig,
IntegrationConfig,
NetworkIntegrationConfig,
PosixIntegrationConfig,
ShellConfig,
WindowsIntegrationConfig,
TIntegrationConfig,
)
from .metadata import (
ChangeDescription,
)
from .integration import (
integration_test_environment,
integration_test_config_file,
setup_common_temp_dir,
get_inventory_relative_path,
check_inventory,
delegate_inventory,
)
from .data import (
data_context,
)
# Hostnames served by the httptester container; targets with the
# 'needs/httptester' alias expect these names to resolve to the tester.
HTTPTESTER_HOSTS = (
    'ansible.http.tests',
    'sni1.ansible.http.tests',
    'fail.ansible.http.tests',
    'self-signed.ansible.http.tests',
)
def check_startup():
    """Checks to perform at startup before running commands.

    Currently this only verifies that no legacy core/extras module trees are present.
    """
    check_legacy_modules()
def check_legacy_modules():
    """Detect conflicts with legacy core/extras module directories to avoid problems later."""
    for directory in 'core', 'extras':
        legacy_path = 'lib/ansible/modules/%s' % directory

        for root, _dir_names, file_names in os.walk(legacy_path):
            if not file_names:
                continue

            # the directory shouldn't exist, but if it does, it must contain no files
            raise ApplicationError('Files prohibited in "%s". '
                                   'These are most likely legacy modules from version 2.2 or earlier.' % root)
def create_shell_command(command):
    """Wrap the given command in /usr/bin/env, forwarding selected environment variables.

    :type command: list[str]
    :rtype: list[str]
    """
    # variables forwarded only when present in the current environment
    passthrough_vars = (
        'TERM',
    )

    cmd = ['/usr/bin/env']

    for var in passthrough_vars:
        if var in os.environ:
            cmd.append('%s=%s' % (var, os.environ[var]))

    cmd += command

    return cmd
def get_openssl_version(args, python, python_version):  # type: (EnvironmentConfig, str, str) -> t.Optional[t.Tuple[int, ...]]
    """Return the openssl version as a tuple of ints, or None if it cannot be detected."""
    if not python_version.startswith('2.'):
        # OpenSSL version checking only works on Python 3.x.
        # This should be the most accurate, since it is the Python we will be using.
        version = json.loads(run_command(args, [python, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'sslcheck.py')], capture=True, always=True)[0])['version']

        if version:
            display.info('Detected OpenSSL version %s under Python %s.' % (version_to_str(version), python_version), verbosity=1)

            return tuple(version)

    # Fall back to detecting the OpenSSL version from the CLI.
    # This should provide an adequate solution on Python 2.x.
    openssl_path = find_executable('openssl', required=False)

    if openssl_path:
        try:
            result = raw_command([openssl_path, 'version'], capture=True)[0]
        except SubprocessError:
            # treat a failing CLI the same as a missing one and fall through
            result = ''

        match = re.search(r'^OpenSSL (?P<version>[0-9]+\.[0-9]+\.[0-9]+)', result)

        if match:
            version = str_to_version(match.group('version'))

            display.info('Detected OpenSSL version %s using the openssl CLI.' % version_to_str(version), verbosity=1)

            return version

    display.info('Unable to detect OpenSSL version.', verbosity=1)

    return None
def get_setuptools_version(args, python):  # type: (EnvironmentConfig, str) -> t.Tuple[int]
    """Return the setuptools version for the given python as a tuple of ints."""
    try:
        return str_to_version(raw_command([python, '-c', 'import setuptools; print(setuptools.__version__)'], capture=True)[0])
    except SubprocessError:
        if args.explain:
            return tuple()  # ignore errors in explain mode in case setuptools is not already installed

        raise
def install_cryptography(args, python, python_version, pip):  # type: (EnvironmentConfig, str, str, t.List[str]) -> None
    """
    Install cryptography for the specified environment.

    :param args: the active environment configuration
    :param python: path to the python interpreter to install for
    :param python_version: version string of that interpreter (e.g. '2.6')
    :param pip: pip command (as an argv list) for that interpreter
    """
    # make sure ansible-test's basic requirements are met before continuing
    # this is primarily to ensure that pip is new enough to facilitate further requirements installation
    install_ansible_test_requirements(args, pip)

    # make sure setuptools is available before trying to install cryptography
    # the installed version of setuptools affects the version of cryptography to install
    run_command(args, generate_pip_install(pip, '', packages=['setuptools']))

    # install the latest cryptography version that the current requirements can support
    # use a custom constraints file to avoid the normal constraints file overriding the chosen version of cryptography
    # if not installed here later install commands may try to install an unsupported version due to the presence of older setuptools
    # this is done instead of upgrading setuptools to allow tests to function with older distribution provided versions of setuptools
    run_command(args, generate_pip_install(pip, '',
                                           packages=[get_cryptography_requirement(args, python, python_version)],
                                           constraints=os.path.join(ANSIBLE_TEST_DATA_ROOT, 'cryptography-constraints.txt')))
def get_cryptography_requirement(args, python, python_version):  # type: (EnvironmentConfig, str, str) -> str
    """
    Return the correct cryptography requirement (a pip requirement string) for the given python version.
    The version of cryptography installed depends on the python version, setuptools version and openssl version.
    """
    setuptools_version = get_setuptools_version(args, python)
    openssl_version = get_openssl_version(args, python, python_version)

    if setuptools_version >= (18, 5):
        if python_version == '2.6':
            # cryptography 2.2+ requires python 2.7+
            # see https://github.com/pyca/cryptography/blob/master/CHANGELOG.rst#22---2018-03-19
            cryptography = 'cryptography < 2.2'
        elif openssl_version and openssl_version < (1, 1, 0):
            # cryptography 3.2 requires openssl 1.1.x or later
            # see https://cryptography.io/en/latest/changelog.html#v3-2
            cryptography = 'cryptography < 3.2'
        else:
            # cryptography 3.4+ fails to install on many systems
            # this is a temporary work-around until a more permanent solution is available
            cryptography = 'cryptography < 3.4'
    else:
        # cryptography 2.1+ requires setuptools 18.5+
        # see https://github.com/pyca/cryptography/blob/62287ae18383447585606b9d0765c0f1b8a9777c/setup.py#L26
        cryptography = 'cryptography < 2.1'

    return cryptography
def install_command_requirements(args, python_version=None, context=None, enable_pyyaml_check=False):
    """
    Install the pip requirements for the current command, skipping anything already installed.

    :type args: EnvironmentConfig
    :type python_version: str | None
    :type context: str | None
    :type enable_pyyaml_check: bool
    """
    if not args.explain:
        make_dirs(ResultType.COVERAGE.path)
        make_dirs(ResultType.DATA.path)

    if isinstance(args, ShellConfig):
        if args.raw:
            return

    if not args.requirements:
        return

    # shell commands never install additional requirements beyond the checks above
    if isinstance(args, ShellConfig):
        return

    packages = []

    if isinstance(args, TestConfig):
        if args.coverage:
            packages.append('coverage')
        if args.junit:
            packages.append('junit-xml')

    if not python_version:
        python_version = args.python_version

    python = find_python(python_version)
    pip = generate_pip_command(python)

    # skip packages which have already been installed for python_version

    # the per-version cache lives as an attribute on this function so it persists across calls
    try:
        package_cache = install_command_requirements.package_cache
    except AttributeError:
        package_cache = install_command_requirements.package_cache = {}

    installed_packages = package_cache.setdefault(python_version, set())
    skip_packages = [package for package in packages if package in installed_packages]

    for package in skip_packages:
        packages.remove(package)

    installed_packages.update(packages)

    if args.command != 'sanity':
        install_cryptography(args, python, python_version, pip)

    commands = [generate_pip_install(pip, args.command, packages=packages, context=context)]

    if isinstance(args, IntegrationConfig):
        for cloud_platform in get_cloud_platforms(args):
            commands.append(generate_pip_install(pip, '%s.cloud.%s' % (args.command, cloud_platform)))

    commands = [cmd for cmd in commands if cmd]

    if not commands:
        return  # no need to detect changes or run pip check since we are not making any changes

    # only look for changes when more than one requirements file is needed
    detect_pip_changes = len(commands) > 1

    # first pass to install requirements, changes expected unless environment is already set up
    install_ansible_test_requirements(args, pip)
    changes = run_pip_commands(args, pip, commands, detect_pip_changes)

    if changes:
        # second pass to check for conflicts in requirements, changes are not expected here
        changes = run_pip_commands(args, pip, commands, detect_pip_changes)

        if changes:
            raise ApplicationError('Conflicts detected in requirements. The following commands reported changes during verification:\n%s' %
                                   '\n'.join((' '.join(cmd_quote(c) for c in cmd) for cmd in changes)))

    if args.pip_check:
        # ask pip to check for conflicts between installed packages
        try:
            run_command(args, pip + ['check', '--disable-pip-version-check'], capture=True)
        except SubprocessError as ex:
            if ex.stderr.strip() == 'ERROR: unknown command "check"':
                display.warning('Cannot check pip requirements for conflicts because "pip check" is not supported.')
            else:
                raise

    if enable_pyyaml_check:
        # pyyaml may have been one of the requirements that was installed, so perform an optional check for it
        check_pyyaml(args, python_version, required=False)
def install_ansible_test_requirements(args, pip):  # type: (EnvironmentConfig, t.List[str]) -> None
    """Install requirements for ansible-test for the given pip if not already installed."""
    # cache the pip commands which have already been bootstrapped on this function itself
    # (previously the set was stored on install_command_requirements, polluting an
    # unrelated function's namespace; nothing else reads that attribute)
    try:
        installed = install_ansible_test_requirements.installed
    except AttributeError:
        installed = install_ansible_test_requirements.installed = set()

    key = tuple(pip)

    if key in installed:
        return

    # make sure basic ansible-test requirements are met, including making sure that pip is recent enough to support constraints
    # virtualenvs created by older distributions may include very old pip versions, such as those created in the centos6 test container (pip 6.0.8)
    run_command(args, generate_pip_install(pip, 'ansible-test', use_constraints=False))

    installed.add(key)
def run_pip_commands(args, pip, commands, detect_pip_changes=False):
    """Run each pip command, returning the subset of commands that changed the installed packages.

    :type args: EnvironmentConfig
    :type pip: list[str]
    :type commands: list[list[str]]
    :type detect_pip_changes: bool
    :rtype: list[list[str]]
    """
    def snapshot():
        # only snapshot the package list when change detection was requested
        return pip_list(args, pip) if detect_pip_changes else None

    changed_commands = []
    current_state = snapshot()

    for command in commands:
        if not command:
            continue

        previous_state = current_state
        run_command(args, command)
        current_state = snapshot()

        if previous_state != current_state:
            changed_commands.append(command)

    return changed_commands
def pip_list(args, pip):
    """Return the captured stdout of `pip list` for the given pip.

    :type args: EnvironmentConfig
    :type pip: list[str]
    :rtype: str
    """
    return run_command(args, pip + ['list'], capture=True)[0]
def generate_pip_install(pip, command, packages=None, constraints=None, use_constraints=True, context=None):
    """
    Build a `pip install` command line for the given test command, or None if there is nothing to install.

    :type pip: list[str]
    :type command: str
    :type packages: list[str] | None
    :type constraints: str | None
    :type use_constraints: bool
    :type context: str | None
    :rtype: list[str] | None
    """
    constraints = constraints or os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'constraints.txt')
    requirements = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', '%s.txt' % ('%s.%s' % (command, context) if context else command))
    content_constraints = None

    options = []

    # requirements files are only added when they exist and are non-empty
    if os.path.exists(requirements) and os.path.getsize(requirements):
        options += ['-r', requirements]

    if command == 'sanity' and data_context().content.is_ansible:
        requirements = os.path.join(data_context().content.sanity_path, 'code-smell', '%s.requirements.txt' % context)

        if os.path.exists(requirements) and os.path.getsize(requirements):
            options += ['-r', requirements]

    if command == 'units':
        requirements = os.path.join(data_context().content.unit_path, 'requirements.txt')

        if os.path.exists(requirements) and os.path.getsize(requirements):
            options += ['-r', requirements]

        content_constraints = os.path.join(data_context().content.unit_path, 'constraints.txt')

    if command in ('integration', 'windows-integration', 'network-integration'):
        requirements = os.path.join(data_context().content.integration_path, 'requirements.txt')

        if os.path.exists(requirements) and os.path.getsize(requirements):
            options += ['-r', requirements]

        requirements = os.path.join(data_context().content.integration_path, '%s.requirements.txt' % command)

        if os.path.exists(requirements) and os.path.getsize(requirements):
            options += ['-r', requirements]

        content_constraints = os.path.join(data_context().content.integration_path, 'constraints.txt')

    if command.startswith('integration.cloud.'):
        content_constraints = os.path.join(data_context().content.integration_path, 'constraints.txt')

    if packages:
        options += packages

    if not options:
        return None

    if use_constraints:
        if content_constraints and os.path.exists(content_constraints) and os.path.getsize(content_constraints):
            # listing content constraints first gives them priority over constraints provided by ansible-test
            options.extend(['-c', content_constraints])

        options.extend(['-c', constraints])

    return pip + ['install', '--disable-pip-version-check'] + options
def command_shell(args):
    """Open an interactive shell in the configured test environment.

    :type args: ShellConfig
    """
    if args.delegate:
        raise Delegate()

    install_command_requirements(args)

    if args.inject_httptester:
        inject_httptester(args)

    run_command(args, create_shell_command(['bash', '-i']))
def command_posix_integration(args):
    """Run POSIX integration tests for the given configuration.

    :type args: PosixIntegrationConfig
    """
    handle_layout_messages(data_context().content.integration_messages)

    inventory_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, os.path.basename(get_inventory_relative_path(args)))

    all_targets = tuple(walk_posix_integration_targets(include_hidden=True))
    internal_targets = command_integration_filter(args, all_targets)

    command_integration_filtered(args, internal_targets, all_targets, inventory_path)
def command_network_integration(args):
    """
    Run network integration tests, provisioning remote platform instances in parallel when requested.

    :type args: NetworkIntegrationConfig
    """
    handle_layout_messages(data_context().content.integration_messages)

    inventory_relative_path = get_inventory_relative_path(args)
    template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'

    if args.inventory:
        inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
    else:
        inventory_path = os.path.join(data_context().content.root, inventory_relative_path)

    if args.no_temp_workdir:
        # temporary solution to keep DCI tests working
        inventory_exists = os.path.exists(inventory_path)
    else:
        inventory_exists = os.path.isfile(inventory_path)

    if not args.explain and not args.platform and not inventory_exists:
        raise ApplicationError(
            'Inventory not found: %s\n'
            'Use --inventory to specify the inventory path.\n'
            'Use --platform to provision resources and generate an inventory file.\n'
            'See also inventory template: %s' % (inventory_path, template_path)
        )

    check_inventory(args, inventory_path)
    delegate_inventory(args, inventory_path)

    all_targets = tuple(walk_network_integration_targets(include_hidden=True))
    internal_targets = command_integration_filter(args, all_targets, init_callback=network_init)
    instances = []  # type: t.List[WrappedThread]

    if args.platform:
        get_python_path(args, args.python_executable)  # initialize before starting threads

        configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)

        # one provisioning thread per requested platform/version which has a config
        for platform_version in args.platform:
            platform, version = platform_version.split('/', 1)
            config = configs.get(platform_version)

            if not config:
                continue

            instance = WrappedThread(functools.partial(network_run, args, platform, version, config))
            instance.daemon = True
            instance.start()
            instances.append(instance)

        # poll until all provisioning threads have finished
        while any(instance.is_alive() for instance in instances):
            time.sleep(1)

        remotes = [instance.wait_for_result() for instance in instances]
        inventory = network_inventory(args, remotes)

        display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)

        if not args.explain:
            write_text_file(inventory_path, inventory)

    success = False

    try:
        command_integration_filtered(args, internal_targets, all_targets, inventory_path)
        success = True
    finally:
        # terminate remote instances per --remote-terminate policy, even on failure
        if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
            for instance in instances:
                instance.result.stop()
def network_init(args, internal_targets):  # type: (NetworkIntegrationConfig, t.Tuple[IntegrationTarget, ...]) -> None
    """Initialize platforms for network integration tests, starting one instance per platform/version in parallel."""
    if not args.platform:
        return

    # NOTE(review): instance_config appears to be pre-populated in some flows (e.g. delegation) — skip provisioning then
    if args.metadata.instance_config is not None:
        return

    platform_targets = set(a for target in internal_targets for a in target.aliases if a.startswith('network/'))

    instances = []  # type: t.List[WrappedThread]

    # generate an ssh key (if needed) up front once, instead of for each instance
    SshKey(args)

    for platform_version in args.platform:
        platform, version = platform_version.split('/', 1)
        platform_target = 'network/%s/' % platform

        # skip platforms that none of the selected targets actually use
        if platform_target not in platform_targets:
            display.warning('Skipping "%s" because selected tests do not target the "%s" platform.' % (
                platform_version, platform))
            continue

        instance = WrappedThread(functools.partial(network_start, args, platform, version))
        instance.daemon = True
        instance.start()
        instances.append(instance)

    # poll until all provisioning threads have finished
    while any(instance.is_alive() for instance in instances):
        time.sleep(1)

    args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
def network_start(args, platform, version):
    """Start and save a new network platform instance.

    :type args: NetworkIntegrationConfig
    :type platform: str
    :type version: str
    :rtype: AnsibleCoreCI
    """
    instance = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider)
    instance.start()

    return instance.save()
def network_run(args, platform, version, config):
    """Load an existing network platform instance from config and wait until it is ready.

    :type args: NetworkIntegrationConfig
    :type platform: str
    :type version: str
    :type config: dict[str, str]
    :rtype: AnsibleCoreCI
    """
    instance = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider, load=False)
    instance.load(config)
    instance.wait()

    ManageNetworkCI(args, instance).wait()

    return instance
def network_inventory(args, remotes):
    """Return inventory file content describing the given network remotes.

    :type args: NetworkIntegrationConfig
    :type remotes: list[AnsibleCoreCI]
    :rtype: str
    """
    # one inventory group per platform, plus a parent 'net' group listing them
    groups = {remote.platform: [] for remote in remotes}
    net = []

    for remote in remotes:
        host_vars = dict(
            ansible_host=remote.connection.hostname,
            ansible_user=remote.connection.username,
            ansible_ssh_private_key_file=os.path.abspath(remote.ssh_key.key),
        )

        settings = get_network_settings(args, remote.platform, remote.version)
        host_vars.update(settings.inventory_vars)

        host_line = '%s %s' % (
            remote.name.replace('.', '-'),
            ' '.join('%s="%s"' % (key, host_vars[key]) for key in sorted(host_vars)),
        )

        groups[remote.platform].append(host_line)
        net.append(remote.platform)

    groups['net:children'] = net

    sections = []

    for group, hosts in groups.items():
        sections.append(textwrap.dedent("""
        [%s]
        %s
        """) % (group, '\n'.join(hosts)))

    return ''.join(sections)
def command_windows_integration(args):
    """
    Run Windows integration tests: provision remotes (when --windows is used), optionally
    set up httptester port forwarding and coverage collection, then run the filtered targets.

    :type args: WindowsIntegrationConfig
    """
    handle_layout_messages(data_context().content.integration_messages)

    inventory_relative_path = get_inventory_relative_path(args)
    template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'

    if args.inventory:
        inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
    else:
        inventory_path = os.path.join(data_context().content.root, inventory_relative_path)

    if not args.explain and not args.windows and not os.path.isfile(inventory_path):
        raise ApplicationError(
            'Inventory not found: %s\n'
            'Use --inventory to specify the inventory path.\n'
            'Use --windows to provision resources and generate an inventory file.\n'
            'See also inventory template: %s' % (inventory_path, template_path)
        )

    check_inventory(args, inventory_path)
    delegate_inventory(args, inventory_path)

    all_targets = tuple(walk_windows_integration_targets(include_hidden=True))
    internal_targets = command_integration_filter(args, all_targets, init_callback=windows_init)
    instances = []  # type: t.List[WrappedThread]
    pre_target = None
    post_target = None
    httptester_id = None

    if args.windows:
        get_python_path(args, args.python_executable)  # initialize before starting threads

        configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)

        # one provisioning thread per requested Windows version
        for version in args.windows:
            config = configs['windows/%s' % version]

            instance = WrappedThread(functools.partial(windows_run, args, version, config))
            instance.daemon = True
            instance.start()
            instances.append(instance)

        # poll until all provisioning threads have finished
        while any(instance.is_alive() for instance in instances):
            time.sleep(1)

        remotes = [instance.wait_for_result() for instance in instances]
        inventory = windows_inventory(remotes)

        display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)

        if not args.explain:
            write_text_file(inventory_path, inventory)

    use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in internal_targets)
    # if running under Docker delegation, the httptester may have already been started
    docker_httptester = bool(os.environ.get("HTTPTESTER", False))

    if use_httptester and not docker_available() and not docker_httptester:
        display.warning('Assuming --disable-httptester since `docker` is not available.')
    elif use_httptester:
        if docker_httptester:
            # we are running in a Docker container that is linked to the httptester container, we just need to
            # forward these requests to the linked hostname
            first_host = HTTPTESTER_HOSTS[0]
            ssh_options = [
                "-R", "8080:%s:80" % first_host,
                "-R", "8443:%s:443" % first_host,
                "-R", "8444:%s:444" % first_host
            ]
        else:
            # we are running directly and need to start the httptester container ourselves and forward the port
            # from there manually set so HTTPTESTER env var is set during the run
            args.inject_httptester = True
            httptester_id, ssh_options = start_httptester(args)

        # to get this SSH command to run in the background we need to set to run in background (-f) and disable
        # the pty allocation (-T)
        ssh_options.insert(0, "-fT")

        # create a script that will continue to run in the background until the script is deleted, this will
        # cleanup and close the connection
        # NOTE: both closures below read watcher_path, which is assigned after their definitions but before any call
        def forward_ssh_ports(target):
            """Per-target pre-hook: start the httptester watcher on each 2008+ remote.

            :type target: IntegrationTarget
            """
            if 'needs/httptester/' not in target.aliases:
                return

            for remote in [r for r in remotes if r.version != '2008']:
                manage = ManageWindowsCI(remote)
                manage.upload(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'windows-httptester.ps1'), watcher_path)

                # We cannot pass an array of string with -File so we just use a delimiter for multiple values
                script = "powershell.exe -NoProfile -ExecutionPolicy Bypass -File .\\%s -Hosts \"%s\"" \
                         % (watcher_path, "|".join(HTTPTESTER_HOSTS))
                if args.verbosity > 3:
                    script += " -Verbose"

                manage.ssh(script, options=ssh_options, force_pty=False)

        def cleanup_ssh_ports(target):
            """Per-target post-hook: stop the httptester watcher on each 2008+ remote.

            :type target: IntegrationTarget
            """
            if 'needs/httptester/' not in target.aliases:
                return

            for remote in [r for r in remotes if r.version != '2008']:
                # delete the tmp file that keeps the http-tester alive
                manage = ManageWindowsCI(remote)
                manage.ssh("cmd.exe /c \"del %s /F /Q\"" % watcher_path, force_pty=False)

        watcher_path = "ansible-test-http-watcher-%s.ps1" % time.time()
        pre_target = forward_ssh_ports
        post_target = cleanup_ssh_ports

    def run_playbook(playbook, run_playbook_vars):  # type: (str, t.Dict[str, t.Any]) -> None
        # run one of the bundled coverage setup/teardown playbooks against the inventory
        playbook_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'playbooks', playbook)
        command = ['ansible-playbook', '-i', inventory_path, playbook_path, '-e', json.dumps(run_playbook_vars)]
        if args.verbosity:
            command.append('-%s' % ('v' * args.verbosity))

        env = ansible_environment(args)
        intercept_command(args, command, '', env, disable_coverage=True)

    remote_temp_path = None

    if args.coverage and not args.coverage_check:
        # Create the remote directory that is writable by everyone. Use Ansible to talk to the remote host.
        remote_temp_path = 'C:\\ansible_test_coverage_%s' % time.time()
        playbook_vars = {'remote_temp_path': remote_temp_path}
        run_playbook('windows_coverage_setup.yml', playbook_vars)

    success = False

    try:
        command_integration_filtered(args, internal_targets, all_targets, inventory_path, pre_target=pre_target,
                                     post_target=post_target, remote_temp_path=remote_temp_path)
        success = True
    finally:
        if httptester_id:
            docker_rm(args, httptester_id)

        if remote_temp_path:
            # Zip up the coverage files that were generated and fetch it back to localhost.
            with tempdir() as local_temp_path:
                playbook_vars = {'remote_temp_path': remote_temp_path, 'local_temp_path': local_temp_path}
                run_playbook('windows_coverage_teardown.yml', playbook_vars)

                for filename in os.listdir(local_temp_path):
                    with open_zipfile(os.path.join(local_temp_path, filename)) as coverage_zip:
                        coverage_zip.extractall(ResultType.COVERAGE.path)

        if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
            for instance in instances:
                instance.result.stop()
# noinspection PyUnusedLocal
def windows_init(args, internal_targets):  # pylint: disable=locally-disabled, unused-argument
    """
    Start one Windows instance per requested version, in parallel.

    :type args: WindowsIntegrationConfig
    :type internal_targets: tuple[IntegrationTarget]
    """
    if not args.windows:
        return

    # NOTE(review): instance_config appears to be pre-populated in some flows (e.g. delegation) — skip provisioning then
    if args.metadata.instance_config is not None:
        return

    instances = []  # type: t.List[WrappedThread]

    for version in args.windows:
        instance = WrappedThread(functools.partial(windows_start, args, version))
        instance.daemon = True
        instance.start()
        instances.append(instance)

    # poll until all provisioning threads have finished
    while any(instance.is_alive() for instance in instances):
        time.sleep(1)

    args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
def windows_start(args, version):
    """Start and save a new Windows instance.

    :type args: WindowsIntegrationConfig
    :type version: str
    :rtype: AnsibleCoreCI
    """
    instance = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider)
    instance.start()

    return instance.save()
def windows_run(args, version, config):
    """Load an existing Windows instance from config and wait until it is ready.

    :type args: WindowsIntegrationConfig
    :type version: str
    :type config: dict[str, str]
    :rtype: AnsibleCoreCI
    """
    instance = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider, load=False)
    instance.load(config)
    instance.wait()

    ManageWindowsCI(instance).wait()

    return instance
def windows_inventory(remotes):
    """Return inventory file content describing the given Windows remotes.

    :type remotes: list[AnsibleCoreCI]
    :rtype: str
    """
    hosts = []

    for remote in remotes:
        host_vars = dict(
            ansible_host=remote.connection.hostname,
            ansible_user=remote.connection.username,
            ansible_password=remote.connection.password,
            ansible_port=remote.connection.port,
        )

        # used for the connection_windows_ssh test target
        if remote.ssh_key:
            host_vars["ansible_ssh_private_key_file"] = os.path.abspath(remote.ssh_key.key)

        if remote.name == 'windows-2008':
            # force 2008 to use PSRP for the connection plugin
            connection_vars = dict(
                ansible_connection='psrp',
                ansible_psrp_auth='basic',
                ansible_psrp_cert_validation='ignore',
            )
        elif remote.name == 'windows-2016':
            # force 2016 to use NTLM + HTTP message encryption
            connection_vars = dict(
                ansible_connection='winrm',
                ansible_winrm_server_cert_validation='ignore',
                ansible_winrm_transport='ntlm',
                ansible_winrm_scheme='http',
                ansible_port='5985',
            )
        else:
            connection_vars = dict(
                ansible_connection='winrm',
                ansible_winrm_server_cert_validation='ignore',
            )

        host_vars.update(connection_vars)

        hosts.append(
            '%s %s' % (
                remote.name.replace('/', '_'),
                ' '.join('%s="%s"' % (key, host_vars[key]) for key in sorted(host_vars)),
            )
        )

    template = textwrap.dedent("""
    [windows]
    %s

    # support winrm binary module tests (temporary solution)
    [testhost:children]
    windows
    """)

    return template % '\n'.join(hosts)
def command_integration_filter(args,  # type: TIntegrationConfig
                               targets,  # type: t.Iterable[TIntegrationTarget]
                               init_callback=None,  # type: t.Callable[[TIntegrationConfig, t.Tuple[TIntegrationTarget, ...]], None]
                               ):  # type: (...) -> t.Tuple[TIntegrationTarget, ...]
    """Filter the given integration test targets."""
    targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
    changes = get_changes_filter(args)

    # special behavior when the --changed-all-target target is selected based on changes
    if args.changed_all_target in changes:
        # act as though the --changed-all-target target was in the include list
        if args.changed_all_mode == 'include' and args.changed_all_target not in args.include:
            args.include.append(args.changed_all_target)
            args.delegate_args += ['--include', args.changed_all_target]
        # act as though the --changed-all-target target was in the exclude list
        elif args.changed_all_mode == 'exclude' and args.changed_all_target not in args.exclude:
            args.exclude.append(args.changed_all_target)

    require = args.require + changes
    # copy the list so the environment-specific additions below do not mutate args.exclude in place
    # (require above already builds a new list; exclude must do the same)
    exclude = args.exclude[:]

    internal_targets = walk_internal_targets(targets, args.include, exclude, require)
    environment_exclude = get_integration_filter(args, internal_targets)

    environment_exclude += cloud_filter(args, internal_targets)

    if environment_exclude:
        exclude += environment_exclude
        internal_targets = walk_internal_targets(targets, args.include, exclude, require)

    if not internal_targets:
        raise AllTargetsSkipped()

    if args.start_at and not any(target.name == args.start_at for target in internal_targets):
        raise ApplicationError('Start at target matches nothing: %s' % args.start_at)

    if init_callback:
        init_callback(args, internal_targets)

    cloud_init(args, internal_targets)

    vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)

    if os.path.exists(vars_file_src):
        def integration_config_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
            """
            Add the integration config vars file to the payload file list.
            This will preserve the file during delegation even if the file is ignored by source control.
            """
            files.append((vars_file_src, data_context().content.integration_vars_path))

        data_context().register_payload_callback(integration_config_callback)

    if args.delegate:
        raise Delegate(require=require, exclude=exclude, integration_targets=internal_targets)

    install_command_requirements(args)

    return internal_targets
def command_integration_filtered(args, targets, all_targets, inventory_path, pre_target=None, post_target=None,
                                 remote_temp_path=None):
    """Run the given integration test targets: validate setup targets, run each target with optional
    retry, snapshot the environment before/after each target, and collect timing results.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :type all_targets: tuple[IntegrationTarget]
    :type inventory_path: str
    :type pre_target: (IntegrationTarget) -> None | None
    :type post_target: (IntegrationTarget) -> None | None
    :type remote_temp_path: str | None
    """
    found = False
    passed = []
    failed = []

    targets_iter = iter(targets)
    all_targets_dict = dict((target.name, target) for target in all_targets)

    # fail fast if any target references a setup target that does not exist
    setup_errors = []
    setup_targets_executed = set()

    for target in all_targets:
        for setup_target in target.setup_once + target.setup_always:
            if setup_target not in all_targets_dict:
                setup_errors.append('Target "%s" contains invalid setup target: %s' % (target.name, setup_target))

    if setup_errors:
        raise ApplicationError('Found %d invalid setup aliases:\n%s' % (len(setup_errors), '\n'.join(setup_errors)))

    check_pyyaml(args, args.python_version)

    test_dir = os.path.join(ResultType.TMP.path, 'output_dir')

    # verify the local SSH service is reachable before running any target that needs it
    if not args.explain and any('needs/ssh/' in target.aliases for target in targets):
        max_tries = 20
        display.info('SSH service required for tests. Checking to make sure we can connect.')
        for i in range(1, max_tries + 1):
            try:
                run_command(args, ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'], capture=True)
                display.info('SSH service responded.')
                break
            except SubprocessError:
                if i == max_tries:
                    raise
                seconds = 3
                display.warning('SSH service not responding. Waiting %d second(s) before checking again.' % seconds)
                time.sleep(seconds)

    # Windows is different as Ansible execution is done locally but the host is remote
    if args.inject_httptester and not isinstance(args, WindowsIntegrationConfig):
        inject_httptester(args)

    start_at_task = args.start_at_task

    results = {}

    current_environment = None  # type: t.Optional[EnvironmentDescription]

    # common temporary directory path that will be valid on both the controller and the remote
    # it must be common because it will be referenced in environment variables that are shared across multiple hosts
    common_temp_path = '/tmp/ansible-test-%s' % ''.join(random.choice(string.ascii_letters + string.digits) for _idx in range(8))

    setup_common_temp_dir(args, common_temp_path)

    try:
        for target in targets_iter:
            # skip targets until --start-at is reached (if given)
            if args.start_at and not found:
                found = target.name == args.start_at

                if not found:
                    continue

            if args.list_targets:
                print(target.name)
                continue

            tries = 2 if args.retry_on_error else 1
            verbosity = args.verbosity

            cloud_environment = get_cloud_environment(args, target)

            # reuse the post-target snapshot from the previous target as this target's baseline when available
            original_environment = current_environment if current_environment else EnvironmentDescription(args)
            current_environment = None

            display.info('>>> Environment Description\n%s' % original_environment, verbosity=3)

            try:
                while tries:
                    tries -= 1

                    try:
                        if cloud_environment:
                            cloud_environment.setup_once()

                        run_setup_targets(args, test_dir, target.setup_once, all_targets_dict, setup_targets_executed, inventory_path, common_temp_path, False)

                        start_time = time.time()

                        run_setup_targets(args, test_dir, target.setup_always, all_targets_dict, setup_targets_executed, inventory_path, common_temp_path, True)

                        if not args.explain:
                            # create a fresh test directory for each test target
                            remove_tree(test_dir)
                            make_dirs(test_dir)

                        if pre_target:
                            pre_target(target)

                        try:
                            if target.script_path:
                                command_integration_script(args, target, test_dir, inventory_path, common_temp_path,
                                                           remote_temp_path=remote_temp_path)
                            else:
                                command_integration_role(args, target, start_at_task, test_dir, inventory_path,
                                                         common_temp_path, remote_temp_path=remote_temp_path)
                                # --start-at-task only applies to the first role target executed
                                start_at_task = None
                        finally:
                            if post_target:
                                post_target(target)

                        end_time = time.time()

                        results[target.name] = dict(
                            name=target.name,
                            type=target.type,
                            aliases=target.aliases,
                            modules=target.modules,
                            run_time_seconds=int(end_time - start_time),
                            setup_once=target.setup_once,
                            setup_always=target.setup_always,
                            coverage=args.coverage,
                            coverage_label=args.coverage_label,
                            python_version=args.python_version,
                        )

                        break
                    except SubprocessError:
                        if cloud_environment:
                            cloud_environment.on_failure(target, tries)

                        # do not retry if the failure changed the test environment
                        if not original_environment.validate(target.name, throw=False):
                            raise

                        if not tries:
                            raise

                        display.warning('Retrying test target "%s" with maximum verbosity.' % target.name)
                        display.verbosity = args.verbosity = 6

                start_time = time.time()
                current_environment = EnvironmentDescription(args)
                end_time = time.time()

                # fail the target if it left the test environment modified
                EnvironmentDescription.check(original_environment, current_environment, target.name, throw=True)

                results[target.name]['validation_seconds'] = int(end_time - start_time)

                passed.append(target)
            except Exception as ex:
                failed.append(target)

                if args.continue_on_error:
                    display.error(ex)
                    continue

                display.notice('To resume at this test target, use the option: --start-at %s' % target.name)

                next_target = next(targets_iter, None)

                if next_target:
                    display.notice('To resume after this test target, use the option: --start-at %s' % next_target.name)

                raise
            finally:
                # restore the verbosity which may have been raised for a retry
                display.verbosity = args.verbosity = verbosity

    finally:
        if not args.explain:
            if args.coverage:
                # collect coverage files written under the shared temporary directory
                coverage_temp_path = os.path.join(common_temp_path, ResultType.COVERAGE.name)
                coverage_save_path = ResultType.COVERAGE.path

                for filename in os.listdir(coverage_temp_path):
                    shutil.copy(os.path.join(coverage_temp_path, filename), os.path.join(coverage_save_path, filename))

            remove_tree(common_temp_path)

            result_name = '%s-%s.json' % (
                args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))

            data = dict(
                targets=results,
            )

            write_json_test_results(ResultType.DATA, result_name, data)

    if failed:
        raise ApplicationError('The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s' % (
            len(failed), len(passed) + len(failed), '\n'.join(target.name for target in failed)))
def start_httptester(args):
    """Start the httptester container and return its container id plus ssh port-forward options.

    :type args: EnvironmentConfig
    :rtype: str, list[str]
    """
    # map ports from remote -> localhost -> container
    # passing through localhost is only used when ansible-test is not already running inside a docker container
    forwards = [
        dict(remote=8080, container=80),
        dict(remote=8088, container=88),
        dict(remote=8443, container=443),
        dict(remote=8444, container=444),
        dict(remote=8749, container=749),
    ]

    container_id = get_docker_container_id()

    if not container_id:
        # not running inside a container, so bind each container port to an available localhost port
        for forward in forwards:
            forward['localhost'] = get_available_port()

    docker_pull(args, args.httptester)

    httptester_id = run_httptester(args, dict((forward['localhost'], forward['container']) for forward in forwards if 'localhost' in forward))

    if container_id:
        container_host = get_docker_container_ip(args, httptester_id)
        display.info('Found httptester container address: %s' % container_host, verbosity=1)
    else:
        container_host = get_docker_hostname()

    ssh_options = []

    for forward in forwards:
        ssh_options += ['-R', '%d:%s:%d' % (forward['remote'], container_host, forward.get('localhost', forward['container']))]

    return httptester_id, ssh_options
def run_httptester(args, ports=None):
    """Run the httptester docker container and return its container id.

    :type args: EnvironmentConfig
    :type ports: dict[int, int] | None
    :rtype: str
    """
    options = [
        '--detach',
        '--env', 'KRB5_PASSWORD=%s' % args.httptester_krb5_password,
    ]

    for localhost_port, container_port in (ports or {}).items():
        options += ['-p', '%d:%d' % (localhost_port, container_port)]

    network = get_docker_preferred_network_name(args)

    if is_docker_user_defined_network(network):
        # network-scoped aliases are only supported for containers in user defined networks
        for alias in HTTPTESTER_HOSTS:
            options.extend(['--network-alias', alias])

    container_id = docker_run(args, args.httptester, options=options)[0]

    if args.explain:
        # no container was actually started, so use a placeholder id
        return 'httptester_id'

    return container_id.strip()
def inject_httptester(args):
    """Add httptester hostnames to /etc/hosts and forward the standard service ports to the container.

    :type args: CommonConfig
    """
    comment = ' # ansible-test httptester\n'
    append_lines = ['127.0.0.1 %s%s' % (host, comment) for host in HTTPTESTER_HOSTS]
    hosts_path = '/etc/hosts'

    original_lines = read_text_file(hosts_path).splitlines(True)

    # only append the entries once, even when called multiple times
    if not any(line.endswith(comment) for line in original_lines):
        write_text_file(hosts_path, ''.join(original_lines + append_lines))

    # determine which forwarding mechanism to use
    pfctl = find_executable('pfctl', required=False)
    iptables = find_executable('iptables', required=False)

    if pfctl:
        # pf (BSD/macOS): load the kernel module if possible, then install redirect rules
        kldload = find_executable('kldload', required=False)

        if kldload:
            try:
                run_command(args, ['kldload', 'pf'], capture=True)
            except SubprocessError:
                pass  # already loaded

        rules = '''
rdr pass inet proto tcp from any to any port 80 -> 127.0.0.1 port 8080
rdr pass inet proto tcp from any to any port 88 -> 127.0.0.1 port 8088
rdr pass inet proto tcp from any to any port 443 -> 127.0.0.1 port 8443
rdr pass inet proto tcp from any to any port 444 -> 127.0.0.1 port 8444
rdr pass inet proto tcp from any to any port 749 -> 127.0.0.1 port 8749
'''
        cmd = ['pfctl', '-ef', '-']

        try:
            run_command(args, cmd, capture=True, data=rules)
        except SubprocessError:
            pass  # non-zero exit status on success

    elif iptables:
        # iptables (Linux): redirect loopback traffic on each service port to the forwarded port
        ports = [
            (80, 8080),
            (88, 8088),
            (443, 8443),
            (444, 8444),
            (749, 8749),
        ]

        for src, dst in ports:
            rule = ['-o', 'lo', '-p', 'tcp', '--dport', str(src), '-j', 'REDIRECT', '--to-port', str(dst)]

            try:
                # check for existing rule
                cmd = ['iptables', '-t', 'nat', '-C', 'OUTPUT'] + rule
                run_command(args, cmd, capture=True)
            except SubprocessError:
                # append rule when it does not exist
                cmd = ['iptables', '-t', 'nat', '-A', 'OUTPUT'] + rule
                run_command(args, cmd, capture=True)
    else:
        raise ApplicationError('No supported port forwarding mechanism detected.')
def run_setup_targets(args, test_dir, target_names, targets_dict, targets_executed, inventory_path, temp_path, always):
    """Run the named setup targets, skipping any that already ran unless always is set.

    :type args: IntegrationConfig
    :type test_dir: str
    :type target_names: list[str]
    :type targets_dict: dict[str, IntegrationTarget]
    :type targets_executed: set[str]
    :type inventory_path: str
    :type temp_path: str
    :type always: bool
    """
    for target_name in target_names:
        if target_name in targets_executed and not always:
            continue  # setup_once target that already ran

        setup_target = targets_dict[target_name]

        if not args.explain:
            # create a fresh test directory for each test target
            remove_tree(test_dir)
            make_dirs(test_dir)

        if setup_target.script_path:
            command_integration_script(args, setup_target, test_dir, inventory_path, temp_path)
        else:
            command_integration_role(args, setup_target, None, test_dir, inventory_path, temp_path)

        targets_executed.add(target_name)
def integration_environment(args, target, test_dir, inventory_path, ansible_config, env_config):
    """Build the environment variables used to run an integration test target.

    :type args: IntegrationConfig
    :type target: IntegrationTarget
    :type test_dir: str
    :type inventory_path: str
    :type ansible_config: str | None
    :type env_config: CloudEnvironmentConfig | None
    :rtype: dict[str, str]
    """
    env = ansible_environment(args, ansible_config=ansible_config)

    if args.inject_httptester:
        env['HTTPTESTER'] = '1'
        env['KRB5_PASSWORD'] = args.httptester_krb5_password

    if env_config:
        callback_plugins = ['junit'] + (env_config.callback_plugins or [])
    else:
        callback_plugins = ['junit']

    integration = dict(
        JUNIT_OUTPUT_DIR=ResultType.JUNIT.path,
        ANSIBLE_CALLBACKS_ENABLED=','.join(sorted(set(callback_plugins))),
        ANSIBLE_TEST_CI=args.metadata.ci_provider or get_ci_provider().code,
        ANSIBLE_TEST_COVERAGE='check' if args.coverage_check else ('yes' if args.coverage else ''),
        OUTPUT_DIR=test_dir,
        INVENTORY_PATH=os.path.abspath(inventory_path),
    )

    if args.debug_strategy:
        env['ANSIBLE_STRATEGY'] = 'debug'

    if 'non_local/' in target.aliases:
        if args.coverage:
            display.warning('Skipping coverage reporting on Ansible modules for non-local test: %s' % target.name)

        env['ANSIBLE_TEST_REMOTE_INTERPRETER'] = ''

    env.update(integration)

    return env
def command_integration_script(args, target, test_dir, inventory_path, temp_path, remote_temp_path=None):
    """Run an integration test target implemented as a script.

    :type args: IntegrationConfig
    :type target: IntegrationTarget
    :type test_dir: str
    :type inventory_path: str
    :type temp_path: str
    :type remote_temp_path: str | None
    """
    display.info('Running %s integration test script' % target.name)

    env_config = None

    if isinstance(args, PosixIntegrationConfig):
        cloud_environment = get_cloud_environment(args, target)

        if cloud_environment:
            env_config = cloud_environment.get_environment_config()

    with integration_test_environment(args, target, inventory_path) as test_env:
        cmd = ['./%s' % os.path.basename(target.script_path)]

        if args.verbosity:
            cmd.append('-' + ('v' * args.verbosity))

        env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config)
        cwd = os.path.join(test_env.targets_dir, target.relative_path)

        # support use of adhoc ansible commands in collections without specifying the fully qualified collection name
        env['ANSIBLE_PLAYBOOK_DIR'] = cwd

        if env_config and env_config.env_vars:
            env.update(env_config.env_vars)

        with integration_test_config_file(args, env_config, test_env.integration_dir) as config_path:
            if config_path:
                cmd += ['-e', '@%s' % config_path]

            module_coverage = 'non_local/' not in target.aliases

            intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd, temp_path=temp_path,
                              remote_temp_path=remote_temp_path, module_coverage=module_coverage)
def command_integration_role(args, target, start_at_task, test_dir, inventory_path, temp_path, remote_temp_path=None):
    """Run an integration test target implemented as an Ansible role by generating and running a playbook.

    :type args: IntegrationConfig
    :type target: IntegrationTarget
    :type start_at_task: str | None
    :type test_dir: str
    :type inventory_path: str
    :type temp_path: str
    :type remote_temp_path: str | None
    """
    display.info('Running %s integration test role' % target.name)

    env_config = None

    vars_files = []
    variables = dict(
        output_dir=test_dir,
    )

    # the host pattern and fact gathering depend on the type of integration test being run
    if isinstance(args, WindowsIntegrationConfig):
        hosts = 'windows'
        gather_facts = False
        variables.update(dict(
            win_output_dir=r'C:\ansible_testing',
        ))
    elif isinstance(args, NetworkIntegrationConfig):
        hosts = target.network_platform
        gather_facts = False
    else:
        hosts = 'testhost'
        gather_facts = True

    cloud_environment = get_cloud_environment(args, target)

    if cloud_environment:
        env_config = cloud_environment.get_environment_config()

    with integration_test_environment(args, target, inventory_path) as test_env:
        if os.path.exists(test_env.vars_file):
            vars_files.append(os.path.relpath(test_env.vars_file, test_env.integration_dir))

        play = dict(
            hosts=hosts,
            gather_facts=gather_facts,
            vars_files=vars_files,
            vars=variables,
            roles=[
                target.name,
            ],
        )

        if env_config:
            if env_config.ansible_vars:
                # updates the same dict referenced by play['vars'] above
                variables.update(env_config.ansible_vars)

            play.update(dict(
                environment=env_config.env_vars,
                module_defaults=env_config.module_defaults,
            ))

        # JSON is valid YAML, so the playbook can be written with json.dumps
        playbook = json.dumps([play], indent=4, sort_keys=True)

        with named_temporary_file(args=args, directory=test_env.integration_dir, prefix='%s-' % target.name, suffix='.yml', content=playbook) as playbook_path:
            filename = os.path.basename(playbook_path)

            display.info('>>> Playbook: %s\n%s' % (filename, playbook.strip()), verbosity=3)

            cmd = ['ansible-playbook', filename, '-i', os.path.relpath(test_env.inventory_path, test_env.integration_dir)]

            if start_at_task:
                cmd += ['--start-at-task', start_at_task]

            if args.tags:
                cmd += ['--tags', args.tags]

            if args.skip_tags:
                cmd += ['--skip-tags', args.skip_tags]

            if args.diff:
                cmd += ['--diff']

            if isinstance(args, NetworkIntegrationConfig):
                if args.testcase:
                    cmd += ['-e', 'testcase=%s' % args.testcase]

            if args.verbosity:
                cmd.append('-' + ('v' * args.verbosity))

            env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config)
            cwd = test_env.integration_dir

            env.update(dict(
                # support use of adhoc ansible commands in collections without specifying the fully qualified collection name
                ANSIBLE_PLAYBOOK_DIR=cwd,
            ))

            env['ANSIBLE_ROLES_PATH'] = test_env.targets_dir

            module_coverage = 'non_local/' not in target.aliases
            intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd, temp_path=temp_path,
                              remote_temp_path=remote_temp_path, module_coverage=module_coverage)
def get_changes_filter(args):
    """Return the list of targets selected by change detection, updating args.metadata as a side effect.

    :type args: TestConfig
    :rtype: list[str]
    """
    paths = detect_changes(args)

    if not args.metadata.change_description:
        args.metadata.change_description = categorize_changes(args, paths, args.command) if paths else ChangeDescription()

    if paths is None:
        return []  # change detection not enabled, do not filter targets

    if not paths:
        raise NoChangesDetected()

    if args.metadata.change_description.targets is None:
        raise NoTestsForChanges()

    return args.metadata.change_description.targets
def detect_changes(args):
    """Detect changed paths using the configured mechanism, or return None when change detection is disabled.

    :type args: TestConfig
    :rtype: list[str] | None
    """
    if args.changed:
        paths = get_ci_provider().detect_changes(args)
    elif args.changed_from or args.changed_path:
        # copy the list so the += below does not extend args.changed_path in place
        paths = list(args.changed_path or [])
        if args.changed_from:
            paths += read_text_file(args.changed_from).splitlines()
    else:
        return None  # change detection not enabled

    if paths is None:
        return None  # act as though change detection not enabled, do not filter targets

    display.info('Detected changes in %d file(s).' % len(paths))

    for path in paths:
        display.info(path, verbosity=1)

    return paths
def get_integration_filter(args, targets):
    """Return the environment-specific exclusion filter for the given targets.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    # pick the filter matching the selected execution environment
    if args.docker:
        filter_func = get_integration_docker_filter
    elif args.remote:
        filter_func = get_integration_remote_filter
    else:
        filter_func = get_integration_local_filter

    return filter_func(args, targets)
def common_integration_filter(args, targets, exclude):
    """Apply the exclusion rules shared by all execution environments, extending exclude in place.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :type exclude: list[str]
    """
    # targets explicitly included with a disabled/ prefix override the disabled filter
    override_disabled = set(target for target in args.include if target.startswith('disabled/'))

    if not args.allow_disabled:
        skip = 'disabled/'
        override = [target.name for target in targets if override_disabled & set(target.aliases)]
        skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
        if skipped:
            exclude.extend(skipped)
            display.warning('Excluding tests marked "%s" which require --allow-disabled or prefixing with "disabled/": %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    # targets explicitly included with an unsupported/ prefix override the unsupported filter
    override_unsupported = set(target for target in args.include if target.startswith('unsupported/'))

    if not args.allow_unsupported:
        skip = 'unsupported/'
        override = [target.name for target in targets if override_unsupported & set(target.aliases)]
        skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
        if skipped:
            exclude.extend(skipped)
            display.warning('Excluding tests marked "%s" which require --allow-unsupported or prefixing with "unsupported/": %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    # targets explicitly included with an unstable/ prefix override the unstable filter
    override_unstable = set(target for target in args.include if target.startswith('unstable/'))

    if args.allow_unstable_changed:
        # change detection may opt-in unstable tests for the targets the changes focus on
        override_unstable |= set(args.metadata.change_description.focused_targets or [])

    if not args.allow_unstable:
        skip = 'unstable/'
        override = [target.name for target in targets if override_unstable & set(target.aliases)]
        skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
        if skipped:
            exclude.extend(skipped)
            display.warning('Excluding tests marked "%s" which require --allow-unstable or prefixing with "unstable/": %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    # only skip a Windows test if using --windows and all the --windows versions are defined in the aliases as skip/windows/%s
    if isinstance(args, WindowsIntegrationConfig) and args.windows:
        all_skipped = []
        not_skipped = []

        for target in targets:
            if "skip/windows/" not in target.aliases:
                continue

            skip_valid = []
            skip_missing = []
            for version in args.windows:
                if "skip/windows/%s/" % version in target.aliases:
                    skip_valid.append(version)
                else:
                    skip_missing.append(version)

            if skip_missing and skip_valid:
                # the target only skips some of the requested versions, so it cannot be excluded outright
                not_skipped.append((target.name, skip_valid, skip_missing))
            elif skip_valid:
                all_skipped.append(target.name)

        if all_skipped:
            exclude.extend(all_skipped)
            skip_aliases = ["skip/windows/%s/" % w for w in args.windows]
            display.warning('Excluding tests marked "%s" which are set to skip with --windows %s: %s'
                            % ('", "'.join(skip_aliases), ', '.join(args.windows), ', '.join(all_skipped)))

        if not_skipped:
            for target, skip_valid, skip_missing in not_skipped:
                # warn when failing to skip due to lack of support for skipping only some versions
                display.warning('Including test "%s" which was marked to skip for --windows %s but not %s.'
                                % (target, ', '.join(skip_valid), ', '.join(skip_missing)))
def get_integration_local_filter(args, targets):
    """Return a list of test names or alias prefixes to exclude when running tests locally.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    exclude = []
    common_integration_filter(args, targets, exclude)

    if not args.allow_root and os.getuid() != 0:
        root_skip = 'needs/root/'
        root_skipped = [target.name for target in targets if root_skip in target.aliases]

        if root_skipped:
            exclude.append(root_skip)
            display.warning('Excluding tests marked "%s" which require --allow-root or running as root: %s'
                            % (root_skip.rstrip('/'), ', '.join(root_skipped)))

    if not args.allow_destructive:
        destructive_skip = 'destructive/'
        # targets explicitly included with a destructive/ prefix override the filter
        overridden = set(target for target in args.include if target.startswith('destructive/'))
        allowed = [target.name for target in targets if overridden & set(target.aliases)]
        destructive_skipped = [target.name for target in targets
                               if destructive_skip in target.aliases and target.name not in allowed]

        if destructive_skipped:
            exclude.extend(destructive_skipped)
            display.warning('Excluding tests marked "%s" which require --allow-destructive or prefixing with "destructive/" to run locally: %s'
                            % (destructive_skip.rstrip('/'), ', '.join(destructive_skipped)))

    exclude_targets_by_python_version(targets, args.python_version, exclude)

    return exclude
def get_integration_docker_filter(args, targets):
    """Return a list of test names or alias prefixes to exclude when running tests under docker.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    exclude = []
    common_integration_filter(args, targets, exclude)

    docker_skip = 'skip/docker/'
    docker_skipped = [target.name for target in targets if docker_skip in target.aliases]

    if docker_skipped:
        exclude.append(docker_skip)
        display.warning('Excluding tests marked "%s" which cannot run under docker: %s'
                        % (docker_skip.rstrip('/'), ', '.join(docker_skipped)))

    if not args.docker_privileged:
        privileged_skip = 'needs/privileged/'
        privileged_skipped = [target.name for target in targets if privileged_skip in target.aliases]

        if privileged_skipped:
            exclude.append(privileged_skip)
            display.warning('Excluding tests marked "%s" which require --docker-privileged to run under docker: %s'
                            % (privileged_skip.rstrip('/'), ', '.join(privileged_skipped)))

    exclude_targets_by_python_version(targets, get_python_version(args, get_docker_completion(), args.docker_raw), exclude)

    return exclude
def get_integration_remote_filter(args, targets):
    """Return a list of test names or alias prefixes to exclude when running tests on a remote instance.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    remote = args.parsed_remote

    exclude = []
    common_integration_filter(args, targets, exclude)

    platform_version = '%s %s' % (remote.platform, remote.version)

    # map each skip alias to a human readable description of the environment it applies to
    skips = {
        'skip/%s' % remote.platform: remote.platform,
        'skip/%s/%s' % (remote.platform, remote.version): platform_version,
        'skip/%s%s' % (remote.platform, remote.version): platform_version,  # legacy syntax, use above format
    }

    if remote.arch:
        skips['skip/%s/%s' % (remote.arch, remote.platform)] = '%s on %s' % (remote.platform, remote.arch)
        skips['skip/%s/%s/%s' % (remote.arch, remote.platform, remote.version)] = '%s on %s' % (platform_version, remote.arch)

    for skip, description in skips.items():
        skipped = [target.name for target in targets if skip in target.skips]

        if skipped:
            exclude.append(skip + '/')
            display.warning('Excluding tests marked "%s" which are not supported on %s: %s' % (skip, description, ', '.join(skipped)))

    exclude_targets_by_python_version(targets, get_python_version(args, get_remote_completion(), args.remote), exclude)

    return exclude
def exclude_targets_by_python_version(targets, python_version, exclude):
    """Exclude targets which do not support the given python version, extending exclude in place.

    :type targets: tuple[IntegrationTarget]
    :type python_version: str
    :type exclude: list[str]
    """
    if not python_version:
        display.warning('Python version unknown. Unable to skip tests based on Python version.')
        return

    python_major_version = python_version.split('.')[0]

    # check skip aliases for the full version (e.g. skip/python3.6/) and the major version (e.g. skip/python3/)
    for version in (python_version, python_major_version):
        skip = 'skip/python%s/' % version
        skipped = [target.name for target in targets if skip in target.aliases]

        if skipped:
            exclude.append(skip)
            display.warning('Excluding tests marked "%s" which are not supported on python %s: %s'
                            % (skip.rstrip('/'), python_version, ', '.join(skipped)))
def get_python_version(args, configs, name):
    """Return the python version to use for the named host, based on its completion config and --python.

    :type args: EnvironmentConfig
    :type configs: dict[str, dict[str, str]]
    :type name: str
    """
    configured_versions = configs.get(name, {}).get('python')

    if not configured_versions:
        if args.python:
            return args.python

        display.warning('No Python version specified. '
                        'Use completion config or the --python option to specify one.', unique=True)

        return ''  # failure to provide a version may result in failures or reduced functionality later

    supported_python_versions = configured_versions.split(',')
    default_python_version = supported_python_versions[0]

    if args.python and args.python not in supported_python_versions:
        raise ApplicationError('Python %s is not supported by %s. Supported Python version(s) are: %s' % (
            args.python, name, ', '.join(sorted(supported_python_versions))))

    return args.python or default_python_version
def get_python_interpreter(args, configs, name):
    """Return the path to the python interpreter to use for the named host.

    :type args: EnvironmentConfig
    :type configs: dict[str, dict[str, str]]
    :type name: str
    """
    if args.python_interpreter:
        return args.python_interpreter

    config = configs.get(name, {})

    if not config:
        guess = 'python%s' % args.python if args.python else 'python'

        display.warning('Using "%s" as the Python interpreter. '
                        'Use completion config or the --python-interpreter option to specify the path.' % guess, unique=True)

        return guess

    python_version = get_python_version(args, configs, name)

    python_dir = config.get('python_dir', '/usr/bin')
    default_interpreter = os.path.join(python_dir, 'python%s' % python_version)

    # a version-specific override in the config takes precedence over the default location
    return config.get('python%s' % python_version, default_interpreter)
class EnvironmentDescription:
"""Description of current running environment."""
def __init__(self, args):
"""Initialize snapshot of environment configuration.
:type args: IntegrationConfig
"""
self.args = args
if self.args.explain:
self.data = {}
return
warnings = []
versions = ['']
versions += SUPPORTED_PYTHON_VERSIONS
versions += list(set(v.split('.')[0] for v in SUPPORTED_PYTHON_VERSIONS))
version_check = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'versions.py')
python_paths = dict((v, find_executable('python%s' % v, required=False)) for v in sorted(versions))
pip_paths = dict((v, find_executable('pip%s' % v, required=False)) for v in sorted(versions))
program_versions = dict((v, self.get_version([python_paths[v], version_check], warnings)) for v in sorted(python_paths) if python_paths[v])
pip_interpreters = dict((v, self.get_shebang(pip_paths[v])) for v in sorted(pip_paths) if pip_paths[v])
known_hosts_hash = get_hash(os.path.expanduser('~/.ssh/known_hosts'))
for version in sorted(versions):
self.check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings)
for warning in warnings:
display.warning(warning, unique=True)
self.data = dict(
python_paths=python_paths,
pip_paths=pip_paths,
program_versions=program_versions,
pip_interpreters=pip_interpreters,
known_hosts_hash=known_hosts_hash,
warnings=warnings,
)
@staticmethod
def check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings):
"""
:type version: str
:param python_paths: dict[str, str]
:param pip_paths: dict[str, str]
:param pip_interpreters: dict[str, str]
:param warnings: list[str]
"""
python_label = 'Python%s' % (' %s' % version if version else '')
pip_path = pip_paths.get(version)
python_path = python_paths.get(version)
if not python_path or not pip_path:
# skip checks when either python or pip are missing for this version
return
pip_shebang = pip_interpreters.get(version)
match = re.search(r'#!\s*(?P<command>[^\s]+)', pip_shebang)
if not match:
warnings.append('A %s pip was found at "%s", but it does not have a valid shebang: %s' % (python_label, pip_path, pip_shebang))
return
pip_interpreter = os.path.realpath(match.group('command'))
python_interpreter = os.path.realpath(python_path)
if pip_interpreter == python_interpreter:
return
try:
identical = filecmp.cmp(pip_interpreter, python_interpreter)
except OSError:
identical = False
if identical:
return
warnings.append('A %s pip was found at "%s", but it uses interpreter "%s" instead of "%s".' % (
python_label, pip_path, pip_interpreter, python_interpreter))
def __str__(self):
"""
:rtype: str
"""
return json.dumps(self.data, sort_keys=True, indent=4)
def validate(self, target_name, throw):
"""
:type target_name: str
:type throw: bool
:rtype: bool
"""
current = EnvironmentDescription(self.args)
return self.check(self, current, target_name, throw)
@staticmethod
def check(original, current, target_name, throw):
"""
:type original: EnvironmentDescription
:type current: EnvironmentDescription
:type target_name: str
:type throw: bool
:rtype: bool
"""
original_json = str(original)
current_json = str(current)
if original_json == current_json:
return True
unified_diff = '\n'.join(difflib.unified_diff(
a=original_json.splitlines(),
b=current_json.splitlines(),
fromfile='original.json',
tofile='current.json',
lineterm='',
))
message = ('Test target "%s" has changed the test environment!\n'
'If these changes are necessary, they must be reverted before the test finishes.\n'
'>>> Original Environment\n'
'%s\n'
'>>> Current Environment\n'
'%s\n'
'>>> Environment Diff\n'
'%s'
% (target_name, original_json, current_json, unified_diff))
if throw:
raise ApplicationError(message)
display.error(message)
return False
@staticmethod
def get_version(command, warnings):
    """
    Run the given version-reporting command and return its output lines, or None on failure.
    :type command: list[str]
    :type warnings: list[text]
    :rtype: list[str] | None
    """
    try:
        stdout, stderr = raw_command(command, capture=True, cmd_verbosity=2)
    except SubprocessError as ex:
        warnings.append(u'%s' % ex)
        return None  # all failures are equal, we don't care why it failed, only that it did

    # Join stdout and stderr with an explicit newline. Simply concatenating the stripped
    # streams (the previous behavior) fused the last stdout line with the first stderr line
    # whenever both streams produced output, corrupting the reported version lines.
    output = '\n'.join(part for part in ((stdout or '').strip(), (stderr or '').strip()) if part)

    return [line.strip() for line in output.splitlines()]
@staticmethod
def get_shebang(path):
    """
    Return the first line of the script at the given path, stripped of surrounding whitespace.
    :type path: str
    :rtype: str
    """
    with open_text_file(path) as script_fd:
        first_line = script_fd.readline()

    return first_line.strip()
class NoChangesDetected(ApplicationWarning):
    """Exception when change detection was performed, but no changes were found."""
    def __init__(self):
        message = 'No changes detected.'
        super(NoChangesDetected, self).__init__(message)
class NoTestsForChanges(ApplicationWarning):
    """Exception when changes detected, but no tests trigger as a result."""
    def __init__(self):
        message = 'No tests found for detected changes.'
        super(NoTestsForChanges, self).__init__(message)
class Delegate(Exception):
    """Trigger command delegation."""
    def __init__(self, exclude=None, require=None, integration_targets=None):
        """
        :type exclude: list[str] | None
        :type require: list[str] | None
        :type integration_targets: tuple[IntegrationTarget] | None
        """
        super(Delegate, self).__init__()

        # Normalize omitted/empty arguments to fresh empty containers.
        self.exclude = [] if not exclude else exclude
        self.require = [] if not require else require
        self.integration_targets = tuple() if not integration_targets else integration_targets
class AllTargetsSkipped(ApplicationWarning):
    """All targets skipped."""
    def __init__(self):
        message = 'All targets skipped.'
        super(AllTargetsSkipped, self).__init__(message)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,014 |
ansible-test units --docker=default throwing Certificate issues when running ansible_test/_data/quiet_pip.py
|
### Summary
This started happening a few hours ago when we run
ansible-test units --docker=default we get the cert issues below from ansible_test/_data/quiet_pip.py
I've tried this on my Mac (on the personal network) and a few Linux VMs (on the work network); all got the same issue. I've already recreated this issue in GitHub Actions; you can see the output of the job here: https://github.com/ansible-collections/netapp/actions/runs/681234941
If i give a python version like
ansible-test units --docker=default --python 3.5 then it will work, but if i don't give a python version it will get the error below.
ansible-test sanity --docker=default works fine
```python-traceback
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
$ ansible --version
carchi@carchi-mac-1 ontap (master) $ ansible --version
ansible 2.10.6
config file = None
configured module search path = ['/Users/carchi/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /Library/Frameworks/Python.framework/Versions/3.9/lib/python3.9/site-packages/ansible
executable location = /Library/Frameworks/Python.framework/Versions/3.9/bin/ansible
python version = 3.9.1 (v3.9.1:1e5d33e9b9, Dec 7 2020, 12:10:52) [Clang 6.0 (clang-600.0.57)]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $
```
### OS / Environment
I can replicate this on my mac, and a Linux VM.
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
Run ansible-test units --docker=default on a collection
```
### Expected Results
Unit test to run.
### Actual Results
```console (paste below)
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
|
https://github.com/ansible/ansible/issues/74014
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-03-23T23:07:33Z |
python
| 2021-04-08T22:47:23Z |
changelogs/fragments/ansible-test-pypi-test-container.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,014 |
ansible-test units --docker=default throwing Certificate issues when running ansible_test/_data/quiet_pip.py
|
### Summary
This started happening a few hours ago when we run
ansible-test units --docker=default we get the cert issues below from ansible_test/_data/quiet_pip.py
I've tried this on my mac (on the personal network), and a few linux Vm (on work network) all got the same issue. I've already recreated this issue in github actions, you can see the output of the job here. https://github.com/ansible-collections/netapp/actions/runs/681234941
If i give a python version like
ansible-test units --docker=default --python 3.5 then it will work, but if i don't give a python version it will get the error below.
ansible-test sanity --docker=default works fine
```python-traceback
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
$ ansible --version
carchi@carchi-mac-1 ontap (master) $ ansible --version
ansible 2.10.6
config file = None
configured module search path = ['/Users/carchi/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /Library/Frameworks/Python.framework/Versions/3.9/lib/python3.9/site-packages/ansible
executable location = /Library/Frameworks/Python.framework/Versions/3.9/bin/ansible
python version = 3.9.1 (v3.9.1:1e5d33e9b9, Dec 7 2020, 12:10:52) [Clang 6.0 (clang-600.0.57)]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $
```
### OS / Environment
I can replicate this on my mac, and a Linux VM.
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
Run ansible-test units --docker=default on a collection
```
### Expected Results
Unit test to run.
### Actual Results
```console (paste below)
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
|
https://github.com/ansible/ansible/issues/74014
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-03-23T23:07:33Z |
python
| 2021-04-08T22:47:23Z |
test/lib/ansible_test/_data/quiet_pip.py
|
"""Custom entry-point for pip that filters out unwanted logging and warnings."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import logging
import re
import runpy
import warnings
# Reference to the original logging.Filterer.filter, saved so the custom filter
# installed by main() can delegate to it for messages that are not suppressed.
BUILTIN_FILTERER_FILTER = logging.Filterer.filter

# Any log record whose full message matches one of these alternatives is dropped.
LOGGING_MESSAGE_FILTER = re.compile("^("
                                    ".*Running pip install with root privileges is generally not a good idea.*|"  # custom Fedora patch [1]
                                    "DEPRECATION: Python 2.7 will reach the end of its life .*|"  # pip 19.2.3
                                    "Ignoring .*: markers .* don't match your environment|"
                                    "Requirement already satisfied.*"
                                    ")$")

# [1] https://src.fedoraproject.org/rpms/python-pip/blob/master/f/emit-a-warning-when-running-with-root-privileges.patch

# Warning messages starting with any of these prefixes are suppressed via warnings.filterwarnings in main().
WARNING_MESSAGE_FILTERS = (
    # DEPRECATION: Python 2.6 is no longer supported by the Python core team, please upgrade your Python.
    # A future version of pip will drop support for Python 2.6
    'Python 2.6 is no longer supported by the Python core team, ',

    # {path}/python2.6/lib/python2.6/site-packages/pip/_vendor/urllib3/util/ssl_.py:137: InsecurePlatformWarning:
    # A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail.
    # You can upgrade to a newer version of Python to solve this.
    # For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings
    'A true SSLContext object is not available. ',

    # {path}/python2.6/lib/python2.6/site-packages/pip/_vendor/urllib3/util/ssl_.py:339: SNIMissingWarning:
    # An HTTPS request has been made, but the SNI (Subject Name Indication) extension to TLS is not available on this platform.
    # This may cause the server to present an incorrect TLS certificate, which can cause validation failures.
    # You can upgrade to a newer version of Python to solve this.
    # For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings
    'An HTTPS request has been made, but the SNI ',

    # DEPRECATION: Python 2.7 reached the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 is no longer maintained.
    # pip 21.0 will drop support for Python 2.7 in January 2021.
    # More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support
    'DEPRECATION: Python 2.7 reached the end of its life ',

    # DEPRECATION: Python 3.5 reached the end of its life on September 13th, 2020. Please upgrade your Python as Python 3.5 is no longer maintained.
    # pip 21.0 will drop support for Python 3.5 in January 2021. pip 21.0 will remove support for this functionality.
    'DEPRECATION: Python 3.5 reached the end of its life ',
)
def custom_filterer_filter(self, record):
    """Globally omit logging of unwanted messages."""
    message = record.getMessage()

    if LOGGING_MESSAGE_FILTER.search(message):
        # Suppress the record entirely.
        return 0

    # Fall back to the stock filtering behavior for everything else.
    return BUILTIN_FILTERER_FILTER(self, record)
def main():
    """Main program entry point."""
    # Patch the logging module so unwanted pip log records are dropped globally.
    # Filtering at the logging layer avoids intercepting stdout/stderr, which would
    # lose color output and scramble the ordering of stdout/stderr messages.
    logging.Filterer.filter = custom_filterer_filter

    # Suppress known-noisy warnings before handing control to pip.
    # Registering the filters in code (rather than via the environment or -W) is required:
    # Python 2.6 does not support the PYTHONWARNINGS environment variable (only the -W option), and
    # Python 2.7 cannot use the -W option to match warning text after a colon, so specific messages cannot be targeted.
    for pattern in WARNING_MESSAGE_FILTERS:
        warnings.filterwarnings('ignore', pattern)

    # Run pip as if it were invoked directly, inside this patched process.
    runpy.run_module('pip.__main__', run_name='__main__', alter_sys=True)


if __name__ == '__main__':
    main()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,014 |
ansible-test units --docker=default throwing Certificate issues when running ansible_test/_data/quiet_pip.py
|
### Summary
This started happening a few hours ago: when we run ansible-test units --docker=default, we get the cert issues below from ansible_test/_data/quiet_pip.py
I've tried this on my mac (on the personal network), and a few linux Vm (on work network) all got the same issue. I've already recreated this issue in github actions, you can see the output of the job here. https://github.com/ansible-collections/netapp/actions/runs/681234941
If I give a python version, like ansible-test units --docker=default --python 3.5, then it will work, but if I don't give a python version it will get the error below.
ansible-test sanity --docker=default works fine
```python-traceback
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
$ ansible --version
carchi@carchi-mac-1 ontap (master) $ ansible --version
ansible 2.10.6
config file = None
configured module search path = ['/Users/carchi/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /Library/Frameworks/Python.framework/Versions/3.9/lib/python3.9/site-packages/ansible
executable location = /Library/Frameworks/Python.framework/Versions/3.9/bin/ansible
python version = 3.9.1 (v3.9.1:1e5d33e9b9, Dec 7 2020, 12:10:52) [Clang 6.0 (clang-600.0.57)]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $
```
### OS / Environment
I can replicate this on my mac, and a Linux VM.
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
Run ansible-test units --docker=default on a collection
```
### Expected Results
Unit test to run.
### Actual Results
```console (paste below)
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
|
https://github.com/ansible/ansible/issues/74014
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-03-23T23:07:33Z |
python
| 2021-04-08T22:47:23Z |
test/lib/ansible_test/_internal/cli.py
|
"""Test runner for all Ansible tests."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import errno
import os
import sys
# This import should occur as early as possible.
# It must occur before subprocess has been imported anywhere in the current process.
from .init import (
CURRENT_RLIMIT_NOFILE,
)
from . import types as t
from .util import (
ApplicationError,
display,
raw_command,
generate_pip_command,
read_lines_without_comments,
MAXFD,
ANSIBLE_TEST_DATA_ROOT,
)
from .delegation import (
check_delegation_args,
delegate,
)
from .executor import (
command_posix_integration,
command_network_integration,
command_windows_integration,
command_shell,
SUPPORTED_PYTHON_VERSIONS,
ApplicationWarning,
Delegate,
generate_pip_install,
check_startup,
)
from .config import (
PosixIntegrationConfig,
WindowsIntegrationConfig,
NetworkIntegrationConfig,
SanityConfig,
UnitsConfig,
ShellConfig,
)
from .env import (
EnvConfig,
command_env,
configure_timeout,
)
from .sanity import (
command_sanity,
sanity_init,
sanity_get_tests,
)
from .units import (
command_units,
)
from .target import (
find_target_completion,
walk_posix_integration_targets,
walk_network_integration_targets,
walk_windows_integration_targets,
walk_units_targets,
walk_sanity_targets,
)
from .cloud import (
initialize_cloud_plugins,
)
from .core_ci import (
AnsibleCoreCI,
)
from .data import (
data_context,
)
from .util_common import (
get_docker_completion,
get_network_completion,
get_remote_completion,
CommonConfig,
)
from .coverage.combine import (
command_coverage_combine,
)
from .coverage.erase import (
command_coverage_erase,
)
from .coverage.html import (
command_coverage_html,
)
from .coverage.report import (
command_coverage_report,
CoverageReportConfig,
)
from .coverage.xml import (
command_coverage_xml,
)
from .coverage.analyze.targets.generate import (
command_coverage_analyze_targets_generate,
CoverageAnalyzeTargetsGenerateConfig,
)
from .coverage.analyze.targets.expand import (
command_coverage_analyze_targets_expand,
CoverageAnalyzeTargetsExpandConfig,
)
from .coverage.analyze.targets.filter import (
command_coverage_analyze_targets_filter,
CoverageAnalyzeTargetsFilterConfig,
)
from .coverage.analyze.targets.combine import (
command_coverage_analyze_targets_combine,
CoverageAnalyzeTargetsCombineConfig,
)
from .coverage.analyze.targets.missing import (
command_coverage_analyze_targets_missing,
CoverageAnalyzeTargetsMissingConfig,
)
from .coverage import (
COVERAGE_GROUPS,
CoverageConfig,
)
if t.TYPE_CHECKING:
import argparse as argparse_module
def main():
    """Main program function.

    Sets up the test environment, dispatches to the selected command, and maps
    failures to process exit codes:
      0 -- ApplicationWarning (informational early exit)
      1 -- ApplicationError (expected fatal error, message shown without traceback)
      2 -- KeyboardInterrupt (user abort)
      3 -- broken pipe on output (EPIPE)
    Any other exception propagates with a full traceback.
    """
    try:
        # All relative paths are resolved against the content root being tested.
        os.chdir(data_context().content.root)
        initialize_cloud_plugins()
        sanity_init()
        args = parse_args()
        config = args.config(args)  # type: CommonConfig
        # Propagate output-related settings to the global display object before
        # any further output is produced.
        display.verbosity = config.verbosity
        display.truncate = config.truncate
        display.redact = config.redact
        display.color = config.color
        display.info_stderr = config.info_stderr
        check_startup()
        check_delegation_args(config)
        configure_timeout(config)
        display.info('RLIMIT_NOFILE: %s' % (CURRENT_RLIMIT_NOFILE,), verbosity=2)
        display.info('MAXFD: %d' % MAXFD, verbosity=2)
        try:
            # Run the selected command. It may raise Delegate to request
            # re-execution in another environment (docker/remote/venv).
            args.func(config)
            delegate_args = None
        except Delegate as ex:
            # save delegation args for use once we exit the exception handler
            delegate_args = (ex.exclude, ex.require, ex.integration_targets)
        # Delegation runs outside the except block so any exception it raises
        # is not chained to (or masked by) the Delegate exception.
        if delegate_args:
            # noinspection PyTypeChecker
            delegate(config, *delegate_args)
        display.review_warnings()
    except ApplicationWarning as ex:
        display.warning(u'%s' % ex)
        sys.exit(0)
    except ApplicationError as ex:
        display.error(u'%s' % ex)
        sys.exit(1)
    except KeyboardInterrupt:
        sys.exit(2)
    except IOError as ex:
        # A consumer closing our output (e.g. piping to `head`) is not an error.
        if ex.errno == errno.EPIPE:
            sys.exit(3)
        raise
def parse_args():
    """Parse command line arguments.

    Builds the full ansible-test argument parser (commands, sub-commands and
    shared option groups), runs it against sys.argv, normalizes a few derived
    options (verbosity, color) and returns the parsed namespace.
    """
    try:
        import argparse
    except ImportError:
        if '--requirements' not in sys.argv:
            raise
        # install argparse without using constraints since pip may be too old to support them
        # not using the ansible-test requirements file since this install is for sys.executable rather than the delegated python (which may be different)
        # argparse has no special requirements, so upgrading pip is not required here
        raw_command(generate_pip_install(generate_pip_command(sys.executable), '', packages=['argparse'], use_constraints=False))
        import argparse

    # argcomplete is optional; its presence only changes the help epilog and
    # enables tab completion at the end of this function.
    try:
        import argcomplete
    except ImportError:
        argcomplete = None

    if argcomplete:
        epilog = 'Tab completion available using the "argcomplete" python package.'
    else:
        epilog = 'Install the "argcomplete" python package to enable tab completion.'

    def key_value_type(value):  # type: (str) -> t.Tuple[str, str]
        """Wrapper around key_value."""
        return key_value(argparse, value)

    parser = argparse.ArgumentParser(epilog=epilog)

    # --- options shared by every command ---
    common = argparse.ArgumentParser(add_help=False)

    common.add_argument('-e', '--explain',
                        action='store_true',
                        help='explain commands that would be executed')

    common.add_argument('-v', '--verbose',
                        dest='verbosity',
                        action='count',
                        default=0,
                        help='display more output')

    common.add_argument('--color',
                        metavar='COLOR',
                        nargs='?',
                        help='generate color output: %(choices)s',
                        choices=('yes', 'no', 'auto'),
                        const='yes',
                        default='auto')

    common.add_argument('--debug',
                        action='store_true',
                        help='run ansible commands in debug mode')

    # noinspection PyTypeChecker
    common.add_argument('--truncate',
                        dest='truncate',
                        metavar='COLUMNS',
                        type=int,
                        default=display.columns,
                        help='truncate some long output (0=disabled) (default: auto)')

    common.add_argument('--redact',
                        dest='redact',
                        action='store_true',
                        default=True,
                        help='redact sensitive values in output')

    common.add_argument('--no-redact',
                        dest='redact',
                        action='store_false',
                        default=False,
                        help='show sensitive values in output')

    common.add_argument('--check-python',
                        choices=SUPPORTED_PYTHON_VERSIONS,
                        help=argparse.SUPPRESS)

    # --- options shared by all test commands (integration, units, sanity) ---
    test = argparse.ArgumentParser(add_help=False, parents=[common])

    test.add_argument('include',
                      metavar='TARGET',
                      nargs='*',
                      help='test the specified target').completer = complete_target

    test.add_argument('--include',
                      metavar='TARGET',
                      action='append',
                      help='include the specified target').completer = complete_target

    test.add_argument('--exclude',
                      metavar='TARGET',
                      action='append',
                      help='exclude the specified target').completer = complete_target

    test.add_argument('--require',
                      metavar='TARGET',
                      action='append',
                      help='require the specified target').completer = complete_target

    test.add_argument('--coverage',
                      action='store_true',
                      help='analyze code coverage when running tests')

    test.add_argument('--coverage-label',
                      default='',
                      help='label to include in coverage output file names')

    test.add_argument('--coverage-check',
                      action='store_true',
                      help='only verify code coverage can be enabled')

    test.add_argument('--metadata',
                      help=argparse.SUPPRESS)

    test.add_argument('--base-branch',
                      help='base branch used for change detection')

    add_changes(test, argparse)
    add_environments(test)

    # --- options shared by all integration test commands ---
    integration = argparse.ArgumentParser(add_help=False, parents=[test])

    integration.add_argument('--python',
                             metavar='VERSION',
                             choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
                             help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))

    integration.add_argument('--start-at',
                             metavar='TARGET',
                             help='start at the specified target').completer = complete_target

    integration.add_argument('--start-at-task',
                             metavar='TASK',
                             help='start at the specified task')

    integration.add_argument('--tags',
                             metavar='TAGS',
                             help='only run plays and tasks tagged with these values')

    integration.add_argument('--skip-tags',
                             metavar='TAGS',
                             help='only run plays and tasks whose tags do not match these values')

    integration.add_argument('--diff',
                             action='store_true',
                             help='show diff output')

    integration.add_argument('--allow-destructive',
                             action='store_true',
                             help='allow destructive tests')

    integration.add_argument('--allow-root',
                             action='store_true',
                             help='allow tests requiring root when not root')

    integration.add_argument('--allow-disabled',
                             action='store_true',
                             help='allow tests which have been marked as disabled')

    integration.add_argument('--allow-unstable',
                             action='store_true',
                             help='allow tests which have been marked as unstable')

    integration.add_argument('--allow-unstable-changed',
                             action='store_true',
                             help='allow tests which have been marked as unstable when focused changes are detected')

    integration.add_argument('--allow-unsupported',
                             action='store_true',
                             help='allow tests which have been marked as unsupported')

    integration.add_argument('--retry-on-error',
                             action='store_true',
                             help='retry failed test with increased verbosity')

    integration.add_argument('--continue-on-error',
                             action='store_true',
                             help='continue after failed test')

    integration.add_argument('--debug-strategy',
                             action='store_true',
                             help='run test playbooks using the debug strategy')

    integration.add_argument('--changed-all-target',
                             metavar='TARGET',
                             default='all',
                             help='target to run when all tests are needed')

    integration.add_argument('--changed-all-mode',
                             metavar='MODE',
                             choices=('default', 'include', 'exclude'),
                             help='include/exclude behavior with --changed-all-target: %(choices)s')

    integration.add_argument('--list-targets',
                             action='store_true',
                             help='list matching targets instead of running tests')

    integration.add_argument('--no-temp-workdir',
                             action='store_true',
                             help='do not run tests from a temporary directory (use only for verifying broken tests)')

    integration.add_argument('--no-temp-unicode',
                             action='store_true',
                             help='avoid unicode characters in temporary directory (use only for verifying broken tests)')

    # --- sub-commands ---
    subparsers = parser.add_subparsers(metavar='COMMAND')
    subparsers.required = True  # work-around for python 3 bug which makes subparsers optional

    # integration (POSIX)
    posix_integration = subparsers.add_parser('integration',
                                              parents=[integration],
                                              help='posix integration tests')

    posix_integration.set_defaults(func=command_posix_integration,
                                   targets=walk_posix_integration_targets,
                                   config=PosixIntegrationConfig)

    add_extra_docker_options(posix_integration)
    add_httptester_options(posix_integration, argparse)

    # network-integration
    network_integration = subparsers.add_parser('network-integration',
                                                parents=[integration],
                                                help='network integration tests')

    network_integration.set_defaults(func=command_network_integration,
                                     targets=walk_network_integration_targets,
                                     config=NetworkIntegrationConfig)

    add_extra_docker_options(network_integration, integration=False)

    network_integration.add_argument('--platform',
                                     metavar='PLATFORM',
                                     action='append',
                                     help='network platform/version').completer = complete_network_platform

    network_integration.add_argument('--platform-collection',
                                     type=key_value_type,
                                     metavar='PLATFORM=COLLECTION',
                                     action='append',
                                     help='collection used to test platform').completer = complete_network_platform_collection

    network_integration.add_argument('--platform-connection',
                                     type=key_value_type,
                                     metavar='PLATFORM=CONNECTION',
                                     action='append',
                                     help='connection used to test platform').completer = complete_network_platform_connection

    network_integration.add_argument('--inventory',
                                     metavar='PATH',
                                     help='path to inventory used for tests')

    network_integration.add_argument('--testcase',
                                     metavar='TESTCASE',
                                     help='limit a test to a specified testcase').completer = complete_network_testcase

    # windows-integration
    windows_integration = subparsers.add_parser('windows-integration',
                                                parents=[integration],
                                                help='windows integration tests')

    windows_integration.set_defaults(func=command_windows_integration,
                                     targets=walk_windows_integration_targets,
                                     config=WindowsIntegrationConfig)

    add_extra_docker_options(windows_integration, integration=False)
    add_httptester_options(windows_integration, argparse)

    windows_integration.add_argument('--windows',
                                     metavar='VERSION',
                                     action='append',
                                     help='windows version').completer = complete_windows

    windows_integration.add_argument('--inventory',
                                     metavar='PATH',
                                     help='path to inventory used for tests')

    # units
    units = subparsers.add_parser('units',
                                  parents=[test],
                                  help='unit tests')

    units.set_defaults(func=command_units,
                       targets=walk_units_targets,
                       config=UnitsConfig)

    units.add_argument('--python',
                       metavar='VERSION',
                       choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
                       help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))

    units.add_argument('--collect-only',
                       action='store_true',
                       help='collect tests but do not execute them')

    # noinspection PyTypeChecker
    units.add_argument('--num-workers',
                       type=int,
                       help='number of workers to use (default: auto)')

    units.add_argument('--requirements-mode',
                       choices=('only', 'skip'),
                       help=argparse.SUPPRESS)

    add_extra_docker_options(units, integration=False)

    # sanity
    sanity = subparsers.add_parser('sanity',
                                   parents=[test],
                                   help='sanity tests')

    sanity.set_defaults(func=command_sanity,
                        targets=walk_sanity_targets,
                        config=SanityConfig)

    sanity.add_argument('--test',
                        metavar='TEST',
                        action='append',
                        choices=[test.name for test in sanity_get_tests()],
                        help='tests to run').completer = complete_sanity_test

    sanity.add_argument('--skip-test',
                        metavar='TEST',
                        action='append',
                        choices=[test.name for test in sanity_get_tests()],
                        help='tests to skip').completer = complete_sanity_test

    sanity.add_argument('--allow-disabled',
                        action='store_true',
                        help='allow tests to run which are disabled by default')

    sanity.add_argument('--list-tests',
                        action='store_true',
                        help='list available tests')

    sanity.add_argument('--python',
                        metavar='VERSION',
                        choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
                        help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))

    sanity.add_argument('--enable-optional-errors',
                        action='store_true',
                        help='enable optional errors')

    add_lint(sanity)
    add_extra_docker_options(sanity, integration=False)

    # shell
    shell = subparsers.add_parser('shell',
                                  parents=[common],
                                  help='open an interactive shell')

    shell.add_argument('--python',
                       metavar='VERSION',
                       choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
                       help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))

    shell.set_defaults(func=command_shell,
                       config=ShellConfig)

    shell.add_argument('--raw',
                       action='store_true',
                       help='direct to shell with no setup')

    add_environments(shell)
    add_extra_docker_options(shell)
    add_httptester_options(shell, argparse)

    # coverage (and its sub-commands)
    coverage_common = argparse.ArgumentParser(add_help=False, parents=[common])
    add_environments(coverage_common, isolated_delegation=False)

    coverage = subparsers.add_parser('coverage',
                                     help='code coverage management and reporting')

    coverage_subparsers = coverage.add_subparsers(metavar='COMMAND')
    coverage_subparsers.required = True  # work-around for python 3 bug which makes subparsers optional

    add_coverage_analyze(coverage_subparsers, coverage_common)

    coverage_combine = coverage_subparsers.add_parser('combine',
                                                      parents=[coverage_common],
                                                      help='combine coverage data and rewrite remote paths')

    coverage_combine.set_defaults(func=command_coverage_combine,
                                  config=CoverageConfig)

    coverage_combine.add_argument('--export',
                                  help='directory to export combined coverage files to')

    add_extra_coverage_options(coverage_combine)

    coverage_erase = coverage_subparsers.add_parser('erase',
                                                    parents=[coverage_common],
                                                    help='erase coverage data files')

    coverage_erase.set_defaults(func=command_coverage_erase,
                                config=CoverageConfig)

    coverage_report = coverage_subparsers.add_parser('report',
                                                     parents=[coverage_common],
                                                     help='generate console coverage report')

    coverage_report.set_defaults(func=command_coverage_report,
                                 config=CoverageReportConfig)

    coverage_report.add_argument('--show-missing',
                                 action='store_true',
                                 help='show line numbers of statements not executed')

    coverage_report.add_argument('--include',
                                 metavar='PAT1,PAT2,...',
                                 help='include only files whose paths match one of these '
                                      'patterns. Accepts shell-style wildcards, which must be '
                                      'quoted.')

    coverage_report.add_argument('--omit',
                                 metavar='PAT1,PAT2,...',
                                 help='omit files whose paths match one of these patterns. '
                                      'Accepts shell-style wildcards, which must be quoted.')

    add_extra_coverage_options(coverage_report)

    coverage_html = coverage_subparsers.add_parser('html',
                                                   parents=[coverage_common],
                                                   help='generate html coverage report')

    coverage_html.set_defaults(func=command_coverage_html,
                               config=CoverageConfig)

    add_extra_coverage_options(coverage_html)

    coverage_xml = coverage_subparsers.add_parser('xml',
                                                  parents=[coverage_common],
                                                  help='generate xml coverage report')

    coverage_xml.set_defaults(func=command_coverage_xml,
                              config=CoverageConfig)

    add_extra_coverage_options(coverage_xml)

    # env
    env = subparsers.add_parser('env',
                                parents=[common],
                                help='show information about the test environment')

    env.set_defaults(func=command_env,
                     config=EnvConfig)

    env.add_argument('--show',
                     action='store_true',
                     help='show environment on stdout')

    env.add_argument('--dump',
                     action='store_true',
                     help='dump environment to disk')

    env.add_argument('--list-files',
                     action='store_true',
                     help='list files on stdout')

    # noinspection PyTypeChecker
    env.add_argument('--timeout',
                     type=int,
                     metavar='MINUTES',
                     help='timeout for future ansible-test commands (0 clears)')

    if argcomplete:
        argcomplete.autocomplete(parser, always_complete_options=False, validator=lambda i, k: True)

    args = parser.parse_args()

    # --explain output is only useful if at least some verbosity is enabled.
    if args.explain and not args.verbosity:
        args.verbosity = 1

    # Resolve the tri-state --color option into a boolean; 'auto' means
    # "use color only when stdout is a terminal".
    if args.color == 'yes':
        args.color = True
    elif args.color == 'no':
        args.color = False
    else:
        args.color = sys.stdout.isatty()

    return args
def key_value(argparse, value):  # type: (argparse_module, str) -> t.Tuple[str, str]
    """Type parsing and validation for argparse key/value pairs separated by an '=' character."""
    pieces = value.split('=')

    # Exactly one '=' separator is required; anything else is malformed.
    if len(pieces) == 2:
        key, val = pieces
        return key, val

    raise argparse.ArgumentTypeError('"%s" must be in the format "key=value"' % value)
# noinspection PyProtectedMember,PyUnresolvedReferences
def add_coverage_analyze(coverage_subparsers, coverage_common):  # type: (argparse_module._SubParsersAction, argparse_module.ArgumentParser) -> None
    """Add the `coverage analyze` subcommand.

    Registers the nested `targets` subcommands (generate, expand, filter,
    combine, missing), each bound to its command function and config class.
    """
    analyze = coverage_subparsers.add_parser(
        'analyze',
        help='analyze collected coverage data',
    )
    analyze_subparsers = analyze.add_subparsers(metavar='COMMAND')
    analyze_subparsers.required = True  # work-around for python 3 bug which makes subparsers optional
    targets = analyze_subparsers.add_parser(
        'targets',
        help='analyze integration test target coverage',
    )
    targets_subparsers = targets.add_subparsers(metavar='COMMAND')
    targets_subparsers.required = True  # work-around for python 3 bug which makes subparsers optional
    # targets generate: aggregate collected coverage by integration test target.
    targets_generate = targets_subparsers.add_parser(
        'generate',
        parents=[coverage_common],
        help='aggregate coverage by integration test target',
    )
    targets_generate.set_defaults(
        func=command_coverage_analyze_targets_generate,
        config=CoverageAnalyzeTargetsGenerateConfig,
    )
    targets_generate.add_argument(
        'input_dir',
        nargs='?',  # optional positional argument
        help='directory to read coverage from',
    )
    targets_generate.add_argument(
        'output_file',
        help='output file for aggregated coverage',
    )
    # targets expand: replace integer indexes in aggregated coverage with target names.
    targets_expand = targets_subparsers.add_parser(
        'expand',
        parents=[coverage_common],
        help='expand target names from integers in aggregated coverage',
    )
    targets_expand.set_defaults(
        func=command_coverage_analyze_targets_expand,
        config=CoverageAnalyzeTargetsExpandConfig,
    )
    targets_expand.add_argument(
        'input_file',
        help='input file to read aggregated coverage from',
    )
    targets_expand.add_argument(
        'output_file',
        help='output file to write expanded coverage to',
    )
    # targets filter: narrow aggregated coverage by target name or path pattern.
    targets_filter = targets_subparsers.add_parser(
        'filter',
        parents=[coverage_common],
        help='filter aggregated coverage data',
    )
    targets_filter.set_defaults(
        func=command_coverage_analyze_targets_filter,
        config=CoverageAnalyzeTargetsFilterConfig,
    )
    targets_filter.add_argument(
        'input_file',
        help='input file to read aggregated coverage from',
    )
    targets_filter.add_argument(
        'output_file',
        help='output file to write expanded coverage to',
    )
    targets_filter.add_argument(
        '--include-target',
        dest='include_targets',
        action='append',  # may be given multiple times
        help='include the specified targets',
    )
    targets_filter.add_argument(
        '--exclude-target',
        dest='exclude_targets',
        action='append',  # may be given multiple times
        help='exclude the specified targets',
    )
    targets_filter.add_argument(
        '--include-path',
        help='include paths matching the given regex',
    )
    targets_filter.add_argument(
        '--exclude-path',
        help='exclude paths matching the given regex',
    )
    # targets combine: merge multiple aggregated coverage files into one.
    targets_combine = targets_subparsers.add_parser(
        'combine',
        parents=[coverage_common],
        help='combine multiple aggregated coverage files',
    )
    targets_combine.set_defaults(
        func=command_coverage_analyze_targets_combine,
        config=CoverageAnalyzeTargetsCombineConfig,
    )
    targets_combine.add_argument(
        'input_file',
        nargs='+',  # one or more input files
        help='input file to read aggregated coverage from',
    )
    targets_combine.add_argument(
        'output_file',
        help='output file to write aggregated coverage to',
    )
    # targets missing: identify coverage present in one file but absent from another.
    targets_missing = targets_subparsers.add_parser(
        'missing',
        parents=[coverage_common],
        help='identify coverage in one file missing in another',
    )
    targets_missing.set_defaults(
        func=command_coverage_analyze_targets_missing,
        config=CoverageAnalyzeTargetsMissingConfig,
    )
    targets_missing.add_argument(
        'from_file',
        help='input file containing aggregated coverage',
    )
    targets_missing.add_argument(
        'to_file',
        help='input file containing aggregated coverage',
    )
    targets_missing.add_argument(
        'output_file',
        help='output file to write aggregated coverage to',
    )
    targets_missing.add_argument(
        '--only-gaps',
        action='store_true',
        help='report only arcs/lines not hit by any target',
    )
    targets_missing.add_argument(
        '--only-exists',
        action='store_true',
        help='limit results to files that exist',
    )
def add_lint(parser):
    """
    :type parser: argparse.ArgumentParser
    """
    # All three options are simple boolean flags; register them data-driven.
    for option, description in (
            ('--lint', 'write lint output to stdout, everything else stderr'),
            ('--junit', 'write test failures to junit xml files'),
            ('--failure-ok', 'exit successfully on failed tests after saving results'),
    ):
        parser.add_argument(option, action='store_true', help=description)
def add_changes(parser, argparse):
    """
    :type parser: argparse.ArgumentParser
    :type argparse: argparse
    """
    # Top-level switch that turns on change-based target selection.
    parser.add_argument('--changed', action='store_true', help='limit targets based on changes')

    group = parser.add_argument_group(title='change detection arguments')

    group.add_argument('--tracked', action='store_true', help=argparse.SUPPRESS)
    group.add_argument('--untracked', action='store_true', help='include untracked files')

    # Each --ignore-* flag defaults its destination to True and stores False when given.
    for option, dest in (
            ('--ignore-committed', 'committed'),
            ('--ignore-staged', 'staged'),
            ('--ignore-unstaged', 'unstaged'),
    ):
        group.add_argument(option, dest=dest, action='store_false', help='exclude %s files' % dest)

    # Hidden from --help output.
    group.add_argument('--changed-from', metavar='PATH', help=argparse.SUPPRESS)
    group.add_argument('--changed-path', metavar='PATH', action='append', help=argparse.SUPPRESS)
def add_environments(parser, isolated_delegation=True):
    """
    :type parser: argparse.ArgumentParser
    :type isolated_delegation: bool
    """
    parser.add_argument('--requirements',
                        action='store_true',
                        help='install command requirements')

    parser.add_argument('--python-interpreter',
                        metavar='PATH',
                        default=None,
                        help='path to the docker or remote python interpreter')

    parser.add_argument('--no-pip-check',
                        dest='pip_check',
                        default=True,
                        action='store_false',
                        help='do not run "pip check" to verify requirements')

    # Only a single execution environment may be selected for any given run.
    exclusive_group = parser.add_mutually_exclusive_group()

    exclusive_group.add_argument('--local',
                                 action='store_true',
                                 help='run from the local environment')

    exclusive_group.add_argument('--venv',
                                 action='store_true',
                                 help='run from ansible-test managed virtual environments')

    venv_group = parser.add_argument_group(title='venv arguments')

    venv_group.add_argument('--venv-system-site-packages',
                            action='store_true',
                            help='enable system site packages')

    if not isolated_delegation:
        # Commands without isolated delegation support still need these attributes
        # present on the parsed args, so provide inert defaults and stop here.
        exclusive_group.set_defaults(
            docker=None,
            remote=None,
            remote_stage=None,
            remote_provider=None,
            remote_terminate=None,
            remote_endpoint=None,
            python_interpreter=None,
        )

        return

    exclusive_group.add_argument('--docker',
                                 metavar='IMAGE',
                                 nargs='?',
                                 default=None,
                                 const='default',
                                 help='run from a docker container').completer = complete_docker

    # The shell command uses a completer which also offers Windows instances.
    if parser.prog.endswith(' shell'):
        remote_completer = complete_remote_shell
    else:
        remote_completer = complete_remote

    exclusive_group.add_argument('--remote',
                                 metavar='PLATFORM',
                                 default=None,
                                 help='run from a remote instance').completer = remote_completer

    remote_group = parser.add_argument_group(title='remote arguments')

    remote_group.add_argument('--remote-stage',
                              metavar='STAGE',
                              help='remote stage to use: prod, dev',
                              default='prod').completer = complete_remote_stage

    remote_group.add_argument('--remote-provider',
                              metavar='PROVIDER',
                              help='remote provider to use: %(choices)s',
                              choices=['default'] + sorted(AnsibleCoreCI.PROVIDERS.keys()),
                              default='default')

    remote_group.add_argument('--remote-endpoint',
                              metavar='ENDPOINT',
                              help='remote provisioning endpoint to use (default: auto)',
                              default=None)

    remote_group.add_argument('--remote-terminate',
                              metavar='WHEN',
                              help='terminate remote instance: %(choices)s (default: %(default)s)',
                              choices=['never', 'always', 'success'],
                              default='never')
def add_extra_coverage_options(parser):
    """
    :type parser: argparse.ArgumentParser
    """
    # Reporting options shared by the coverage subcommands.
    parser.add_argument('--group-by',
                        metavar='GROUP',
                        action='append',
                        choices=COVERAGE_GROUPS,
                        help='group output by: %s' % ', '.join(COVERAGE_GROUPS))

    for option, description in (
            ('--all', 'include all python/powershell source files'),
            ('--stub', 'generate empty report of all python/powershell source files'),
    ):
        parser.add_argument(option, action='store_true', help=description)
def add_httptester_options(parser, argparse):
    """
    :type parser: argparse.ArgumentParser
    :type argparse: argparse
    """
    # Both options write to the same destination, so only one may be supplied.
    exclusive_group = parser.add_mutually_exclusive_group()

    exclusive_group.add_argument('--httptester',
                                 metavar='IMAGE',
                                 default='quay.io/ansible/http-test-container:1.3.0',
                                 help='docker image to use for the httptester container')

    # Storing an empty string disables use of the container.
    exclusive_group.add_argument('--disable-httptester',
                                 dest='httptester',
                                 action='store_const',
                                 const='',
                                 help='do not use the httptester container')

    # Hidden options for internal use only.
    parser.add_argument('--inject-httptester',
                        action='store_true',
                        help=argparse.SUPPRESS)

    parser.add_argument('--httptester-krb5-password',
                        help=argparse.SUPPRESS)
def add_extra_docker_options(parser, integration=True):
    """
    :type parser: argparse.ArgumentParser
    :type integration: bool
    """
    group = parser.add_argument_group(title='docker arguments')

    group.add_argument('--docker-no-pull',
                       action='store_false',
                       dest='docker_pull',
                       help='do not explicitly pull the latest docker images')

    if data_context().content.is_ansible:
        # The option is only offered when running from the ansible source tree.
        group.add_argument('--docker-keep-git',
                           action='store_true',
                           help='transfer git related files into the docker container')
    else:
        # The attribute must still exist on the parsed args, so pin the default.
        group.set_defaults(
            docker_keep_git=False,
        )

    group.add_argument('--docker-seccomp',
                       metavar='SC',
                       choices=('default', 'unconfined'),
                       default=None,
                       help='set seccomp confinement for the test container: %(choices)s')

    group.add_argument('--docker-terminate',
                       metavar='WHEN',
                       help='terminate docker container: %(choices)s (default: %(default)s)',
                       choices=['never', 'always', 'success'],
                       default='always')

    if not integration:
        return

    # The remaining options are only registered for integration test commands.
    group.add_argument('--docker-privileged',
                       action='store_true',
                       help='run docker container in privileged mode')

    group.add_argument('--docker-network',
                       help='run using the specified docker network')

    # noinspection PyTypeChecker
    group.add_argument('--docker-memory',
                       type=int,
                       help='memory limit for docker in bytes')
# noinspection PyUnusedLocal
def complete_remote_stage(prefix, parsed_args, **_):  # pylint: disable=unused-argument
    """
    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    matches = []

    for stage in ('prod', 'dev'):
        if stage.startswith(prefix):
            matches.append(stage)

    return matches
def complete_target(prefix, parsed_args, **_):
    """
    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    # Delegate to the shared target completion logic using the targets configured on the parser.
    configured_targets = parsed_args.targets
    return find_target_completion(configured_targets, prefix)
# noinspection PyUnusedLocal
def complete_remote(prefix, parsed_args, **_):
    """
    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    del parsed_args  # unused

    # Offer every known remote platform whose name matches the typed prefix.
    return [name for name in sorted(get_remote_completion()) if name.startswith(prefix)]
# noinspection PyUnusedLocal
def complete_remote_shell(prefix, parsed_args, **_):
    """
    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    del parsed_args  # unused

    names = sorted(get_remote_completion())

    # 2008 doesn't support SSH, so it is excluded from the list of valid images.
    windows_completion_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', 'windows.txt')
    windows_versions = read_lines_without_comments(windows_completion_path, remove_blank_lines=True)
    names.extend("windows/%s" % version for version in windows_versions if version != '2008')

    return [name for name in names if name.startswith(prefix)]
# noinspection PyUnusedLocal
def complete_docker(prefix, parsed_args, **_):
    """
    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    del parsed_args  # unused

    # Offer every known docker image name matching the typed prefix.
    return [name for name in sorted(get_docker_completion()) if name.startswith(prefix)]
def complete_windows(prefix, parsed_args, **_):
    """
    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    completion_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', 'windows.txt')
    versions = read_lines_without_comments(completion_path, remove_blank_lines=True)
    already_selected = parsed_args.windows

    # Skip versions already chosen on the command line to avoid duplicate selections.
    matches = []

    for version in versions:
        if not version.startswith(prefix):
            continue
        if already_selected and version in already_selected:
            continue
        matches.append(version)

    return matches
def complete_network_platform(prefix, parsed_args, **_):
    """
    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    chosen = parsed_args.platform

    # Offer platforms matching the prefix which have not already been selected.
    return [name for name in sorted(get_network_completion()) if name.startswith(prefix) and (not chosen or name not in chosen)]
def complete_network_platform_collection(prefix, parsed_args, **_):
    """
    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    # Only complete the platform portion, up to and including the '=' separator.
    platform_prefix = prefix.split('=')[0]
    platforms = sorted(set(entry.split('/')[0] for entry in get_network_completion()))
    selected = [pair[0] for pair in parsed_args.platform_collection] if parsed_args.platform_collection else []

    return [platform + '=' for platform in platforms if platform.startswith(platform_prefix) and platform not in selected]
def complete_network_platform_connection(prefix, parsed_args, **_):
    """
    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    # Only complete the platform portion, up to and including the '=' separator.
    platform_prefix = prefix.split('=')[0]
    platforms = sorted(set(entry.split('/')[0] for entry in get_network_completion()))
    selected = [pair[0] for pair in parsed_args.platform_connection] if parsed_args.platform_connection else []

    return [platform + '=' for platform in platforms if platform.startswith(platform_prefix) and platform not in selected]
def complete_network_testcase(prefix, parsed_args, **_):
    """
    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    # Testcases are module specific, so don't autocomplete when more than one
    # module has been specified.
    if len(parsed_args.include) != 1:
        return []

    test_dir = os.path.join(data_context().content.integration_targets_path, parsed_args.include[0], 'tests')
    connection_dirs = data_context().content.get_dirs(test_dir)

    testcases = []

    for connection_dir in connection_dirs:
        for path in data_context().content.get_files(connection_dir):
            name = os.path.basename(path)

            if name.startswith(prefix):
                testcases.append(name.split('.')[0])

    return testcases
# noinspection PyUnusedLocal
def complete_sanity_test(prefix, parsed_args, **_):
    """
    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    del parsed_args  # unused

    test_names = sorted(test.name for test in sanity_get_tests())
    return [name for name in test_names if name.startswith(prefix)]
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,014 |
ansible-test units --docker=default throwing Certificate issues when running ansible_test/_data/quiet_pip.py
|
### Summary
This started happening a few hours ago. When we run
`ansible-test units --docker=default`, we get the certificate issues below from `ansible_test/_data/quiet_pip.py`.
I've tried this on my mac (on the personal network), and a few linux Vm (on work network) all got the same issue. I've already recreated this issue in github actions, you can see the output of the job here. https://github.com/ansible-collections/netapp/actions/runs/681234941
If I give a Python version, like
`ansible-test units --docker=default --python 3.5`, then it will work, but if I don't give a Python version it will produce the error below.
ansible-test sanity --docker=default works fine
```python-traceback
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
$ ansible --version
carchi@carchi-mac-1 ontap (master) $ ansible --version
ansible 2.10.6
config file = None
configured module search path = ['/Users/carchi/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /Library/Frameworks/Python.framework/Versions/3.9/lib/python3.9/site-packages/ansible
executable location = /Library/Frameworks/Python.framework/Versions/3.9/bin/ansible
python version = 3.9.1 (v3.9.1:1e5d33e9b9, Dec 7 2020, 12:10:52) [Clang 6.0 (clang-600.0.57)]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $
```
### OS / Environment
I can replicate this on my mac, and a Linux VM.
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
Run ansible-test units --docker=default on a collection
```
### Expected Results
Unit test to run.
### Actual Results
```console (paste below)
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
|
https://github.com/ansible/ansible/issues/74014
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-03-23T23:07:33Z |
python
| 2021-04-08T22:47:23Z |
test/lib/ansible_test/_internal/config.py
|
"""Configuration classes."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import sys
from . import types as t
from .util import (
find_python,
generate_password,
generate_pip_command,
ApplicationError,
)
from .util_common import (
docker_qualify_image,
get_docker_completion,
get_remote_completion,
CommonConfig,
)
from .metadata import (
Metadata,
)
from .data import (
data_context,
)
# t.TypeVar is not available in every supported environment; fall back to None
# so the module can still be imported (the alias is then unusable for hints).
try:
    # noinspection PyTypeChecker
    TIntegrationConfig = t.TypeVar('TIntegrationConfig', bound='IntegrationConfig')
except AttributeError:
    TIntegrationConfig = None  # pylint: disable=invalid-name
class ParsedRemote:
    """A parsed version of a "remote" string."""
    def __init__(self, arch, platform, version):  # type: (t.Optional[str], str, str) -> None
        self.arch = arch  # optional architecture component
        self.platform = platform
        self.version = version

    @staticmethod
    def parse(value):  # type: (str) -> t.Optional['ParsedRemote']
        """Return a ParsedRemote from the given value or None if the syntax is invalid."""
        fields = value.split('/')

        # "arch/platform/version" form
        if len(fields) == 3:
            return ParsedRemote(fields[0], fields[1], fields[2])

        # "platform/version" form, with no architecture
        if len(fields) == 2:
            return ParsedRemote(None, fields[0], fields[1])

        return None
class EnvironmentConfig(CommonConfig):
    """Configuration common to all commands which execute in an environment."""
    def __init__(self, args, command):
        """
        :type args: any
        :type command: str
        """
        super(EnvironmentConfig, self).__init__(args, command)
        # `is True` normalizes the attribute to a strict boolean.
        self.local = args.local is True
        self.venv = args.venv
        self.venv_system_site_packages = args.venv_system_site_packages
        # argparse.Namespace supports `in` for attribute-presence tests; it is used
        # throughout this initializer for options that only some commands define.
        self.python = args.python if 'python' in args else None  # type: str
        self.docker = docker_qualify_image(args.docker)  # type: str
        self.docker_raw = args.docker  # type: str
        self.remote = args.remote  # type: str
        if self.remote:
            # Validate the --remote value up front so a bad value fails fast.
            self.parsed_remote = ParsedRemote.parse(self.remote)
            if not self.parsed_remote or not self.parsed_remote.platform or not self.parsed_remote.version:
                raise ApplicationError('Unrecognized remote "%s" syntax. Use "platform/version" or "arch/platform/version".' % self.remote)
        else:
            self.parsed_remote = None
        self.docker_privileged = args.docker_privileged if 'docker_privileged' in args else False  # type: bool
        self.docker_pull = args.docker_pull if 'docker_pull' in args else False  # type: bool
        self.docker_keep_git = args.docker_keep_git if 'docker_keep_git' in args else False  # type: bool
        self.docker_seccomp = args.docker_seccomp if 'docker_seccomp' in args else None  # type: str
        self.docker_memory = args.docker_memory if 'docker_memory' in args else None
        self.docker_terminate = args.docker_terminate if 'docker_terminate' in args else None  # type: str
        self.docker_network = args.docker_network if 'docker_network' in args else None  # type: str
        if self.docker_seccomp is None:
            # Fall back to the per-image seccomp setting from the docker completion data.
            self.docker_seccomp = get_docker_completion().get(self.docker_raw, {}).get('seccomp', 'default')
        self.remote_stage = args.remote_stage  # type: str
        self.remote_provider = args.remote_provider  # type: str
        self.remote_endpoint = args.remote_endpoint  # type: t.Optional[str]
        self.remote_terminate = args.remote_terminate  # type: str
        if self.remote_provider == 'default':
            # 'default' is represented internally as None.
            self.remote_provider = None
        self.requirements = args.requirements  # type: bool
        if self.python == 'default':
            self.python = None
        actual_major_minor = '.'.join(str(i) for i in sys.version_info[:2])
        # When no explicit --python version was given, use the running interpreter's version.
        self.python_version = self.python or actual_major_minor
        self.python_interpreter = args.python_interpreter
        self.pip_check = args.pip_check
        # Selecting any isolated environment (docker, remote or venv) requires delegation.
        self.delegate = self.docker or self.remote or self.venv
        self.delegate_args = []  # type: t.List[str]
        if self.delegate:
            # Requirements are always installed when delegating to another environment.
            self.requirements = True
        self.inject_httptester = args.inject_httptester if 'inject_httptester' in args else False  # type: bool
        self.httptester = docker_qualify_image(args.httptester if 'httptester' in args else '')  # type: str
        # Generate a krb5 password when one was not supplied on the command line.
        krb5_password = args.httptester_krb5_password if 'httptester_krb5_password' in args else ''
        self.httptester_krb5_password = krb5_password or generate_password()  # type: str
        # Honor per-environment overrides from the delegation completion data.
        if self.get_delegated_completion().get('httptester', 'enabled') == 'disabled':
            self.httptester = False
        if self.get_delegated_completion().get('pip-check', 'enabled') == 'disabled':
            self.pip_check = False
        # Fail fast when the running interpreter does not match the expected version.
        if args.check_python and args.check_python != actual_major_minor:
            raise ApplicationError('Running under Python %s instead of Python %s as expected.' % (actual_major_minor, args.check_python))
        if self.docker_keep_git:
            def git_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
                """Add files from the content root .git directory to the payload file list."""
                for dirpath, _dirnames, filenames in os.walk(os.path.join(data_context().content.root, '.git')):
                    paths = [os.path.join(dirpath, filename) for filename in filenames]
                    files.extend((path, os.path.relpath(path, data_context().content.root)) for path in paths)
            data_context().register_payload_callback(git_callback)
    @property
    def python_executable(self):
        """
        Path of the python interpreter matching the configured python version.
        :rtype: str
        """
        return find_python(self.python_version)
    @property
    def pip_command(self):
        """
        Command used to invoke pip with the configured python interpreter.
        :rtype: list[str]
        """
        return generate_pip_command(self.python_executable)
    def get_delegated_completion(self):
        """Returns a dictionary of settings specific to the selected delegation system, if any. Otherwise returns an empty dictionary.
        :rtype: dict[str, str]
        """
        if self.docker:
            return get_docker_completion().get(self.docker_raw, {})
        if self.remote:
            return get_remote_completion().get(self.remote, {})
        return {}
class TestConfig(EnvironmentConfig):
    """Configuration common to all test commands."""
    def __init__(self, args, command):
        """
        :type args: any
        :type command: str
        """
        super(TestConfig, self).__init__(args, command)
        self.coverage = args.coverage  # type: bool
        self.coverage_label = args.coverage_label  # type: str
        self.coverage_check = args.coverage_check  # type: bool
        self.coverage_config_base_path = None  # type: t.Optional[str]
        # Target selection lists default to empty when not provided.
        self.include = args.include or []  # type: t.List[str]
        self.exclude = args.exclude or []  # type: t.List[str]
        self.require = args.require or []  # type: t.List[str]
        # Change detection options.
        self.changed = args.changed  # type: bool
        self.tracked = args.tracked  # type: bool
        self.untracked = args.untracked  # type: bool
        self.committed = args.committed  # type: bool
        self.staged = args.staged  # type: bool
        self.unstaged = args.unstaged  # type: bool
        self.changed_from = args.changed_from  # type: str
        self.changed_path = args.changed_path  # type: t.List[str]
        self.base_branch = args.base_branch  # type: str
        # These options are only defined for some commands; default to False when absent.
        self.lint = args.lint if 'lint' in args else False  # type: bool
        self.junit = args.junit if 'junit' in args else False  # type: bool
        self.failure_ok = args.failure_ok if 'failure_ok' in args else False  # type: bool
        self.metadata = Metadata.from_file(args.metadata) if args.metadata else Metadata()
        # May be assigned later; the payload callback below only acts when it is set.
        self.metadata_path = None
        if self.coverage_check:
            # --coverage-check implies coverage collection.
            self.coverage = True
        def metadata_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
            """Add the metadata file to the payload file list."""
            # NOTE(review): `config` aliases `self`; both names are used below — behavior identical.
            config = self
            if self.metadata_path:
                files.append((os.path.abspath(config.metadata_path), config.metadata_path))
        data_context().register_payload_callback(metadata_callback)
class ShellConfig(EnvironmentConfig):
    """Configuration for the shell command."""
    def __init__(self, args):
        """
        :type args: any
        """
        super(ShellConfig, self).__init__(args, 'shell')
        self.raw = args.raw  # type: bool
        # a raw shell bypasses the ansible-test environment setup, so httptester cannot be used
        self.httptester = False if self.raw else self.httptester
class SanityConfig(TestConfig):
    """Configuration for the sanity command."""
    def __init__(self, args):
        """
        :type args: any parsed argparse namespace for the sanity command
        """
        super(SanityConfig, self).__init__(args, 'sanity')
        self.test = args.test  # type: t.List[str]
        self.skip_test = args.skip_test  # type: t.List[str]
        self.list_tests = args.list_tests  # type: bool
        self.allow_disabled = args.allow_disabled  # type: bool
        self.enable_optional_errors = args.enable_optional_errors  # type: bool
        # lint mode emits machine-readable results on stdout, so info output goes to stderr
        self.info_stderr = self.lint
class IntegrationConfig(TestConfig):
    """Configuration for the integration command."""
    def __init__(self, args, command):
        """
        :type args: any parsed argparse namespace for the command
        :type command: str name of the integration command variant
        """
        super(IntegrationConfig, self).__init__(args, command)
        self.start_at = args.start_at  # type: str
        self.start_at_task = args.start_at_task  # type: str
        self.allow_destructive = args.allow_destructive  # type: bool
        self.allow_root = args.allow_root  # type: bool
        self.allow_disabled = args.allow_disabled  # type: bool
        self.allow_unstable = args.allow_unstable  # type: bool
        self.allow_unstable_changed = args.allow_unstable_changed  # type: bool
        self.allow_unsupported = args.allow_unsupported  # type: bool
        self.retry_on_error = args.retry_on_error  # type: bool
        self.continue_on_error = args.continue_on_error  # type: bool
        self.debug_strategy = args.debug_strategy  # type: bool
        self.changed_all_target = args.changed_all_target  # type: str
        self.changed_all_mode = args.changed_all_mode  # type: str
        self.list_targets = args.list_targets  # type: bool
        self.tags = args.tags
        self.skip_tags = args.skip_tags
        self.diff = args.diff
        self.no_temp_workdir = args.no_temp_workdir
        self.no_temp_unicode = args.no_temp_unicode
        # the delegation target may declare that unicode temp paths are unsupported
        if self.get_delegated_completion().get('temp-unicode', 'enabled') == 'disabled':
            self.no_temp_unicode = True
        # listing targets should not run anything; output list on stdout, info on stderr
        if self.list_targets:
            self.explain = True
            self.info_stderr = True
    def get_ansible_config(self):  # type: () -> str
        """Return the path to the Ansible config for the given config."""
        # look for a command-specific config file under the content's integration path
        ansible_config_relative_path = os.path.join(data_context().content.integration_path, '%s.cfg' % self.command)
        ansible_config_path = os.path.join(data_context().content.root, ansible_config_relative_path)
        if not os.path.exists(ansible_config_path):
            # use the default empty configuration unless one has been provided
            ansible_config_path = super(IntegrationConfig, self).get_ansible_config()
        return ansible_config_path
class PosixIntegrationConfig(IntegrationConfig):
    """Configuration for the posix integration command."""
    def __init__(self, args):
        """
        :type args: any parsed argparse namespace for the integration command
        """
        super(PosixIntegrationConfig, self).__init__(args, 'integration')
class WindowsIntegrationConfig(IntegrationConfig):
    """Configuration for the windows integration command."""
    def __init__(self, args):
        """
        :type args: any parsed argparse namespace for the windows-integration command
        """
        super(WindowsIntegrationConfig, self).__init__(args, 'windows-integration')
        self.windows = args.windows  # type: t.List[str]
        self.inventory = args.inventory  # type: str
        # provisioned Windows instances are expendable, so destructive tests are permitted
        if self.windows:
            self.allow_destructive = True
class NetworkIntegrationConfig(IntegrationConfig):
    """Configuration for the network integration command."""
    def __init__(self, args):
        """
        :type args: any parsed argparse namespace for the network-integration command
        """
        super(NetworkIntegrationConfig, self).__init__(args, 'network-integration')
        self.platform = args.platform  # type: t.List[str]
        # per-platform overrides supplied as (platform, value) pairs on the command line
        self.platform_collection = dict(args.platform_collection or [])  # type: t.Dict[str, str]
        self.platform_connection = dict(args.platform_connection or [])  # type: t.Dict[str, str]
        self.inventory = args.inventory  # type: str
        self.testcase = args.testcase  # type: str
class UnitsConfig(TestConfig):
    """Configuration for the units command."""
    def __init__(self, args):
        """
        :type args: any
        """
        super(UnitsConfig, self).__init__(args, 'units')
        self.collect_only = args.collect_only  # type: bool
        self.num_workers = args.num_workers  # type: int
        # requirements_mode only exists for delegated invocations of the units command
        self.requirements_mode = args.requirements_mode if 'requirements_mode' in args else ''
        # 'only'/'skip' override the inherited requirements flag; any other mode leaves it untouched
        overrides = {'only': True, 'skip': False}
        if self.requirements_mode in overrides:
            self.requirements = overrides[self.requirements_mode]
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,014 |
ansible-test units --docker=default throwing Certificate issues when running ansible_test/_data/quiet_pip.py
|
### Summary
This started happening a few hours ago when we run
ansible-test units --docker=default we get the cert issues below from ansible_test/_data/quiet_pip.py
I've tried this on my Mac (on a personal network), and a few Linux VMs (on a work network); all got the same issue. I've already recreated this issue in GitHub Actions; you can see the output of the job here. https://github.com/ansible-collections/netapp/actions/runs/681234941
If i give a python version like
ansible-test units --docker=default --python 3.5 then it will work, but if i don't give a python version it will get the error below.
ansible-test sanity --docker=default works fine
```python-traceback
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
$ ansible --version
carchi@carchi-mac-1 ontap (master) $ ansible --version
ansible 2.10.6
config file = None
configured module search path = ['/Users/carchi/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /Library/Frameworks/Python.framework/Versions/3.9/lib/python3.9/site-packages/ansible
executable location = /Library/Frameworks/Python.framework/Versions/3.9/bin/ansible
python version = 3.9.1 (v3.9.1:1e5d33e9b9, Dec 7 2020, 12:10:52) [Clang 6.0 (clang-600.0.57)]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $
```
### OS / Environment
I can replicate this on my mac, and a Linux VM.
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
Run ansible-test units --docker=default on a collection
```
### Expected Results
Unit test to run.
### Actual Results
```console (paste below)
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
|
https://github.com/ansible/ansible/issues/74014
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-03-23T23:07:33Z |
python
| 2021-04-08T22:47:23Z |
test/lib/ansible_test/_internal/delegation.py
|
"""Delegate test execution to another environment."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import sys
import tempfile
from . import types as t
from .io import (
make_dirs,
read_text_file,
)
from .executor import (
SUPPORTED_PYTHON_VERSIONS,
HTTPTESTER_HOSTS,
create_shell_command,
run_httptester,
start_httptester,
get_python_interpreter,
get_python_version,
)
from .config import (
TestConfig,
EnvironmentConfig,
IntegrationConfig,
WindowsIntegrationConfig,
NetworkIntegrationConfig,
ShellConfig,
SanityConfig,
UnitsConfig,
)
from .core_ci import (
AnsibleCoreCI,
SshKey,
)
from .manage_ci import (
ManagePosixCI,
ManageWindowsCI,
get_ssh_key_setup,
)
from .util import (
ApplicationError,
common_environment,
display,
ANSIBLE_BIN_PATH,
ANSIBLE_TEST_DATA_ROOT,
ANSIBLE_LIB_ROOT,
ANSIBLE_TEST_ROOT,
tempdir,
)
from .util_common import (
run_command,
ResultType,
create_interpreter_wrapper,
get_docker_completion,
get_remote_completion,
)
from .docker_util import (
docker_exec,
docker_get,
docker_pull,
docker_put,
docker_rm,
docker_run,
docker_available,
docker_network_disconnect,
get_docker_networks,
get_docker_preferred_network_name,
get_docker_hostname,
is_docker_user_defined_network,
)
from .cloud import (
get_cloud_providers,
)
from .target import (
IntegrationTarget,
)
from .data import (
data_context,
)
from .payload import (
create_payload,
)
from .venv import (
create_virtual_environment,
)
from .ci import (
get_ci_provider,
)
def check_delegation_args(args):
    """Fail early if the delegation target's Python version cannot be resolved.
    :type args: CommonConfig
    """
    # only environment-based commands support delegation
    if isinstance(args, EnvironmentConfig):
        if args.docker:
            get_python_version(args, get_docker_completion(), args.docker_raw)
        elif args.remote:
            get_python_version(args, get_remote_completion(), args.remote)
def delegate(args, exclude, require, integration_targets):
    """Delegate execution to another environment, writing test metadata to a temp file first when applicable.
    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    :rtype: bool
    """
    if isinstance(args, TestConfig):
        args.metadata.ci_provider = get_ci_provider().code
        make_dirs(ResultType.TMP.path)
        # the metadata file lives for the duration of the delegated run, then the path is cleared
        with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=ResultType.TMP.path) as metadata_fd:
            args.metadata_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(metadata_fd.name))
            args.metadata.to_file(args.metadata_path)
            try:
                return delegate_command(args, exclude, require, integration_targets)
            finally:
                args.metadata_path = None
    else:
        return delegate_command(args, exclude, require, integration_targets)
def delegate_command(args, exclude, require, integration_targets):
    """Dispatch to the delegation handler selected by the configuration.
    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    :rtype: bool
    """
    # first matching delegation mode wins; False means no delegation was requested
    handlers = (
        (args.venv, delegate_venv),
        (args.docker, delegate_docker),
        (args.remote, delegate_remote),
    )
    for selected, handler in handlers:
        if selected:
            handler(args, exclude, require, integration_targets)
            return True
    return False
def delegate_venv(args,  # type: EnvironmentConfig
                  exclude,  # type: t.List[str]
                  require,  # type: t.List[str]
                  integration_targets,  # type: t.Tuple[IntegrationTarget, ...]
                  ):  # type: (...) -> None
    """Delegate ansible-test execution to a virtual environment using venv or virtualenv."""
    if args.python:
        versions = (args.python_version,)
    else:
        versions = SUPPORTED_PYTHON_VERSIONS
    if args.httptester:
        # httptester requires a container; venv delegation cannot provide it
        needs_httptester = sorted(target.name for target in integration_targets if 'needs/httptester/' in target.aliases)
        if needs_httptester:
            display.warning('Use --docker or --remote to enable httptester for tests marked "needs/httptester": %s' % ', '.join(needs_httptester))
    # keep system-site-packages environments separate from isolated ones
    if args.venv_system_site_packages:
        suffix = '-ssp'
    else:
        suffix = ''
    venvs = dict((version, os.path.join(ResultType.TMP.path, 'delegation', 'python%s%s' % (version, suffix))) for version in versions)
    # keep only the environments which could actually be created on this host
    venvs = dict((version, path) for version, path in venvs.items() if create_virtual_environment(args, version, path, args.venv_system_site_packages))
    if not venvs:
        raise ApplicationError('No usable virtual environment support found.')
    # options stripped from argv when re-invoking ansible-test (name -> arg count)
    options = {
        '--venv': 0,
        '--venv-system-site-packages': 0,
    }
    with tempdir() as inject_path:
        # expose one 'pythonX.Y' wrapper per venv on a PATH directory
        for version, path in venvs.items():
            create_interpreter_wrapper(os.path.join(path, 'bin', 'python'), os.path.join(inject_path, 'python%s' % version))
        python_interpreter = os.path.join(inject_path, 'python%s' % args.python_version)
        cmd = generate_command(args, python_interpreter, ANSIBLE_BIN_PATH, data_context().content.root, options, exclude, require)
        if isinstance(args, TestConfig):
            if args.coverage and not args.coverage_label:
                cmd += ['--coverage-label', 'venv']
        env = common_environment()
        with tempdir() as library_path:
            # expose ansible and ansible_test to the virtual environment (only required when running from an install)
            os.symlink(ANSIBLE_LIB_ROOT, os.path.join(library_path, 'ansible'))
            os.symlink(ANSIBLE_TEST_ROOT, os.path.join(library_path, 'ansible_test'))
            env.update(
                PATH=inject_path + os.path.pathsep + env['PATH'],
                PYTHONPATH=library_path,
            )
            run_command(args, cmd, env=env)
def delegate_docker(args, exclude, require, integration_targets):
    """Delegate ansible-test execution to a docker container, copying results back out afterwards.
    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    """
    test_image = args.docker
    privileged = args.docker_privileged
    # shell sessions enable httptester unconditionally; tests only when a target requires it
    if isinstance(args, ShellConfig):
        use_httptester = args.httptester
    else:
        use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in integration_targets)
    if use_httptester:
        docker_pull(args, args.httptester)
    docker_pull(args, test_image)
    httptester_id = None
    test_id = None
    success = False
    # options stripped from argv when re-invoking ansible-test (name -> arg count)
    options = {
        '--docker': 1,
        '--docker-privileged': 0,
        '--docker-util': 1,
    }
    python_interpreter = get_python_interpreter(args, get_docker_completion(), args.docker_raw)
    pwd = '/root'
    ansible_root = os.path.join(pwd, 'ansible')
    if data_context().content.collection:
        content_root = os.path.join(pwd, data_context().content.collection.directory)
    else:
        content_root = ansible_root
    remote_results_root = os.path.join(content_root, data_context().content.results_path)
    cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)
    if isinstance(args, TestConfig):
        if args.coverage and not args.coverage_label:
            image_label = args.docker_raw
            image_label = re.sub('[^a-zA-Z0-9]+', '-', image_label)
            cmd += ['--coverage-label', 'docker-%s' % image_label]
    if isinstance(args, IntegrationConfig):
        # the container is expendable, so destructive tests are safe to allow
        if not args.allow_destructive:
            cmd.append('--allow-destructive')
    cmd_options = []
    # interactive tty needed for shells and the debug strategy
    if isinstance(args, ShellConfig) or (isinstance(args, IntegrationConfig) and args.debug_strategy):
        cmd_options.append('-it')
    with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as local_source_fd:
        try:
            create_payload(args, local_source_fd.name)
            if use_httptester:
                httptester_id = run_httptester(args)
            else:
                httptester_id = None
            test_options = [
                '--detach',
                '--volume', '/sys/fs/cgroup:/sys/fs/cgroup:ro',
                '--privileged=%s' % str(privileged).lower(),
            ]
            if args.docker_memory:
                test_options.extend([
                    '--memory=%d' % args.docker_memory,
                    '--memory-swap=%d' % args.docker_memory,
                ])
            docker_socket = '/var/run/docker.sock'
            if args.docker_seccomp != 'default':
                test_options += ['--security-opt', 'seccomp=%s' % args.docker_seccomp]
            # expose the docker socket when it exists or when docker is remote
            if get_docker_hostname() != 'localhost' or os.path.exists(docker_socket):
                test_options += ['--volume', '%s:%s' % (docker_socket, docker_socket)]
            if httptester_id:
                test_options += ['--env', 'HTTPTESTER=1', '--env', 'KRB5_PASSWORD=%s' % args.httptester_krb5_password]
                network = get_docker_preferred_network_name(args)
                if not is_docker_user_defined_network(network):
                    # legacy links are required when using the default bridge network instead of user-defined networks
                    for host in HTTPTESTER_HOSTS:
                        test_options += ['--link', '%s:%s' % (httptester_id, host)]
            if isinstance(args, IntegrationConfig):
                cloud_platforms = get_cloud_providers(args)
                for cloud_platform in cloud_platforms:
                    test_options += cloud_platform.get_docker_run_options()
            test_id = docker_run(args, test_image, options=test_options)[0]
            if args.explain:
                test_id = 'test_id'
            else:
                test_id = test_id.strip()
            # run the container setup script plus ssh key setup through the script's own shebang shell
            setup_sh = read_text_file(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'docker.sh'))
            ssh_keys_sh = get_ssh_key_setup(SshKey(args))
            setup_sh += ssh_keys_sh
            shell = setup_sh.splitlines()[0][2:]
            docker_exec(args, test_id, [shell], data=setup_sh)
            # write temporary files to /root since /tmp isn't ready immediately on container start
            docker_put(args, test_id, local_source_fd.name, '/root/test.tgz')
            docker_exec(args, test_id, ['tar', 'oxzf', '/root/test.tgz', '-C', '/root'])
            # docker images are only expected to have a single python version available
            if isinstance(args, UnitsConfig) and not args.python:
                cmd += ['--python', 'default']
            # run unit tests unprivileged to prevent stray writes to the source tree
            # also disconnect from the network once requirements have been installed
            if isinstance(args, UnitsConfig):
                writable_dirs = [
                    os.path.join(content_root, ResultType.JUNIT.relative_path),
                    os.path.join(content_root, ResultType.COVERAGE.relative_path),
                ]
                docker_exec(args, test_id, ['mkdir', '-p'] + writable_dirs)
                docker_exec(args, test_id, ['chmod', '777'] + writable_dirs)
                docker_exec(args, test_id, ['chmod', '755', '/root'])
                docker_exec(args, test_id, ['chmod', '644', os.path.join(content_root, args.metadata_path)])
                docker_exec(args, test_id, ['useradd', 'pytest', '--create-home'])
                # install requirements while network access is still available
                docker_exec(args, test_id, cmd + ['--requirements-mode', 'only'], options=cmd_options)
                networks = get_docker_networks(args, test_id)
                if networks is not None:
                    for network in networks:
                        docker_network_disconnect(args, test_id, network)
                else:
                    display.warning('Network disconnection is not supported (this is normal under podman). '
                                    'Tests will not be isolated from the network. Network-related tests may misbehave.')
                cmd += ['--requirements-mode', 'skip']
                cmd_options += ['--user', 'pytest']
            try:
                docker_exec(args, test_id, cmd, options=cmd_options)
                # docker_exec will throw SubprocessError if not successful
                # If we make it here, all the prep work earlier and the docker_exec line above were all successful.
                success = True
            finally:
                # always retrieve results, even on failure
                local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))
                remote_test_root = os.path.dirname(remote_results_root)
                remote_results_name = os.path.basename(remote_results_root)
                remote_temp_file = os.path.join('/root', remote_results_name + '.tgz')
                make_dirs(local_test_root)  # make sure directory exists for collections which have no tests
                with tempfile.NamedTemporaryFile(prefix='ansible-result-', suffix='.tgz') as local_result_fd:
                    docker_exec(args, test_id, ['tar', 'czf', remote_temp_file, '--exclude', ResultType.TMP.name, '-C', remote_test_root, remote_results_name])
                    docker_get(args, test_id, remote_temp_file, local_result_fd.name)
                    run_command(args, ['tar', 'oxzf', local_result_fd.name, '-C', local_test_root])
        finally:
            if httptester_id:
                docker_rm(args, httptester_id)
            if test_id:
                # container retention is controlled by --docker-terminate
                if args.docker_terminate == 'always' or (args.docker_terminate == 'success' and success):
                    docker_rm(args, test_id)
def delegate_remote(args, exclude, require, integration_targets):
    """Delegate ansible-test execution to a provisioned remote instance, downloading results afterwards.
    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :type integration_targets: tuple[IntegrationTarget]
    """
    remote = args.parsed_remote
    core_ci = AnsibleCoreCI(args, remote.platform, remote.version, stage=args.remote_stage, provider=args.remote_provider, arch=remote.arch)
    success = False
    raw = False
    if isinstance(args, ShellConfig):
        use_httptester = args.httptester
        raw = args.raw
    else:
        use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in integration_targets)
    # httptester runs as a local container which is port-forwarded over ssh
    if use_httptester and not docker_available():
        display.warning('Assuming --disable-httptester since `docker` is not available.')
        use_httptester = False
    httptester_id = None
    ssh_options = []
    content_root = None
    try:
        core_ci.start()
        if use_httptester:
            httptester_id, ssh_options = start_httptester(args)
        core_ci.wait()
        python_version = get_python_version(args, get_remote_completion(), args.remote)
        if remote.platform == 'windows':
            # Windows doesn't need the ansible-test fluff, just run the SSH command
            manage = ManageWindowsCI(core_ci)
            manage.setup(python_version)
            cmd = ['powershell.exe']
        elif raw:
            manage = ManagePosixCI(core_ci)
            manage.setup(python_version)
            cmd = create_shell_command(['sh'])
        else:
            manage = ManagePosixCI(core_ci)
            pwd = manage.setup(python_version)
            # options stripped from argv when re-invoking ansible-test (name -> arg count)
            options = {
                '--remote': 1,
            }
            python_interpreter = get_python_interpreter(args, get_remote_completion(), args.remote)
            ansible_root = os.path.join(pwd, 'ansible')
            if data_context().content.collection:
                content_root = os.path.join(pwd, data_context().content.collection.directory)
            else:
                content_root = ansible_root
            cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)
            if httptester_id:
                cmd += ['--inject-httptester', '--httptester-krb5-password', args.httptester_krb5_password]
            if isinstance(args, TestConfig):
                if args.coverage and not args.coverage_label:
                    cmd += ['--coverage-label', 'remote-%s-%s' % (remote.platform, remote.version)]
            if isinstance(args, IntegrationConfig):
                # the remote instance is expendable, so destructive tests are safe to allow
                if not args.allow_destructive:
                    cmd.append('--allow-destructive')
            # remote instances are only expected to have a single python version available
            if isinstance(args, UnitsConfig) and not args.python:
                cmd += ['--python', 'default']
            if isinstance(args, IntegrationConfig):
                cloud_platforms = get_cloud_providers(args)
                for cloud_platform in cloud_platforms:
                    ssh_options += cloud_platform.get_remote_ssh_options()
        try:
            manage.ssh(cmd, ssh_options)
            success = True
        finally:
            # download results except for windows and raw shell sessions
            download = False
            if remote.platform != 'windows':
                download = True
            if isinstance(args, ShellConfig):
                if args.raw:
                    download = False
            if download and content_root:
                local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))
                remote_results_root = os.path.join(content_root, data_context().content.results_path)
                remote_results_name = os.path.basename(remote_results_root)
                remote_temp_path = os.path.join('/tmp', remote_results_name)
                # AIX cp and GNU cp provide different options, no way could be found to have a common
                # pattern and achieve the same goal
                cp_opts = '-hr' if remote.platform in ['aix', 'ibmi'] else '-a'
                manage.ssh('rm -rf {0} && mkdir {0} && cp {1} {2}/* {0}/ && chmod -R a+r {0}'.format(remote_temp_path, cp_opts, remote_results_root))
                manage.download(remote_temp_path, local_test_root)
    finally:
        # instance retention is controlled by --remote-terminate
        if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
            core_ci.stop()
        if httptester_id:
            docker_rm(args, httptester_id)
def generate_command(args, python_interpreter, ansible_bin_path, content_root, options, exclude, require):
    """Build the command line used to re-invoke ansible-test inside the delegated environment.
    :type args: EnvironmentConfig
    :type python_interpreter: str | None
    :type ansible_bin_path: str
    :type content_root: str
    :type options: dict[str, int] options (with arg counts) to strip from the original argv
    :type exclude: list[str]
    :type require: list[str]
    :rtype: list[str]
    """
    # --color is always stripped and re-added explicitly below
    options['--color'] = 1
    cmd = [os.path.join(ansible_bin_path, 'ansible-test')]
    if python_interpreter:
        cmd = [python_interpreter] + cmd
    # Force the encoding used during delegation.
    # This is only needed because ansible-test relies on Python's file system encoding.
    # Environments that do not have the locale configured are thus unable to work with unicode file paths.
    # Examples include FreeBSD and some Linux containers.
    env_vars = dict(
        LC_ALL='en_US.UTF-8',
        ANSIBLE_TEST_CONTENT_ROOT=content_root,
    )
    env_args = ['%s=%s' % (key, env_vars[key]) for key in sorted(env_vars)]
    cmd = ['/usr/bin/env'] + env_args + cmd
    # pass through the original argv minus the stripped options
    cmd += list(filter_options(args, sys.argv[1:], options, exclude, require))
    cmd += ['--color', 'yes' if args.color else 'no']
    if args.requirements:
        cmd += ['--requirements']
    if isinstance(args, ShellConfig):
        cmd = create_shell_command(cmd)
    elif isinstance(args, SanityConfig):
        # resolve the base branch from CI when not given explicitly
        base_branch = args.base_branch or get_ci_provider().get_base_branch()
        if base_branch:
            cmd += ['--base-branch', base_branch]
    return cmd
def filter_options(args, argv, options, exclude, require):
    """Yield the delegated command line arguments derived from the original argv.

    Options named in ``options`` (mapped to the number of values each consumes)
    are stripped from ``argv``; delegation-specific arguments regenerated from
    the parsed config are then appended.

    :type args: EnvironmentConfig
    :type argv: list[str]
    :type options: dict[str, int]
    :type exclude: list[str]
    :type require: list[str]
    :rtype: collections.Iterable[str]
    """
    # Copy before mutating so the caller's mapping is unchanged.
    options = options.copy()
    # These options are always filtered out of argv because they are
    # regenerated below from the parsed configuration.
    options['--requirements'] = 0
    options['--truncate'] = 1
    options['--redact'] = 0
    options['--no-redact'] = 0
    if isinstance(args, TestConfig):
        options.update({
            '--changed': 0,
            '--tracked': 0,
            '--untracked': 0,
            '--ignore-committed': 0,
            '--ignore-staged': 0,
            '--ignore-unstaged': 0,
            '--changed-from': 1,
            '--changed-path': 1,
            '--metadata': 1,
            '--exclude': 1,
            '--require': 1,
        })
    elif isinstance(args, SanityConfig):
        options.update({
            '--base-branch': 1,
        })
    if isinstance(args, IntegrationConfig):
        options.update({
            '--no-temp-unicode': 0,
            '--no-pip-check': 0,
        })
    if isinstance(args, (NetworkIntegrationConfig, WindowsIntegrationConfig)):
        options.update({
            '--inventory': 1,
        })
    # Number of upcoming argv entries that are values of a filtered option
    # and must therefore also be skipped.
    remaining = 0
    for arg in argv:
        if not arg.startswith('-') and remaining:
            # This is a value belonging to a previously filtered option.
            remaining -= 1
            continue
        remaining = 0
        # Split 'name=value' style options so the name alone can be matched.
        parts = arg.split('=', 1)
        key = parts[0]
        if key in options:
            # Skip the option; also skip its separate values unless they were
            # supplied inline via '=' (accounted for by len(parts)).
            remaining = options[key] - len(parts) + 1
            continue
        yield arg
    # Arguments explicitly marked for delegation pass through unchanged.
    for arg in args.delegate_args:
        yield arg
    for target in exclude:
        yield '--exclude'
        yield target
    for target in require:
        yield '--require'
        yield target
    if isinstance(args, TestConfig):
        if args.metadata_path:
            yield '--metadata'
            yield args.metadata_path
    yield '--truncate'
    yield '%d' % args.truncate
    if args.redact:
        yield '--redact'
    else:
        yield '--no-redact'
    if isinstance(args, IntegrationConfig):
        if args.no_temp_unicode:
            yield '--no-temp-unicode'
        if not args.pip_check:
            yield '--no-pip-check'
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,014 |
ansible-test units --docker=default throwing Certificate issues when running ansible_test/_data/quiet_pip.py
|
### Summary
This started happening a few hours ago when we run
ansible-test units --docker=default we get the cert issues below from ansible_test/_data/quiet_pip.py
I've tried this on my mac (on the personal network), and a few linux Vm (on work network) all got the same issue. I've already recreated this issue in github actions, you can see the output of the job here. https://github.com/ansible-collections/netapp/actions/runs/681234941
If i give a python version like
ansible-test units --docker=default --python 3.5 then it will work, but if i don't give a python version it will get the error below.
ansible-test sanity --docker=default works fine
```python-traceback
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
$ ansible --version
carchi@carchi-mac-1 ontap (master) $ ansible --version
ansible 2.10.6
config file = None
configured module search path = ['/Users/carchi/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /Library/Frameworks/Python.framework/Versions/3.9/lib/python3.9/site-packages/ansible
executable location = /Library/Frameworks/Python.framework/Versions/3.9/bin/ansible
python version = 3.9.1 (v3.9.1:1e5d33e9b9, Dec 7 2020, 12:10:52) [Clang 6.0 (clang-600.0.57)]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $
```
### OS / Environment
I can replicate this on my mac, and a Linux VM.
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
Run ansible-test units --docker=default on a collection
```
### Expected Results
Unit test to run.
### Actual Results
```console (paste below)
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
|
https://github.com/ansible/ansible/issues/74014
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-03-23T23:07:33Z |
python
| 2021-04-08T22:47:23Z |
test/lib/ansible_test/_internal/docker_util.py
|
"""Functions for accessing docker via the docker cli."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
import time
from . import types as t
from .io import (
open_binary_file,
read_text_file,
)
from .util import (
ApplicationError,
common_environment,
display,
find_executable,
SubprocessError,
)
from .http import (
urlparse,
)
from .util_common import (
run_command,
)
from .config import (
EnvironmentConfig,
)
BUFFER_SIZE = 256 * 256
def docker_available():
    """Return a truthy value (the executable path) when the docker CLI is on PATH.

    :rtype: bool
    """
    executable = find_executable('docker', required=False)
    return executable
def get_docker_hostname():  # type: () -> str
    """Return the hostname of the Docker service, computing and caching it on first use."""
    try:
        # Cached on the function object after the first call.
        return get_docker_hostname.hostname
    except AttributeError:
        pass

    host_setting = os.environ.get('DOCKER_HOST')

    if host_setting and host_setting.startswith('tcp://'):
        try:
            hostname = urlparse(host_setting)[1].split(':')[0]
            display.info('Detected Docker host: %s' % hostname, verbosity=1)
        except ValueError:
            hostname = 'localhost'
            display.warning('Could not parse DOCKER_HOST environment variable "%s", falling back to localhost.' % host_setting)
    else:
        hostname = 'localhost'
        display.info('Assuming Docker is available on localhost.', verbosity=1)

    get_docker_hostname.hostname = hostname

    return hostname
def get_docker_container_id():
    """Return the ID of the Docker container this process is running in, if any.

    The result (including None) is cached on the function object so the
    filesystem probe and log message only occur once per process.

    :rtype: str | None
    """
    try:
        return get_docker_container_id.container_id
    except AttributeError:
        pass  # not cached yet; detect below
    path = '/proc/self/cpuset'
    container_id = None
    if os.path.exists(path):
        # File content varies based on the environment:
        #   No Container: /
        #   Docker: /docker/c86f3732b5ba3d28bb83b6e14af767ab96abbc52de31313dcb1176a62d91a507
        #   Azure Pipelines (Docker): /azpl_job/0f2edfed602dd6ec9f2e42c867f4d5ee640ebf4c058e6d3196d4393bb8fd0891
        #   Podman: /../../../../../..
        contents = read_text_file(path)
        cgroup_path, cgroup_name = os.path.split(contents.strip())
        # Only the docker and Azure Pipelines layouts carry a usable container ID.
        if cgroup_path in ('/docker', '/azpl_job'):
            container_id = cgroup_name
    get_docker_container_id.container_id = container_id  # cache for subsequent calls
    if container_id:
        display.info('Detected execution in Docker container: %s' % container_id, verbosity=1)
    return container_id
def get_docker_container_ip(args, container_id):
    """Return the IP address of the specified container.

    :type args: EnvironmentConfig
    :type container_id: str
    :rtype: str
    """
    inspect_results = docker_inspect(args, container_id)
    network_settings = inspect_results[0]['NetworkSettings']
    networks = network_settings.get('Networks')

    if networks:
        preferred_network = get_docker_preferred_network_name(args)
        ipaddress = networks[preferred_network]['IPAddress']
    else:
        # podman doesn't provide Networks, so use the top-level IPAddress instead
        ipaddress = network_settings['IPAddress']

    if not ipaddress:
        raise ApplicationError('Cannot retrieve IP address for container: %s' % container_id)

    return ipaddress
def get_docker_network_name(args, container_id):  # type: (EnvironmentConfig, str) -> str
    """
    Return the network name of the specified container.
    Raises an exception if zero or more than one network is found.
    """
    networks = get_docker_networks(args, container_id)

    # networks may be None (podman) or empty -- both are treated as "none found"
    if not networks:
        raise ApplicationError('No network found for Docker container: %s.' % container_id)

    if len(networks) > 1:
        raise ApplicationError('Found multiple networks for Docker container %s instead of only one: %s' % (container_id, ', '.join(networks)))

    only_network = networks[0]
    return only_network
def get_docker_preferred_network_name(args):  # type: (EnvironmentConfig) -> str
    """
    Return the preferred network name for use with Docker. The selection logic is:
    - the network selected by the user with `--docker-network`
    - the network of the currently running docker container (if any)
    - the default docker network (returns None)
    """
    if args.docker_network:
        return args.docker_network

    current_container_id = get_docker_container_id()

    if current_container_id:
        # Make sure any additional containers we launch use the same network as the current container we're running in.
        # This is needed when ansible-test is running in a container that is not connected to Docker's default network.
        return get_docker_network_name(args, current_container_id)

    return None
def is_docker_user_defined_network(network):  # type: (str) -> bool
    """Return True if the network being used is a user-defined network.

    :type network: str | None
    :rtype: bool
    """
    # Coerce to a real bool: the previous expression returned None or '' for
    # falsy input, contradicting the declared bool return type.
    return bool(network) and network != 'bridge'
def get_docker_networks(args, container_id):
    """Return the sorted network names of the container, or None if unavailable.

    :param args: EnvironmentConfig
    :param container_id: str
    :rtype: list[str] | None
    """
    inspect_results = docker_inspect(args, container_id)

    # podman doesn't return Networks -- silently propagate None in that case
    networks = inspect_results[0]['NetworkSettings'].get('Networks')

    return sorted(networks) if networks is not None else None
def docker_pull(args, image):
    """Pull the given image, retrying on failure, unless pulling can be skipped.

    :type args: EnvironmentConfig
    :type image: str
    """
    pinned = '@' in image or ':' in image

    # An image pinned by tag or digest that is already present never needs a pull.
    if pinned and docker_images(args, image):
        display.info('Skipping docker pull of existing image with tag or digest: %s' % image, verbosity=2)
        return

    if not args.docker_pull:
        display.warning('Skipping docker pull for "%s". Image may be out-of-date.' % image)
        return

    for attempt in range(1, 10):
        try:
            docker_command(args, ['pull', image])
            return
        except SubprocessError:
            display.warning('Failed to pull docker image "%s". Waiting a few seconds before trying again.' % image)
            time.sleep(3)

    raise ApplicationError('Failed to pull docker image "%s".' % image)
def docker_put(args, container_id, src, dst):
    """Copy a local file into the container by streaming it to dd via stdin.

    :type args: EnvironmentConfig
    :type container_id: str
    :type src: str
    :type dst: str
    """
    # 'docker cp' is avoided due to a bug which causes 'docker rm' to fail
    with open_binary_file(src) as local_file:
        docker_exec(args, container_id, ['dd', 'of=%s' % dst, 'bs=%s' % BUFFER_SIZE],
                    options=['-i'], stdin=local_file, capture=True)
def docker_get(args, container_id, src, dst):
    """Copy a file out of the container by streaming dd output to a local file.

    :type args: EnvironmentConfig
    :type container_id: str
    :type src: str
    :type dst: str
    """
    # 'docker cp' is avoided due to a bug which causes 'docker rm' to fail
    with open_binary_file(dst, 'wb') as local_file:
        docker_exec(args, container_id, ['dd', 'if=%s' % src, 'bs=%s' % BUFFER_SIZE],
                    options=['-i'], stdout=local_file, capture=True)
def docker_run(args, image, options, cmd=None, create_only=False):
    """Run (or only create) a container from the given image, retrying once on failure.

    :type args: EnvironmentConfig
    :type image: str
    :type options: list[str] | None
    :type cmd: list[str] | None
    :type create_only: bool
    :rtype: str | None, str | None
    """
    options = options or []
    cmd = cmd or []

    command = 'create' if create_only else 'run'

    network = get_docker_preferred_network_name(args)

    if is_docker_user_defined_network(network):
        # Only when the network is not the default bridge network.
        # Using this with the default bridge network results in an error when using --link: links are only supported for user-defined networks
        options.extend(['--network', network])

    for attempt in (1, 2):
        try:
            return docker_command(args, [command] + options + [image] + cmd, capture=True)
        except SubprocessError as ex:
            display.error(ex)
            display.warning('Failed to run docker image "%s". Waiting a few seconds before trying again.' % image)
            time.sleep(3)

    raise ApplicationError('Failed to run docker image "%s".' % image)
def docker_start(args, container_id, options):  # type: (EnvironmentConfig, str, t.List[str]) -> (t.Optional[str], t.Optional[str])
    """Start an existing docker container by name or ID, retrying once on failure."""
    if not options:
        options = []

    for attempt in (1, 2):
        try:
            return docker_command(args, ['start'] + options + [container_id], capture=True)
        except SubprocessError as ex:
            display.error(ex)
            display.warning('Failed to start docker container "%s". Waiting a few seconds before trying again.' % container_id)
            time.sleep(3)

    raise ApplicationError('Failed to run docker container "%s".' % container_id)
def docker_images(args, image):
    """Return metadata for the given image, or an empty list if it does not exist.

    Handles both docker and podman, including podman versions whose
    ``--format`` support differs from docker's.

    :param args: CommonConfig
    :param image: str
    :rtype: list[dict[str, any]]
    """
    try:
        stdout, _dummy = docker_command(args, ['images', image, '--format', '{{json .}}'], capture=True, always=True)
    except SubprocessError as ex:
        if 'no such image' in ex.stderr:
            return []  # podman does not handle this gracefully, exits 125
        if 'function "json" not defined' in ex.stderr:
            # podman > 2 && < 2.2.0 breaks with --format {{json .}}, and requires --format json
            # So we try this as a fallback. If it fails again, we just raise the exception and bail.
            stdout, _dummy = docker_command(args, ['images', image, '--format', 'json'], capture=True, always=True)
        else:
            raise ex
    if stdout.startswith('['):
        # modern podman outputs a pretty-printed json list. Just load the whole thing.
        return json.loads(stdout)
    # docker outputs one json object per line (jsonl)
    return [json.loads(line) for line in stdout.splitlines()]
def docker_rm(args, container_id):
    """Force-remove the specified container, tolerating a missing container.

    :type args: EnvironmentConfig
    :type container_id: str
    """
    try:
        docker_command(args, ['rm', '-f', container_id], capture=True)
    except SubprocessError as ex:
        # podman does not handle a missing container gracefully, it exits 1
        if 'no such container' not in ex.stderr:
            raise ex
def docker_inspect(args, container_id):
    """Return the parsed 'docker inspect' output for the specified container.

    Returns an empty list in explain mode or when the container image does
    not exist.

    :type args: EnvironmentConfig
    :type container_id: str
    :rtype: list[dict]
    """
    if args.explain:
        return []
    try:
        stdout = docker_command(args, ['inspect', container_id], capture=True)[0]
        return json.loads(stdout)
    except SubprocessError as ex:
        if 'no such image' in ex.stderr:
            return []  # podman does not handle this gracefully, exits 125
        try:
            # the command may exit non-zero while still emitting usable JSON
            return json.loads(ex.stdout)
        except Exception:
            # surface the original subprocess error, not the JSON parse error
            raise ex
def docker_network_disconnect(args, container_id, network):
    """Disconnect the specified container from the given network.

    :param args: EnvironmentConfig
    :param container_id: str
    :param network: str
    """
    disconnect_cmd = ['network', 'disconnect', network, container_id]
    docker_command(args, disconnect_cmd, capture=True)
def docker_network_inspect(args, network):
    """Return the parsed 'docker network inspect' output for the given network.

    :type args: EnvironmentConfig
    :type network: str
    :rtype: list[dict]
    """
    if args.explain:
        return []

    try:
        stdout = docker_command(args, ['network', 'inspect', network], capture=True)[0]
    except SubprocessError as ex:
        # the command may exit non-zero while still emitting usable JSON
        try:
            return json.loads(ex.stdout)
        except Exception:
            raise ex
    else:
        return json.loads(stdout)
def docker_exec(args, container_id, cmd, options=None, capture=False, stdin=None, stdout=None, data=None):
    """Execute a command inside the specified container.

    :type args: EnvironmentConfig
    :type container_id: str
    :type cmd: list[str]
    :type options: list[str] | None
    :type capture: bool
    :type stdin: BinaryIO | None
    :type stdout: BinaryIO | None
    :type data: str | None
    :rtype: str | None, str | None
    """
    if not options:
        options = []

    if data:
        # stdin data requires interactive mode
        options.append('-i')

    exec_cmd = ['exec'] + options + [container_id] + cmd

    return docker_command(args, exec_cmd, capture=capture, stdin=stdin, stdout=stdout, data=data)
def docker_info(args):
    """Return the parsed output of 'docker info'.

    :param args: CommonConfig
    :rtype: dict[str, any]
    """
    stdout = docker_command(args, ['info', '--format', '{{json .}}'], capture=True, always=True)[0]
    return json.loads(stdout)
def docker_version(args):
    """Return the parsed output of 'docker version'.

    :param args: CommonConfig
    :rtype: dict[str, any]
    """
    stdout = docker_command(args, ['version', '--format', '{{json .}}'], capture=True, always=True)[0]
    return json.loads(stdout)
def docker_command(args, cmd, capture=False, stdin=None, stdout=None, always=False, data=None):
    """Run a docker CLI command with the docker-specific environment applied.

    :type args: CommonConfig
    :type cmd: list[str]
    :type capture: bool
    :type stdin: file | None
    :type stdout: file | None
    :type always: bool
    :type data: str | None
    :rtype: str | None, str | None
    """
    env = docker_environment()
    full_cmd = ['docker'] + cmd
    return run_command(args, full_cmd, env=env, capture=capture, stdin=stdin, stdout=stdout, always=always, data=data)
def docker_environment():
    """Return the common environment augmented with all DOCKER_* variables.

    :rtype: dict[str, str]
    """
    env = common_environment()
    # Pass through every DOCKER_* variable so the docker CLI behaves as it would for the user.
    env.update({key: value for key, value in os.environ.items() if key.startswith('DOCKER_')})
    return env
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,014 |
ansible-test units --docker=default throwing Certificate issues when running ansible_test/_data/quiet_pip.py
|
### Summary
This started happening a few hours ago when we run
ansible-test units --docker=default we get the cert issues below from ansible_test/_data/quiet_pip.py
I've tried this on my mac (on the personal network), and a few linux Vm (on work network) all got the same issue. I've already recreated this issue in github actions, you can see the output of the job here. https://github.com/ansible-collections/netapp/actions/runs/681234941
If i give a python version like
ansible-test units --docker=default --python 3.5 then it will work, but if i don't give a python version it will get the error below.
ansible-test sanity --docker=default works fine
```python-traceback
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console (paste below)
$ ansible --version
carchi@carchi-mac-1 ontap (master) $ ansible --version
ansible 2.10.6
config file = None
configured module search path = ['/Users/carchi/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /Library/Frameworks/Python.framework/Versions/3.9/lib/python3.9/site-packages/ansible
executable location = /Library/Frameworks/Python.framework/Versions/3.9/bin/ansible
python version = 3.9.1 (v3.9.1:1e5d33e9b9, Dec 7 2020, 12:10:52) [Clang 6.0 (clang-600.0.57)]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $ ansible-config dump --only-changed
carchi@carchi-mac-1 ontap (master) $
```
### OS / Environment
I can replicate this on my mac, and a Linux VM.
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
Run ansible-test units --docker=default on a collection
```
### Expected Results
Unit test to run.
### Actual Results
```console (paste below)
Certificate did not match expected hostname: files.pythonhosted.org. Certificate: {'notAfter': 'Apr 28 19:20:25 2021 GMT', 'subjectAltName': ((u'DNS', 'r.ssl.fastly.net'), (u'DNS', '*.catchpoint.com'), (u'DNS', '*.cnn.io'), (u'DNS', '*.dollarshaveclub.com'), (u'DNS', '*.eater.com'), (u'DNS', '*.fastly.picmonkey.com'), (u'DNS', '*.files.saymedia-content.com'), (u'DNS', '*.ft.com'), (u'DNS', '*.meetupstatic.com'), (u'DNS', '*.nfl.com'), (u'DNS', '*.pagar.me'), (u'DNS', '*.picmonkey.com'), (u'DNS', '*.realself.com'), (u'DNS', '*.sbnation.com'), (u'DNS', '*.shakr.com'), (u'DNS', '*.streamable.com'), (u'DNS', '*.surfly.com'), (u'DNS', '*.theverge.com'), (u'DNS', '*.thrillist.com'), (u'DNS', '*.vox-cdn.com'), (u'DNS', '*.vox.com'), (u'DNS', '*.voxmedia.com'), (u'DNS', 'eater.com'), (u'DNS', 'ft.com'), (u'DNS', 'i.gse.io'), (u'DNS', 'picmonkey.com'), (u'DNS', 'realself.com'), (u'DNS', 'static.wixstatic.com'), (u'DNS', 'streamable.com'), (u'DNS', 'surfly.com'), (u'DNS', 'theverge.com'), (u'DNS', 'vox-cdn.com'), (u'DNS', 'vox.com'), (u'DNS', 'www.joyent.com')), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'California'),), (('localityName', u'San Francisco'),), (('organizationName', u'Fastly, Inc'),), (('commonName', u'r.ssl.fastly.net'),))}
Exception:
Traceback (most recent call last):
File "/usr/local/lib/python2.6/dist-packages/pip/basecommand.py", line 215, in main
status = self.run(options, args)
File "/usr/local/lib/python2.6/dist-packages/pip/commands/install.py", line 335, in run
wb.build(autobuilding=True)
File "/usr/local/lib/python2.6/dist-packages/pip/wheel.py", line 749, in build
self.requirement_set.prepare_files(self.finder)
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 380, in prepare_files
ignore_dependencies=self.ignore_dependencies))
File "/usr/local/lib/python2.6/dist-packages/pip/req/req_set.py", line 620, in _prepare_file
session=self.session, hashes=hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 821, in unpack_url
hashes=hashes
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 659, in unpack_http_url
hashes)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 853, in _download_http_url
stream=True,
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 521, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/download.py", line 386, in request
return super(PipSession, self).request(method, url, *args, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/cachecontrol/adapter.py", line 47, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python2.6/dist-packages/pip/_vendor/requests/adapters.py", line 506, in send
raise SSLError(e, request=request)
SSLError: HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Max retries exceeded with url: /packages/ef/ab/aa12712415809bf698e719b307419f953e25344e8f42d557533d7a02b276/netapp_lib-2020.7.16-py2-none-any.whl (Caused by SSLError(CertificateError("hostname 'files.pythonhosted.org' doesn't match either of 'r.ssl.fastly.net', '*.catchpoint.com', '*.cnn.io', '*.dollarshaveclub.com', '*.eater.com', '*.fastly.picmonkey.com', '*.files.saymedia-content.com', '*.ft.com', '*.meetupstatic.com', '*.nfl.com', '*.pagar.me', '*.picmonkey.com', '*.realself.com', '*.sbnation.com', '*.shakr.com', '*.streamable.com', '*.surfly.com', '*.theverge.com', '*.thrillist.com', '*.vox-cdn.com', '*.vox.com', '*.voxmedia.com', 'eater.com', 'ft.com', 'i.gse.io', 'picmonkey.com', 'realself.com', 'static.wixstatic.com', 'streamable.com', 'surfly.com', 'theverge.com', 'vox-cdn.com', 'vox.com', 'www.joyent.com'",),))
ERROR: Command "/usr/bin/python2.6 /root/ansible/test/lib/ansible_test/_data/quiet_pip.py install --disable-pip-version-check -r /root/ansible/test/lib/ansible_test/_data/requirements/units.txt -r tests/unit/requirements.txt -c /root/ansible/test/lib/ansible_test/_data/requirements/constraints.txt" returned exit status 2.
ERROR: Command "docker exec d47eb360db4ce779c1f690db964655b76e68895c4360ff252c46fe7fe6f5c75a /usr/bin/env ANSIBLE_TEST_CONTENT_ROOT=/root/ansible_collections/netapp/ontap LC_ALL=en_US.UTF-8 /usr/bin/python3.6 /root/ansible/bin/ansible-test units --metadata tests/output/.tmp/metadata-9i2qfrcl.json --truncate 200 --redact --color yes --requirements --python default --requirements-mode only" returned exit status 1.
```
|
https://github.com/ansible/ansible/issues/74014
|
https://github.com/ansible/ansible/pull/74202
|
d7df8a4484e50b27e921e9fe887dc84c86617e7f
|
becf9416736dc911d3411b92f09512b4dae2955c
| 2021-03-23T23:07:33Z |
python
| 2021-04-08T22:47:23Z |
test/lib/ansible_test/_internal/executor.py
|
"""Execute Ansible tests."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
import datetime
import re
import time
import textwrap
import functools
import difflib
import filecmp
import random
import string
import shutil
from . import types as t
from .thread import (
WrappedThread,
)
from .core_ci import (
AnsibleCoreCI,
SshKey,
)
from .manage_ci import (
ManageWindowsCI,
ManageNetworkCI,
get_network_settings,
)
from .cloud import (
cloud_filter,
cloud_init,
get_cloud_environment,
get_cloud_platforms,
CloudEnvironmentConfig,
)
from .io import (
make_dirs,
open_text_file,
read_text_file,
write_text_file,
)
from .util import (
ApplicationWarning,
ApplicationError,
SubprocessError,
display,
remove_tree,
find_executable,
raw_command,
get_available_port,
generate_pip_command,
find_python,
cmd_quote,
ANSIBLE_LIB_ROOT,
ANSIBLE_TEST_DATA_ROOT,
ANSIBLE_TEST_CONFIG_ROOT,
get_ansible_version,
tempdir,
open_zipfile,
SUPPORTED_PYTHON_VERSIONS,
str_to_version,
version_to_str,
get_hash,
)
from .util_common import (
get_docker_completion,
get_remote_completion,
get_python_path,
intercept_command,
named_temporary_file,
run_command,
write_json_test_results,
ResultType,
handle_layout_messages,
)
from .docker_util import (
docker_pull,
docker_run,
docker_available,
docker_rm,
get_docker_container_id,
get_docker_container_ip,
get_docker_hostname,
get_docker_preferred_network_name,
is_docker_user_defined_network,
)
from .ansible_util import (
ansible_environment,
check_pyyaml,
)
from .target import (
IntegrationTarget,
walk_internal_targets,
walk_posix_integration_targets,
walk_network_integration_targets,
walk_windows_integration_targets,
TIntegrationTarget,
)
from .ci import (
get_ci_provider,
)
from .classification import (
categorize_changes,
)
from .config import (
TestConfig,
EnvironmentConfig,
IntegrationConfig,
NetworkIntegrationConfig,
PosixIntegrationConfig,
ShellConfig,
WindowsIntegrationConfig,
TIntegrationConfig,
)
from .metadata import (
ChangeDescription,
)
from .integration import (
integration_test_environment,
integration_test_config_file,
setup_common_temp_dir,
get_inventory_relative_path,
check_inventory,
delegate_inventory,
)
from .data import (
data_context,
)
HTTPTESTER_HOSTS = (
'ansible.http.tests',
'sni1.ansible.http.tests',
'fail.ansible.http.tests',
'self-signed.ansible.http.tests',
)
def check_startup():
    """Checks to perform at startup before running commands."""
    # Currently the only startup check: fail fast if legacy (pre-2.3) module
    # directories contain files, since those would cause problems later on.
    check_legacy_modules()
def check_legacy_modules():
    """Detect conflicts with legacy core/extras module directories to avoid problems later."""
    legacy_paths = ('lib/ansible/modules/core', 'lib/ansible/modules/extras')

    for path in legacy_paths:
        for root, _dir_names, file_names in os.walk(path):
            # The directory shouldn't exist at all; if it does, it must at least be empty of files.
            if not file_names:
                continue

            raise ApplicationError('Files prohibited in "%s". '
                                   'These are most likely legacy modules from version 2.2 or earlier.' % root)
def create_shell_command(command):
    """
    Wrap the given command with /usr/bin/env, passing through selected environment variables.

    :type command: list[str]
    :rtype: list[str]
    """
    # Environment variables forwarded into the shell when present in our own environment.
    optional_vars = (
        'TERM',
    )

    passthrough = ['%s=%s' % (name, os.environ[name]) for name in optional_vars if name in os.environ]

    return ['/usr/bin/env'] + passthrough + command
def get_openssl_version(args, python, python_version):  # type: (EnvironmentConfig, str, str) -> t.Optional[t.Tuple[int, ...]]
    """Return the openssl version, or None if it cannot be determined."""
    if not python_version.startswith('2.'):
        # OpenSSL version checking only works on Python 3.x.
        # This should be the most accurate, since it is the Python we will be using.
        version = json.loads(run_command(args, [python, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'sslcheck.py')], capture=True, always=True)[0])['version']

        if version:
            display.info('Detected OpenSSL version %s under Python %s.' % (version_to_str(version), python_version), verbosity=1)

            return tuple(version)

    # Fall back to detecting the OpenSSL version from the CLI.
    # This should provide an adequate solution on Python 2.x.
    openssl_path = find_executable('openssl', required=False)

    if openssl_path:
        try:
            result = raw_command([openssl_path, 'version'], capture=True)[0]
        except SubprocessError:
            # An empty result simply fails the regex match below, falling through to "unable to detect".
            result = ''

        match = re.search(r'^OpenSSL (?P<version>[0-9]+\.[0-9]+\.[0-9]+)', result)

        if match:
            version = str_to_version(match.group('version'))

            display.info('Detected OpenSSL version %s using the openssl CLI.' % version_to_str(version), verbosity=1)

            return version

    display.info('Unable to detect OpenSSL version.', verbosity=1)

    return None
def get_setuptools_version(args, python):  # type: (EnvironmentConfig, str) -> t.Tuple[int]
    """Return the setuptools version for the given python."""
    command = [python, '-c', 'import setuptools; print(setuptools.__version__)']

    try:
        stdout = raw_command(command, capture=True)[0]
    except SubprocessError:
        # In explain mode setuptools may not already be installed; report an empty version instead of failing.
        if args.explain:
            return tuple()

        raise

    return str_to_version(stdout)
def install_cryptography(args, python, python_version, pip):  # type: (EnvironmentConfig, str, str, t.List[str]) -> None
    """
    Install cryptography for the specified environment.

    :param args: the environment configuration driving command execution
    :param python: path to the python interpreter in use
    :param python_version: version string of that interpreter (e.g. '2.6')
    :param pip: the pip command (as an argument list) for that interpreter
    """
    # make sure ansible-test's basic requirements are met before continuing
    # this is primarily to ensure that pip is new enough to facilitate further requirements installation
    install_ansible_test_requirements(args, pip)

    # make sure setuptools is available before trying to install cryptography
    # the installed version of setuptools affects the version of cryptography to install
    run_command(args, generate_pip_install(pip, '', packages=['setuptools']))

    # install the latest cryptography version that the current requirements can support
    # use a custom constraints file to avoid the normal constraints file overriding the chosen version of cryptography
    # if not installed here later install commands may try to install an unsupported version due to the presence of older setuptools
    # this is done instead of upgrading setuptools to allow tests to function with older distribution provided versions of setuptools
    run_command(args, generate_pip_install(pip, '',
                                           packages=[get_cryptography_requirement(args, python, python_version)],
                                           constraints=os.path.join(ANSIBLE_TEST_DATA_ROOT, 'cryptography-constraints.txt')))
def get_cryptography_requirement(args, python, python_version):  # type: (EnvironmentConfig, str, str) -> str
    """
    Return the correct cryptography requirement for the given python version.
    The version of cryptography installed depends on the python version, setuptools version and openssl version.
    """
    setuptools_version = get_setuptools_version(args, python)
    openssl_version = get_openssl_version(args, python, python_version)

    if setuptools_version < (18, 5):
        # cryptography 2.1+ requires setuptools 18.5+
        # see https://github.com/pyca/cryptography/blob/62287ae18383447585606b9d0765c0f1b8a9777c/setup.py#L26
        return 'cryptography < 2.1'

    if python_version == '2.6':
        # cryptography 2.2+ requires python 2.7+
        # see https://github.com/pyca/cryptography/blob/master/CHANGELOG.rst#22---2018-03-19
        return 'cryptography < 2.2'

    if openssl_version and openssl_version < (1, 1, 0):
        # cryptography 3.2 requires openssl 1.1.x or later
        # see https://cryptography.io/en/latest/changelog.html#v3-2
        return 'cryptography < 3.2'

    # cryptography 3.4+ fails to install on many systems
    # this is a temporary work-around until a more permanent solution is available
    return 'cryptography < 3.4'
def install_command_requirements(args, python_version=None, context=None, enable_pyyaml_check=False):
    """
    Install the pip requirements needed by the current command, caching what was installed per python version.

    :type args: EnvironmentConfig
    :type python_version: str | None
    :type context: str | None
    :type enable_pyyaml_check: bool
    """
    if not args.explain:
        make_dirs(ResultType.COVERAGE.path)
        make_dirs(ResultType.DATA.path)

    if isinstance(args, ShellConfig):
        if args.raw:
            return

    if not args.requirements:
        return

    # NOTE(review): non-raw shell sessions with requirements enabled stop here,
    # installing nothing beyond the directory setup above — confirm intended.
    if isinstance(args, ShellConfig):
        return

    packages = []

    if isinstance(args, TestConfig):
        if args.coverage:
            packages.append('coverage')

        if args.junit:
            packages.append('junit-xml')

    if not python_version:
        python_version = args.python_version

    python = find_python(python_version)
    pip = generate_pip_command(python)

    # skip packages which have already been installed for python_version
    try:
        package_cache = install_command_requirements.package_cache
    except AttributeError:
        package_cache = install_command_requirements.package_cache = {}

    installed_packages = package_cache.setdefault(python_version, set())
    skip_packages = [package for package in packages if package in installed_packages]

    for package in skip_packages:
        packages.remove(package)

    installed_packages.update(packages)

    if args.command != 'sanity':
        install_cryptography(args, python, python_version, pip)

    commands = [generate_pip_install(pip, args.command, packages=packages, context=context)]

    if isinstance(args, IntegrationConfig):
        for cloud_platform in get_cloud_platforms(args):
            commands.append(generate_pip_install(pip, '%s.cloud.%s' % (args.command, cloud_platform)))

    # generate_pip_install returns None when there is nothing to install
    commands = [cmd for cmd in commands if cmd]

    if not commands:
        return  # no need to detect changes or run pip check since we are not making any changes

    # only look for changes when more than one requirements file is needed
    detect_pip_changes = len(commands) > 1

    # first pass to install requirements, changes expected unless environment is already set up
    install_ansible_test_requirements(args, pip)
    changes = run_pip_commands(args, pip, commands, detect_pip_changes)

    if changes:
        # second pass to check for conflicts in requirements, changes are not expected here
        changes = run_pip_commands(args, pip, commands, detect_pip_changes)

        if changes:
            raise ApplicationError('Conflicts detected in requirements. The following commands reported changes during verification:\n%s' %
                                   '\n'.join((' '.join(cmd_quote(c) for c in cmd) for cmd in changes)))

    if args.pip_check:
        # ask pip to check for conflicts between installed packages
        try:
            run_command(args, pip + ['check', '--disable-pip-version-check'], capture=True)
        except SubprocessError as ex:
            if ex.stderr.strip() == 'ERROR: unknown command "check"':
                display.warning('Cannot check pip requirements for conflicts because "pip check" is not supported.')
            else:
                raise

    if enable_pyyaml_check:
        # pyyaml may have been one of the requirements that was installed, so perform an optional check for it
        check_pyyaml(args, python_version, required=False)
def install_ansible_test_requirements(args, pip):  # type: (EnvironmentConfig, t.List[str]) -> None
    """Install requirements for ansible-test for the given pip if not already installed."""
    # Cache handled pip commands as an attribute on the sibling function, mirroring its package cache.
    if not hasattr(install_command_requirements, 'installed'):
        install_command_requirements.installed = set()

    installed = install_command_requirements.installed
    pip_key = tuple(pip)

    if pip_key in installed:
        return

    # make sure basic ansible-test requirements are met, including making sure that pip is recent enough to support constraints
    # virtualenvs created by older distributions may include very old pip versions, such as those created in the centos6 test container (pip 6.0.8)
    run_command(args, generate_pip_install(pip, 'ansible-test', use_constraints=False))

    installed.add(pip_key)
def run_pip_commands(args, pip, commands, detect_pip_changes=False):
    """
    Run the given pip commands, optionally reporting which ones changed the installed package set.

    :type args: EnvironmentConfig
    :type pip: list[str]
    :type commands: list[list[str]]
    :type detect_pip_changes: bool
    :rtype: list[list[str]]
    """
    def snapshot():
        # Capture the installed package list only when change detection was requested.
        return pip_list(args, pip) if detect_pip_changes else None

    changed_commands = []
    after_list = snapshot()

    for cmd in commands:
        if not cmd:
            continue

        before_list = after_list
        run_command(args, cmd)
        after_list = snapshot()

        if before_list != after_list:
            changed_commands.append(cmd)

    return changed_commands
def pip_list(args, pip):
    """
    Return the output of "pip list" for the given pip command.

    :type args: EnvironmentConfig
    :type pip: list[str]
    :rtype: str
    """
    command = pip + ['list']
    stdout = run_command(args, command, capture=True)[0]

    return stdout
def generate_pip_install(pip, command, packages=None, constraints=None, use_constraints=True, context=None):
    """
    Build (but do not run) a "pip install" command line for the given test command.

    :type pip: list[str]
    :type command: str
    :type packages: list[str] | None
    :type constraints: str | None
    :type use_constraints: bool
    :type context: str | None
    :rtype: list[str] | None
    """
    constraints = constraints or os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'constraints.txt')
    requirements = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', '%s.txt' % ('%s.%s' % (command, context) if context else command))
    content_constraints = None

    options = []

    # requirements files are only used when they exist and are non-empty
    if os.path.exists(requirements) and os.path.getsize(requirements):
        options += ['-r', requirements]

    if command == 'sanity' and data_context().content.is_ansible:
        requirements = os.path.join(data_context().content.sanity_path, 'code-smell', '%s.requirements.txt' % context)

        if os.path.exists(requirements) and os.path.getsize(requirements):
            options += ['-r', requirements]

    if command == 'units':
        requirements = os.path.join(data_context().content.unit_path, 'requirements.txt')

        if os.path.exists(requirements) and os.path.getsize(requirements):
            options += ['-r', requirements]

        content_constraints = os.path.join(data_context().content.unit_path, 'constraints.txt')

    if command in ('integration', 'windows-integration', 'network-integration'):
        requirements = os.path.join(data_context().content.integration_path, 'requirements.txt')

        if os.path.exists(requirements) and os.path.getsize(requirements):
            options += ['-r', requirements]

        requirements = os.path.join(data_context().content.integration_path, '%s.requirements.txt' % command)

        if os.path.exists(requirements) and os.path.getsize(requirements):
            options += ['-r', requirements]

        content_constraints = os.path.join(data_context().content.integration_path, 'constraints.txt')

    if command.startswith('integration.cloud.'):
        content_constraints = os.path.join(data_context().content.integration_path, 'constraints.txt')

    if packages:
        options += packages

    if not options:
        return None  # nothing to install, so no command is needed

    if use_constraints:
        if content_constraints and os.path.exists(content_constraints) and os.path.getsize(content_constraints):
            # listing content constraints first gives them priority over constraints provided by ansible-test
            options.extend(['-c', content_constraints])

        options.extend(['-c', constraints])

    return pip + ['install', '--disable-pip-version-check'] + options
def command_shell(args):
    """
    Open an interactive shell in the test environment.

    :type args: ShellConfig
    """
    # Delegated runs are handled by the caller, not by opening a shell here.
    if args.delegate:
        raise Delegate()

    install_command_requirements(args)

    if args.inject_httptester:
        inject_httptester(args)

    run_command(args, create_shell_command(['bash', '-i']))
def command_posix_integration(args):
    """
    Run POSIX integration tests.

    :type args: PosixIntegrationConfig
    """
    handle_layout_messages(data_context().content.integration_messages)

    # The inventory file is looked up by basename inside the ansible-test data root.
    inventory_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, os.path.basename(get_inventory_relative_path(args)))

    all_targets = tuple(walk_posix_integration_targets(include_hidden=True))
    selected_targets = command_integration_filter(args, all_targets)

    command_integration_filtered(args, selected_targets, all_targets, inventory_path)
def command_network_integration(args):
    """
    Run network integration tests, optionally provisioning remote platform instances first.

    :type args: NetworkIntegrationConfig
    """
    handle_layout_messages(data_context().content.integration_messages)

    inventory_relative_path = get_inventory_relative_path(args)
    template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'

    if args.inventory:
        inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
    else:
        inventory_path = os.path.join(data_context().content.root, inventory_relative_path)

    if args.no_temp_workdir:
        # temporary solution to keep DCI tests working
        inventory_exists = os.path.exists(inventory_path)
    else:
        inventory_exists = os.path.isfile(inventory_path)

    # Without --platform there is nothing to provision, so an inventory must already exist.
    if not args.explain and not args.platform and not inventory_exists:
        raise ApplicationError(
            'Inventory not found: %s\n'
            'Use --inventory to specify the inventory path.\n'
            'Use --platform to provision resources and generate an inventory file.\n'
            'See also inventory template: %s' % (inventory_path, template_path)
        )

    check_inventory(args, inventory_path)
    delegate_inventory(args, inventory_path)

    all_targets = tuple(walk_network_integration_targets(include_hidden=True))
    internal_targets = command_integration_filter(args, all_targets, init_callback=network_init)
    instances = []  # type: t.List[WrappedThread]

    if args.platform:
        get_python_path(args, args.python_executable)  # initialize before starting threads

        configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)

        # one provisioning thread per requested platform/version pair
        for platform_version in args.platform:
            platform, version = platform_version.split('/', 1)
            config = configs.get(platform_version)

            if not config:
                continue

            instance = WrappedThread(functools.partial(network_run, args, platform, version, config))
            instance.daemon = True
            instance.start()
            instances.append(instance)

        # poll until all provisioning threads have finished
        while any(instance.is_alive() for instance in instances):
            time.sleep(1)

        remotes = [instance.wait_for_result() for instance in instances]
        inventory = network_inventory(args, remotes)

        display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)

        if not args.explain:
            write_text_file(inventory_path, inventory)

    success = False

    try:
        command_integration_filtered(args, internal_targets, all_targets, inventory_path)
        success = True
    finally:
        # tear down remote instances when requested, even if the tests failed
        if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
            for instance in instances:
                instance.result.stop()
def network_init(args, internal_targets):  # type: (NetworkIntegrationConfig, t.Tuple[IntegrationTarget, ...]) -> None
    """Initialize platforms for network integration tests."""
    if not args.platform:
        return

    # instance config already provided (e.g. by a previous run or delegation), nothing to provision
    if args.metadata.instance_config is not None:
        return

    platform_targets = set(a for target in internal_targets for a in target.aliases if a.startswith('network/'))

    instances = []  # type: t.List[WrappedThread]

    # generate an ssh key (if needed) up front once, instead of for each instance
    SshKey(args)

    for platform_version in args.platform:
        platform, version = platform_version.split('/', 1)
        platform_target = 'network/%s/' % platform

        # skip platforms no selected test target actually uses
        if platform_target not in platform_targets:
            display.warning('Skipping "%s" because selected tests do not target the "%s" platform.' % (
                platform_version, platform))
            continue

        instance = WrappedThread(functools.partial(network_start, args, platform, version))
        instance.daemon = True
        instance.start()
        instances.append(instance)

    # poll until all provisioning threads have finished
    while any(instance.is_alive() for instance in instances):
        time.sleep(1)

    args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
def network_start(args, platform, version):
    """
    Start a new network platform instance and return its saved state.

    :type args: NetworkIntegrationConfig
    :type platform: str
    :type version: str
    :rtype: AnsibleCoreCI
    """
    remote = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider)
    remote.start()

    return remote.save()
def network_run(args, platform, version, config):
    """
    Restore an existing network platform instance and wait until it is ready for use.

    :type args: NetworkIntegrationConfig
    :type platform: str
    :type version: str
    :type config: dict[str, str]
    :rtype: AnsibleCoreCI
    """
    remote = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider, load=False)
    remote.load(config)
    remote.wait()

    ManageNetworkCI(args, remote).wait()

    return remote
def network_inventory(args, remotes):
    """
    Build an ini-style inventory grouping each remote under its platform.

    :type args: NetworkIntegrationConfig
    :type remotes: list[AnsibleCoreCI]
    :rtype: str
    """
    groups = dict([(remote.platform, []) for remote in remotes])
    net = []

    for remote in remotes:
        options = dict(
            ansible_host=remote.connection.hostname,
            ansible_user=remote.connection.username,
            ansible_ssh_private_key_file=os.path.abspath(remote.ssh_key.key),
        )

        settings = get_network_settings(args, remote.platform, remote.version)

        options.update(settings.inventory_vars)

        groups[remote.platform].append(
            '%s %s' % (
                remote.name.replace('.', '-'),
                ' '.join('%s="%s"' % (k, options[k]) for k in sorted(options)),
            )
        )

        # NOTE(review): appended once per remote, so multiple instances of the same
        # platform repeat it under [net:children] — confirm this is harmless.
        net.append(remote.platform)

    groups['net:children'] = net

    template = ''

    for group in groups:
        hosts = '\n'.join(groups[group])

        template += textwrap.dedent("""
        [%s]
        %s
        """) % (group, hosts)

    inventory = template

    return inventory
def command_windows_integration(args):
    """
    Run Windows integration tests, optionally provisioning Windows instances and an httptester first.

    :type args: WindowsIntegrationConfig
    """
    handle_layout_messages(data_context().content.integration_messages)

    inventory_relative_path = get_inventory_relative_path(args)
    template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'

    if args.inventory:
        inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
    else:
        inventory_path = os.path.join(data_context().content.root, inventory_relative_path)

    # Without --windows there is nothing to provision, so an inventory must already exist.
    if not args.explain and not args.windows and not os.path.isfile(inventory_path):
        raise ApplicationError(
            'Inventory not found: %s\n'
            'Use --inventory to specify the inventory path.\n'
            'Use --windows to provision resources and generate an inventory file.\n'
            'See also inventory template: %s' % (inventory_path, template_path)
        )

    check_inventory(args, inventory_path)
    delegate_inventory(args, inventory_path)

    all_targets = tuple(walk_windows_integration_targets(include_hidden=True))
    internal_targets = command_integration_filter(args, all_targets, init_callback=windows_init)
    instances = []  # type: t.List[WrappedThread]
    pre_target = None
    post_target = None
    httptester_id = None

    if args.windows:
        get_python_path(args, args.python_executable)  # initialize before starting threads

        configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)

        # one provisioning thread per requested Windows version
        for version in args.windows:
            config = configs['windows/%s' % version]

            instance = WrappedThread(functools.partial(windows_run, args, version, config))
            instance.daemon = True
            instance.start()
            instances.append(instance)

        # poll until all provisioning threads have finished
        while any(instance.is_alive() for instance in instances):
            time.sleep(1)

        remotes = [instance.wait_for_result() for instance in instances]
        inventory = windows_inventory(remotes)

        display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)

        if not args.explain:
            write_text_file(inventory_path, inventory)

        use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in internal_targets)
        # if running under Docker delegation, the httptester may have already been started
        docker_httptester = bool(os.environ.get("HTTPTESTER", False))

        if use_httptester and not docker_available() and not docker_httptester:
            display.warning('Assuming --disable-httptester since `docker` is not available.')
        elif use_httptester:
            if docker_httptester:
                # we are running in a Docker container that is linked to the httptester container, we just need to
                # forward these requests to the linked hostname
                first_host = HTTPTESTER_HOSTS[0]
                ssh_options = [
                    "-R", "8080:%s:80" % first_host,
                    "-R", "8443:%s:443" % first_host,
                    "-R", "8444:%s:444" % first_host
                ]
            else:
                # we are running directly and need to start the httptester container ourselves and forward the port
                # from there manually set so HTTPTESTER env var is set during the run
                args.inject_httptester = True
                httptester_id, ssh_options = start_httptester(args)

            # to get this SSH command to run in the background we need to set to run in background (-f) and disable
            # the pty allocation (-T)
            ssh_options.insert(0, "-fT")

            # create a script that will continue to run in the background until the script is deleted, this will
            # cleanup and close the connection
            def forward_ssh_ports(target):
                """
                :type target: IntegrationTarget
                """
                if 'needs/httptester/' not in target.aliases:
                    return

                for remote in [r for r in remotes if r.version != '2008']:
                    manage = ManageWindowsCI(remote)
                    manage.upload(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'windows-httptester.ps1'), watcher_path)

                    # We cannot pass an array of string with -File so we just use a delimiter for multiple values
                    script = "powershell.exe -NoProfile -ExecutionPolicy Bypass -File .\\%s -Hosts \"%s\"" \
                             % (watcher_path, "|".join(HTTPTESTER_HOSTS))

                    if args.verbosity > 3:
                        script += " -Verbose"

                    manage.ssh(script, options=ssh_options, force_pty=False)

            def cleanup_ssh_ports(target):
                """
                :type target: IntegrationTarget
                """
                if 'needs/httptester/' not in target.aliases:
                    return

                for remote in [r for r in remotes if r.version != '2008']:
                    # delete the tmp file that keeps the http-tester alive
                    manage = ManageWindowsCI(remote)
                    manage.ssh("cmd.exe /c \"del %s /F /Q\"" % watcher_path, force_pty=False)

            # watcher_path is closed over by both callbacks above; unique per run
            watcher_path = "ansible-test-http-watcher-%s.ps1" % time.time()
            pre_target = forward_ssh_ports
            post_target = cleanup_ssh_ports

    def run_playbook(playbook, run_playbook_vars):  # type: (str, t.Dict[str, t.Any]) -> None
        # run one of the bundled support playbooks against the generated inventory
        playbook_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'playbooks', playbook)
        command = ['ansible-playbook', '-i', inventory_path, playbook_path, '-e', json.dumps(run_playbook_vars)]
        if args.verbosity:
            command.append('-%s' % ('v' * args.verbosity))

        env = ansible_environment(args)
        intercept_command(args, command, '', env, disable_coverage=True)

    remote_temp_path = None

    if args.coverage and not args.coverage_check:
        # Create the remote directory that is writable by everyone. Use Ansible to talk to the remote host.
        remote_temp_path = 'C:\\ansible_test_coverage_%s' % time.time()
        playbook_vars = {'remote_temp_path': remote_temp_path}
        run_playbook('windows_coverage_setup.yml', playbook_vars)

    success = False

    try:
        command_integration_filtered(args, internal_targets, all_targets, inventory_path, pre_target=pre_target,
                                     post_target=post_target, remote_temp_path=remote_temp_path)
        success = True
    finally:
        if httptester_id:
            docker_rm(args, httptester_id)

        if remote_temp_path:
            # Zip up the coverage files that were generated and fetch it back to localhost.
            with tempdir() as local_temp_path:
                playbook_vars = {'remote_temp_path': remote_temp_path, 'local_temp_path': local_temp_path}
                run_playbook('windows_coverage_teardown.yml', playbook_vars)

                for filename in os.listdir(local_temp_path):
                    with open_zipfile(os.path.join(local_temp_path, filename)) as coverage_zip:
                        coverage_zip.extractall(ResultType.COVERAGE.path)

        # tear down remote instances when requested, even if the tests failed
        if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
            for instance in instances:
                instance.result.stop()
# noinspection PyUnusedLocal
def windows_init(args, internal_targets):  # pylint: disable=locally-disabled, unused-argument
    """
    Start provisioning of Windows instances for the requested versions.

    :type args: WindowsIntegrationConfig
    :type internal_targets: tuple[IntegrationTarget]
    """
    # Nothing to do unless Windows versions were requested and no instance config already exists.
    if not args.windows or args.metadata.instance_config is not None:
        return

    threads = []  # type: t.List[WrappedThread]

    for version in args.windows:
        thread = WrappedThread(functools.partial(windows_start, args, version))
        thread.daemon = True
        thread.start()
        threads.append(thread)

    while any(thread.is_alive() for thread in threads):
        time.sleep(1)

    args.metadata.instance_config = [thread.wait_for_result() for thread in threads]
def windows_start(args, version):
    """
    Provision a new Windows instance and return its saved state.

    :type args: WindowsIntegrationConfig
    :type version: str
    :rtype: AnsibleCoreCI
    """
    remote = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider)
    remote.start()

    return remote.save()
def windows_run(args, version, config):
    """
    Restore an existing Windows instance and wait until it is reachable.

    :type args: WindowsIntegrationConfig
    :type version: str
    :type config: dict[str, str]
    :rtype: AnsibleCoreCI
    """
    remote = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider, load=False)
    remote.load(config)
    remote.wait()

    ManageWindowsCI(remote).wait()

    return remote
def windows_inventory(remotes):
    """
    Build an ini-style inventory for the given Windows remotes.

    :type remotes: list[AnsibleCoreCI]
    :rtype: str
    """
    host_lines = []

    for remote in remotes:
        options = dict(
            ansible_host=remote.connection.hostname,
            ansible_user=remote.connection.username,
            ansible_password=remote.connection.password,
            ansible_port=remote.connection.port,
        )

        # used for the connection_windows_ssh test target
        if remote.ssh_key:
            options["ansible_ssh_private_key_file"] = os.path.abspath(remote.ssh_key.key)

        if remote.name == 'windows-2008':
            # force 2008 to use PSRP for the connection plugin
            options.update(
                ansible_connection='psrp',
                ansible_psrp_auth='basic',
                ansible_psrp_cert_validation='ignore',
            )
        elif remote.name == 'windows-2016':
            # force 2016 to use NTLM + HTTP message encryption
            options.update(
                ansible_connection='winrm',
                ansible_winrm_server_cert_validation='ignore',
                ansible_winrm_transport='ntlm',
                ansible_winrm_scheme='http',
                ansible_port='5985',
            )
        else:
            options.update(
                ansible_connection='winrm',
                ansible_winrm_server_cert_validation='ignore',
            )

        rendered_options = ' '.join('%s="%s"' % (key, options[key]) for key in sorted(options))

        host_lines.append('%s %s' % (remote.name.replace('/', '_'), rendered_options))

    template = textwrap.dedent("""
    [windows]
    %s

    # support winrm binary module tests (temporary solution)
    [testhost:children]
    windows
    """)

    return template % ('\n'.join(host_lines))
def command_integration_filter(args,  # type: TIntegrationConfig
                               targets,  # type: t.Iterable[TIntegrationTarget]
                               init_callback=None,  # type: t.Callable[[TIntegrationConfig, t.Tuple[TIntegrationTarget, ...]], None]
                               ):  # type: (...) -> t.Tuple[TIntegrationTarget, ...]
    """Filter the given integration test targets.

    Applies change detection and include/exclude/require filtering, then environment
    based exclusions (local/docker/remote and cloud), and finally prepares the run
    (cloud plugin init, integration vars file payload registration, requirements install).
    Raises AllTargetsSkipped when filtering leaves nothing to run, or Delegate when
    execution should be delegated to another environment.
    """
    targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
    changes = get_changes_filter(args)

    # special behavior when the --changed-all-target target is selected based on changes
    if args.changed_all_target in changes:
        # act as though the --changed-all-target target was in the include list
        if args.changed_all_mode == 'include' and args.changed_all_target not in args.include:
            args.include.append(args.changed_all_target)
            args.delegate_args += ['--include', args.changed_all_target]
        # act as though the --changed-all-target target was in the exclude list
        elif args.changed_all_mode == 'exclude' and args.changed_all_target not in args.exclude:
            args.exclude.append(args.changed_all_target)

    require = args.require + changes
    exclude = args.exclude  # NOTE: aliases args.exclude; the += below also extends args.exclude in place

    internal_targets = walk_internal_targets(targets, args.include, exclude, require)
    environment_exclude = get_integration_filter(args, internal_targets)

    environment_exclude += cloud_filter(args, internal_targets)

    if environment_exclude:
        exclude += environment_exclude
        # re-filter now that environment based exclusions have been added
        internal_targets = walk_internal_targets(targets, args.include, exclude, require)

    if not internal_targets:
        raise AllTargetsSkipped()

    if args.start_at and not any(target.name == args.start_at for target in internal_targets):
        raise ApplicationError('Start at target matches nothing: %s' % args.start_at)

    if init_callback:
        init_callback(args, internal_targets)

    cloud_init(args, internal_targets)

    vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)

    if os.path.exists(vars_file_src):
        def integration_config_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
            """
            Add the integration config vars file to the payload file list.
            This will preserve the file during delegation even if the file is ignored by source control.
            """
            files.append((vars_file_src, data_context().content.integration_vars_path))

        data_context().register_payload_callback(integration_config_callback)

    if args.delegate:
        raise Delegate(require=require, exclude=exclude, integration_targets=internal_targets)

    install_command_requirements(args)

    return internal_targets
def command_integration_filtered(args, targets, all_targets, inventory_path, pre_target=None, post_target=None,
                                 remote_temp_path=None):
    """Run the given integration test targets, handling setup targets, optional retry and result collection.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :type all_targets: tuple[IntegrationTarget]
    :type inventory_path: str
    :type pre_target: (IntegrationTarget) -> None | None
    :type post_target: (IntegrationTarget) -> None | None
    :type remote_temp_path: str | None
    """
    found = False  # becomes True once the --start-at target has been reached
    passed = []
    failed = []

    targets_iter = iter(targets)
    all_targets_dict = dict((target.name, target) for target in all_targets)

    # validate that every setup target referenced by any target actually exists before running anything
    setup_errors = []
    setup_targets_executed = set()

    for target in all_targets:
        for setup_target in target.setup_once + target.setup_always:
            if setup_target not in all_targets_dict:
                setup_errors.append('Target "%s" contains invalid setup target: %s' % (target.name, setup_target))

    if setup_errors:
        raise ApplicationError('Found %d invalid setup aliases:\n%s' % (len(setup_errors), '\n'.join(setup_errors)))

    check_pyyaml(args, args.python_version)

    test_dir = os.path.join(ResultType.TMP.path, 'output_dir')

    # targets marked needs/ssh/ require a working local SSH service; poll until it responds
    if not args.explain and any('needs/ssh/' in target.aliases for target in targets):
        max_tries = 20
        display.info('SSH service required for tests. Checking to make sure we can connect.')
        for i in range(1, max_tries + 1):
            try:
                run_command(args, ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'], capture=True)
                display.info('SSH service responded.')
                break
            except SubprocessError:
                if i == max_tries:
                    raise
                seconds = 3
                display.warning('SSH service not responding. Waiting %d second(s) before checking again.' % seconds)
                time.sleep(seconds)

    # Windows is different as Ansible execution is done locally but the host is remote
    if args.inject_httptester and not isinstance(args, WindowsIntegrationConfig):
        inject_httptester(args)

    start_at_task = args.start_at_task

    results = {}

    current_environment = None  # type: t.Optional[EnvironmentDescription]

    # common temporary directory path that will be valid on both the controller and the remote
    # it must be common because it will be referenced in environment variables that are shared across multiple hosts
    common_temp_path = '/tmp/ansible-test-%s' % ''.join(random.choice(string.ascii_letters + string.digits) for _idx in range(8))

    setup_common_temp_dir(args, common_temp_path)

    try:
        for target in targets_iter:
            if args.start_at and not found:
                found = target.name == args.start_at

                if not found:
                    continue

            if args.list_targets:
                print(target.name)
                continue

            tries = 2 if args.retry_on_error else 1
            verbosity = args.verbosity  # saved so the finally below can restore it after a retry raised it

            cloud_environment = get_cloud_environment(args, target)

            # reuse the snapshot taken after the previous target when available, otherwise take a fresh one
            original_environment = current_environment if current_environment else EnvironmentDescription(args)
            current_environment = None

            display.info('>>> Environment Description\n%s' % original_environment, verbosity=3)

            try:
                while tries:
                    tries -= 1

                    try:
                        if cloud_environment:
                            cloud_environment.setup_once()

                        run_setup_targets(args, test_dir, target.setup_once, all_targets_dict, setup_targets_executed, inventory_path, common_temp_path, False)

                        start_time = time.time()

                        run_setup_targets(args, test_dir, target.setup_always, all_targets_dict, setup_targets_executed, inventory_path, common_temp_path, True)

                        if not args.explain:
                            # create a fresh test directory for each test target
                            remove_tree(test_dir)
                            make_dirs(test_dir)

                        if pre_target:
                            pre_target(target)

                        try:
                            if target.script_path:
                                command_integration_script(args, target, test_dir, inventory_path, common_temp_path,
                                                           remote_temp_path=remote_temp_path)
                            else:
                                command_integration_role(args, target, start_at_task, test_dir, inventory_path,
                                                         common_temp_path, remote_temp_path=remote_temp_path)
                                # --start-at-task only applies to the first role target executed
                                start_at_task = None
                        finally:
                            if post_target:
                                post_target(target)

                        end_time = time.time()

                        results[target.name] = dict(
                            name=target.name,
                            type=target.type,
                            aliases=target.aliases,
                            modules=target.modules,
                            run_time_seconds=int(end_time - start_time),
                            setup_once=target.setup_once,
                            setup_always=target.setup_always,
                            coverage=args.coverage,
                            coverage_label=args.coverage_label,
                            python_version=args.python_version,
                        )

                        break
                    except SubprocessError:
                        if cloud_environment:
                            cloud_environment.on_failure(target, tries)

                        # do not retry when the failure changed the test environment
                        if not original_environment.validate(target.name, throw=False):
                            raise

                        if not tries:
                            raise

                        display.warning('Retrying test target "%s" with maximum verbosity.' % target.name)
                        display.verbosity = args.verbosity = 6

                # verify the target left the environment unchanged; the fresh snapshot is reused for the next target
                start_time = time.time()
                current_environment = EnvironmentDescription(args)
                end_time = time.time()

                EnvironmentDescription.check(original_environment, current_environment, target.name, throw=True)

                results[target.name]['validation_seconds'] = int(end_time - start_time)

                passed.append(target)
            except Exception as ex:
                failed.append(target)

                if args.continue_on_error:
                    display.error(ex)
                    continue

                display.notice('To resume at this test target, use the option: --start-at %s' % target.name)

                next_target = next(targets_iter, None)

                if next_target:
                    display.notice('To resume after this test target, use the option: --start-at %s' % next_target.name)

                raise
            finally:
                display.verbosity = args.verbosity = verbosity

    finally:
        if not args.explain:
            if args.coverage:
                # collect coverage files written under the shared temp path into the results directory
                coverage_temp_path = os.path.join(common_temp_path, ResultType.COVERAGE.name)
                coverage_save_path = ResultType.COVERAGE.path

                for filename in os.listdir(coverage_temp_path):
                    shutil.copy(os.path.join(coverage_temp_path, filename), os.path.join(coverage_save_path, filename))

            remove_tree(common_temp_path)

            result_name = '%s-%s.json' % (
                args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))

            data = dict(
                targets=results,
            )

            write_json_test_results(ResultType.DATA, result_name, data)

    if failed:
        raise ApplicationError('The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s' % (
            len(failed), len(passed) + len(failed), '\n'.join(target.name for target in failed)))
def start_httptester(args):
    """Start the httptester container and compute the ssh port forwarding options for it.

    :type args: EnvironmentConfig
    :rtype: str, list[str]
    """
    # map ports from remote -> localhost -> container
    # passing through localhost is only used when ansible-test is not already running inside a docker container
    forwards = [
        dict(remote=remote_port, container=container_port)
        for remote_port, container_port in (
            (8080, 80),
            (8088, 88),
            (8443, 443),
            (8444, 444),
            (8749, 749),
        )
    ]

    container_id = get_docker_container_id()

    if not container_id:
        for forward in forwards:
            forward['localhost'] = get_available_port()

    docker_pull(args, args.httptester)

    httptester_id = run_httptester(args, dict((forward['localhost'], forward['container']) for forward in forwards if 'localhost' in forward))

    if container_id:
        container_host = get_docker_container_ip(args, httptester_id)
        display.info('Found httptester container address: %s' % container_host, verbosity=1)
    else:
        container_host = get_docker_hostname()

    ssh_options = []

    for forward in forwards:
        ssh_options += ['-R', '%d:%s:%d' % (forward['remote'], container_host, forward.get('localhost', forward['container']))]

    return httptester_id, ssh_options
def run_httptester(args, ports=None):
    """Run the httptester docker image and return the resulting container id.

    :type args: EnvironmentConfig
    :type ports: dict[int, int] | None
    :rtype: str
    """
    options = [
        '--detach',
        '--env', 'KRB5_PASSWORD=%s' % args.httptester_krb5_password,
    ]

    if ports:
        for localhost_port, container_port in ports.items():
            options.extend(['-p', '%d:%d' % (localhost_port, container_port)])

    network = get_docker_preferred_network_name(args)

    if is_docker_user_defined_network(network):
        # network-scoped aliases are only supported for containers in user defined networks
        for alias in HTTPTESTER_HOSTS:
            options.extend(['--network-alias', alias])

    httptester_id = docker_run(args, args.httptester, options=options)[0]

    return 'httptester_id' if args.explain else httptester_id.strip()
def inject_httptester(args):
    """Add httptester hostnames to /etc/hosts and forward the well-known ports to the httptester container.

    Uses pf (BSD/macOS) or iptables (Linux) for the port forwarding, whichever is available.

    :type args: CommonConfig
    """
    comment = ' # ansible-test httptester\n'
    append_lines = ['127.0.0.1 %s%s' % (host, comment) for host in HTTPTESTER_HOSTS]
    hosts_path = '/etc/hosts'

    original_lines = read_text_file(hosts_path).splitlines(True)

    # only rewrite /etc/hosts if our marker comment is not already present
    if not any(line.endswith(comment) for line in original_lines):
        write_text_file(hosts_path, ''.join(original_lines + append_lines))

    # determine which forwarding mechanism to use
    pfctl = find_executable('pfctl', required=False)
    iptables = find_executable('iptables', required=False)

    if pfctl:
        # load the pf kernel module when kldload is available (FreeBSD); ignore failure if already loaded
        kldload = find_executable('kldload', required=False)

        if kldload:
            try:
                run_command(args, ['kldload', 'pf'], capture=True)
            except SubprocessError:
                pass  # already loaded

        rules = '''
rdr pass inet proto tcp from any to any port 80 -> 127.0.0.1 port 8080
rdr pass inet proto tcp from any to any port 88 -> 127.0.0.1 port 8088
rdr pass inet proto tcp from any to any port 443 -> 127.0.0.1 port 8443
rdr pass inet proto tcp from any to any port 444 -> 127.0.0.1 port 8444
rdr pass inet proto tcp from any to any port 749 -> 127.0.0.1 port 8749
'''
        cmd = ['pfctl', '-ef', '-']

        try:
            run_command(args, cmd, capture=True, data=rules)
        except SubprocessError:
            pass  # non-zero exit status on success
    elif iptables:
        # (local port, container-facing port) pairs to redirect via the nat table
        ports = [
            (80, 8080),
            (88, 8088),
            (443, 8443),
            (444, 8444),
            (749, 8749),
        ]

        for src, dst in ports:
            rule = ['-o', 'lo', '-p', 'tcp', '--dport', str(src), '-j', 'REDIRECT', '--to-port', str(dst)]

            try:
                # check for existing rule
                cmd = ['iptables', '-t', 'nat', '-C', 'OUTPUT'] + rule
                run_command(args, cmd, capture=True)
            except SubprocessError:
                # append rule when it does not exist
                cmd = ['iptables', '-t', 'nat', '-A', 'OUTPUT'] + rule
                run_command(args, cmd, capture=True)
    else:
        raise ApplicationError('No supported port forwarding mechanism detected.')
def run_setup_targets(args, test_dir, target_names, targets_dict, targets_executed, inventory_path, temp_path, always):
    """Run the named setup targets, skipping any that already ran unless always is set.

    :type args: IntegrationConfig
    :type test_dir: str
    :type target_names: list[str]
    :type targets_dict: dict[str, IntegrationTarget]
    :type targets_executed: set[str]
    :type inventory_path: str
    :type temp_path: str
    :type always: bool
    """
    for name in target_names:
        if not always and name in targets_executed:
            continue  # this setup target has already been run

        setup_target = targets_dict[name]

        if not args.explain:
            # create a fresh test directory for each test target
            remove_tree(test_dir)
            make_dirs(test_dir)

        if setup_target.script_path:
            command_integration_script(args, setup_target, test_dir, inventory_path, temp_path)
        else:
            command_integration_role(args, setup_target, None, test_dir, inventory_path, temp_path)

        targets_executed.add(name)
def integration_environment(args, target, test_dir, inventory_path, ansible_config, env_config):
    """Build the environment variables used to run an integration test target.

    :type args: IntegrationConfig
    :type target: IntegrationTarget
    :type test_dir: str
    :type inventory_path: str
    :type ansible_config: str | None
    :type env_config: CloudEnvironmentConfig | None
    :rtype: dict[str, str]
    """
    env = ansible_environment(args, ansible_config=ansible_config)

    if args.inject_httptester:
        env['HTTPTESTER'] = '1'
        env['KRB5_PASSWORD'] = args.httptester_krb5_password

    callback_plugins = ['junit']

    if env_config:
        callback_plugins += env_config.callback_plugins or []

    integration = dict(
        JUNIT_OUTPUT_DIR=ResultType.JUNIT.path,
        ANSIBLE_CALLBACKS_ENABLED=','.join(sorted(set(callback_plugins))),
        ANSIBLE_TEST_CI=args.metadata.ci_provider or get_ci_provider().code,
        ANSIBLE_TEST_COVERAGE='check' if args.coverage_check else ('yes' if args.coverage else ''),
        OUTPUT_DIR=test_dir,
        INVENTORY_PATH=os.path.abspath(inventory_path),
    )

    if args.debug_strategy:
        env['ANSIBLE_STRATEGY'] = 'debug'

    if 'non_local/' in target.aliases:
        if args.coverage:
            display.warning('Skipping coverage reporting on Ansible modules for non-local test: %s' % target.name)

        env['ANSIBLE_TEST_REMOTE_INTERPRETER'] = ''

    env.update(integration)

    return env
def command_integration_script(args, target, test_dir, inventory_path, temp_path, remote_temp_path=None):
    """Run an integration test target implemented as a shell script.

    :type args: IntegrationConfig
    :type target: IntegrationTarget
    :type test_dir: str
    :type inventory_path: str
    :type temp_path: str
    :type remote_temp_path: str | None
    """
    display.info('Running %s integration test script' % target.name)

    env_config = None

    if isinstance(args, PosixIntegrationConfig):
        cloud_environment = get_cloud_environment(args, target)

        if cloud_environment:
            env_config = cloud_environment.get_environment_config()

    with integration_test_environment(args, target, inventory_path) as test_env:
        cmd = ['./%s' % os.path.basename(target.script_path)]

        if args.verbosity:
            cmd.append('-' + ('v' * args.verbosity))

        env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config)
        cwd = os.path.join(test_env.targets_dir, target.relative_path)

        env.update(dict(
            # support use of adhoc ansible commands in collections without specifying the fully qualified collection name
            ANSIBLE_PLAYBOOK_DIR=cwd,
        ))

        # cloud provided env vars take precedence over the generated ones
        if env_config and env_config.env_vars:
            env.update(env_config.env_vars)

        with integration_test_config_file(args, env_config, test_env.integration_dir) as config_path:
            if config_path:
                # pass the cloud config file to the script as extra vars
                cmd += ['-e', '@%s' % config_path]

            module_coverage = 'non_local/' not in target.aliases

            intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd, temp_path=temp_path,
                              remote_temp_path=remote_temp_path, module_coverage=module_coverage)
def command_integration_role(args, target, start_at_task, test_dir, inventory_path, temp_path, remote_temp_path=None):
    """Run an integration test target implemented as an ansible role, via a generated playbook.

    :type args: IntegrationConfig
    :type target: IntegrationTarget
    :type start_at_task: str | None
    :type test_dir: str
    :type inventory_path: str
    :type temp_path: str
    :type remote_temp_path: str | None
    """
    display.info('Running %s integration test role' % target.name)

    env_config = None

    vars_files = []
    variables = dict(
        output_dir=test_dir,
    )

    # select the play's host pattern and fact gathering based on the type of test run
    if isinstance(args, WindowsIntegrationConfig):
        hosts = 'windows'
        gather_facts = False
        variables.update(dict(
            win_output_dir=r'C:\ansible_testing',
        ))
    elif isinstance(args, NetworkIntegrationConfig):
        hosts = target.network_platform
        gather_facts = False
    else:
        hosts = 'testhost'
        gather_facts = True

    cloud_environment = get_cloud_environment(args, target)

    if cloud_environment:
        env_config = cloud_environment.get_environment_config()

    with integration_test_environment(args, target, inventory_path) as test_env:
        if os.path.exists(test_env.vars_file):
            vars_files.append(os.path.relpath(test_env.vars_file, test_env.integration_dir))

        play = dict(
            hosts=hosts,
            gather_facts=gather_facts,
            vars_files=vars_files,
            vars=variables,
            roles=[
                target.name,
            ],
        )

        if env_config:
            if env_config.ansible_vars:
                variables.update(env_config.ansible_vars)

            play.update(dict(
                environment=env_config.env_vars,
                module_defaults=env_config.module_defaults,
            ))

        # the playbook is written as JSON, which is also valid YAML
        playbook = json.dumps([play], indent=4, sort_keys=True)

        with named_temporary_file(args=args, directory=test_env.integration_dir, prefix='%s-' % target.name, suffix='.yml', content=playbook) as playbook_path:
            filename = os.path.basename(playbook_path)

            display.info('>>> Playbook: %s\n%s' % (filename, playbook.strip()), verbosity=3)

            cmd = ['ansible-playbook', filename, '-i', os.path.relpath(test_env.inventory_path, test_env.integration_dir)]

            if start_at_task:
                cmd += ['--start-at-task', start_at_task]

            if args.tags:
                cmd += ['--tags', args.tags]

            if args.skip_tags:
                cmd += ['--skip-tags', args.skip_tags]

            if args.diff:
                cmd += ['--diff']

            if isinstance(args, NetworkIntegrationConfig):
                if args.testcase:
                    cmd += ['-e', 'testcase=%s' % args.testcase]

            if args.verbosity:
                cmd.append('-' + ('v' * args.verbosity))

            env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config)
            cwd = test_env.integration_dir

            env.update(dict(
                # support use of adhoc ansible commands in collections without specifying the fully qualified collection name
                ANSIBLE_PLAYBOOK_DIR=cwd,
            ))

            env['ANSIBLE_ROLES_PATH'] = test_env.targets_dir

            module_coverage = 'non_local/' not in target.aliases
            intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd, temp_path=temp_path,
                              remote_temp_path=remote_temp_path, module_coverage=module_coverage)
def get_changes_filter(args):
    """Return the list of targets to test based on detected changes.

    :type args: TestConfig
    :rtype: list[str]
    """
    paths = detect_changes(args)

    if not args.metadata.change_description:
        args.metadata.change_description = categorize_changes(args, paths, args.command) if paths else ChangeDescription()

    if paths is None:
        return []  # change detection not enabled, do not filter targets

    if not paths:
        raise NoChangesDetected()

    change_targets = args.metadata.change_description.targets

    if change_targets is None:
        raise NoTestsForChanges()

    return change_targets
def detect_changes(args):
    """Return the changed paths to test against, or None when change detection is not enabled.

    :type args: TestConfig
    :rtype: list[str] | None
    """
    if args.changed:
        paths = get_ci_provider().detect_changes(args)
    elif args.changed_from or args.changed_path:
        paths = args.changed_path or []

        if args.changed_from:
            paths += read_text_file(args.changed_from).splitlines()
    else:
        return None  # change detection not enabled

    if paths is None:
        return None  # act as though change detection not enabled, do not filter targets

    display.info('Detected changes in %d file(s).' % len(paths))

    for changed_path in paths:
        display.info(changed_path, verbosity=1)

    return paths
def get_integration_filter(args, targets):
    """Return the exclusion filter appropriate for the current test environment.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    if args.docker:
        environment_filter = get_integration_docker_filter
    elif args.remote:
        environment_filter = get_integration_remote_filter
    else:
        environment_filter = get_integration_local_filter

    return environment_filter(args, targets)
def common_integration_filter(args, targets, exclude):
    """Apply exclusions common to all test environments, extending the exclude list in place.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :type exclude: list[str]
    """
    # targets explicitly included via their disabled/ alias are allowed to run
    override_disabled = set(target for target in args.include if target.startswith('disabled/'))

    if not args.allow_disabled:
        skip = 'disabled/'
        override = [target.name for target in targets if override_disabled & set(target.aliases)]
        skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
        if skipped:
            exclude.extend(skipped)
            display.warning('Excluding tests marked "%s" which require --allow-disabled or prefixing with "disabled/": %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    # targets explicitly included via their unsupported/ alias are allowed to run
    override_unsupported = set(target for target in args.include if target.startswith('unsupported/'))

    if not args.allow_unsupported:
        skip = 'unsupported/'
        override = [target.name for target in targets if override_unsupported & set(target.aliases)]
        skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
        if skipped:
            exclude.extend(skipped)
            display.warning('Excluding tests marked "%s" which require --allow-unsupported or prefixing with "unsupported/": %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    # targets explicitly included via their unstable/ alias are allowed to run
    override_unstable = set(target for target in args.include if target.startswith('unstable/'))

    if args.allow_unstable_changed:
        # also allow unstable targets which are focused by the detected changes
        override_unstable |= set(args.metadata.change_description.focused_targets or [])

    if not args.allow_unstable:
        skip = 'unstable/'
        override = [target.name for target in targets if override_unstable & set(target.aliases)]
        skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
        if skipped:
            exclude.extend(skipped)
            display.warning('Excluding tests marked "%s" which require --allow-unstable or prefixing with "unstable/": %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    # only skip a Windows test if using --windows and all the --windows versions are defined in the aliases as skip/windows/%s
    if isinstance(args, WindowsIntegrationConfig) and args.windows:
        all_skipped = []
        not_skipped = []

        for target in targets:
            if "skip/windows/" not in target.aliases:
                continue

            skip_valid = []
            skip_missing = []
            for version in args.windows:
                if "skip/windows/%s/" % version in target.aliases:
                    skip_valid.append(version)
                else:
                    skip_missing.append(version)

            if skip_missing and skip_valid:
                not_skipped.append((target.name, skip_valid, skip_missing))
            elif skip_valid:
                all_skipped.append(target.name)

        if all_skipped:
            exclude.extend(all_skipped)
            skip_aliases = ["skip/windows/%s/" % w for w in args.windows]
            display.warning('Excluding tests marked "%s" which are set to skip with --windows %s: %s'
                            % ('", "'.join(skip_aliases), ', '.join(args.windows), ', '.join(all_skipped)))

        if not_skipped:
            for target, skip_valid, skip_missing in not_skipped:
                # warn when failing to skip due to lack of support for skipping only some versions
                display.warning('Including test "%s" which was marked to skip for --windows %s but not %s.'
                                % (target, ', '.join(skip_valid), ', '.join(skip_missing)))
def get_integration_local_filter(args, targets):
    """Return the target exclusions that apply when running tests locally.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    exclude = []

    common_integration_filter(args, targets, exclude)

    if not args.allow_root and os.getuid() != 0:
        root_alias = 'needs/root/'
        root_skipped = [target.name for target in targets if root_alias in target.aliases]

        if root_skipped:
            exclude.append(root_alias)
            display.warning('Excluding tests marked "%s" which require --allow-root or running as root: %s'
                            % (root_alias.rstrip('/'), ', '.join(root_skipped)))

    if not args.allow_destructive:
        destructive_alias = 'destructive/'
        # targets explicitly included via their destructive/ alias are allowed to run
        override_destructive = set(name for name in args.include if name.startswith(destructive_alias))
        override = [target.name for target in targets if override_destructive & set(target.aliases)]
        destructive_skipped = [target.name for target in targets if destructive_alias in target.aliases and target.name not in override]

        if destructive_skipped:
            exclude.extend(destructive_skipped)
            display.warning('Excluding tests marked "%s" which require --allow-destructive or prefixing with "destructive/" to run locally: %s'
                            % (destructive_alias.rstrip('/'), ', '.join(destructive_skipped)))

    exclude_targets_by_python_version(targets, args.python_version, exclude)

    return exclude
def get_integration_docker_filter(args, targets):
    """Return the target exclusions that apply when running tests under docker.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    exclude = []

    common_integration_filter(args, targets, exclude)

    docker_alias = 'skip/docker/'
    docker_skipped = [target.name for target in targets if docker_alias in target.aliases]

    if docker_skipped:
        exclude.append(docker_alias)
        display.warning('Excluding tests marked "%s" which cannot run under docker: %s'
                        % (docker_alias.rstrip('/'), ', '.join(docker_skipped)))

    if not args.docker_privileged:
        privileged_alias = 'needs/privileged/'
        privileged_skipped = [target.name for target in targets if privileged_alias in target.aliases]

        if privileged_skipped:
            exclude.append(privileged_alias)
            display.warning('Excluding tests marked "%s" which require --docker-privileged to run under docker: %s'
                            % (privileged_alias.rstrip('/'), ', '.join(privileged_skipped)))

    exclude_targets_by_python_version(targets, get_python_version(args, get_docker_completion(), args.docker_raw), exclude)

    return exclude
def get_integration_remote_filter(args, targets):
    """Return the target exclusions that apply when running tests on a remote instance.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    remote = args.parsed_remote

    exclude = []

    common_integration_filter(args, targets, exclude)

    # map each applicable skip alias to a description of the platform it matches
    skips = {
        'skip/%s' % remote.platform: remote.platform,
        'skip/%s/%s' % (remote.platform, remote.version): '%s %s' % (remote.platform, remote.version),
        'skip/%s%s' % (remote.platform, remote.version): '%s %s' % (remote.platform, remote.version),  # legacy syntax, use above format
    }

    if remote.arch:
        skips.update({
            'skip/%s/%s' % (remote.arch, remote.platform): '%s on %s' % (remote.platform, remote.arch),
            'skip/%s/%s/%s' % (remote.arch, remote.platform, remote.version): '%s %s on %s' % (remote.platform, remote.version, remote.arch),
        })

    for skip_alias, description in skips.items():
        matched = [target.name for target in targets if skip_alias in target.skips]

        if matched:
            exclude.append(skip_alias + '/')
            display.warning('Excluding tests marked "%s" which are not supported on %s: %s' % (skip_alias, description, ', '.join(matched)))

    exclude_targets_by_python_version(targets, get_python_version(args, get_remote_completion(), args.remote), exclude)

    return exclude
def exclude_targets_by_python_version(targets, python_version, exclude):
    """Exclude targets which do not support the given Python version, extending the exclude list in place.

    :type targets: tuple[IntegrationTarget]
    :type python_version: str
    :type exclude: list[str]
    """
    if not python_version:
        display.warning('Python version unknown. Unable to skip tests based on Python version.')
        return

    python_major_version = python_version.split('.')[0]

    # check skip aliases for the full version (e.g. skip/python3.6/) and the major version (e.g. skip/python3/)
    for version in (python_version, python_major_version):
        skip = 'skip/python%s/' % version
        skipped = [target.name for target in targets if skip in target.aliases]

        if skipped:
            exclude.append(skip)
            display.warning('Excluding tests marked "%s" which are not supported on python %s: %s'
                            % (skip.rstrip('/'), python_version, ', '.join(skipped)))
def get_python_version(args, configs, name):
    """Return the Python version to use for the named completion entry.

    :type args: EnvironmentConfig
    :type configs: dict[str, dict[str, str]]
    :type name: str
    """
    config_python = configs.get(name, {}).get('python')

    if not config_python:
        if args.python:
            return args.python

        display.warning('No Python version specified. '
                        'Use completion config or the --python option to specify one.', unique=True)

        return ''  # failure to provide a version may result in failures or reduced functionality later

    supported_versions = config_python.split(',')

    if args.python and args.python not in supported_versions:
        raise ApplicationError('Python %s is not supported by %s. Supported Python version(s) are: %s' % (
            args.python, name, ', '.join(sorted(supported_versions))))

    # the first version in the completion config is the default
    return args.python or supported_versions[0]
def get_python_interpreter(args, configs, name):
    """Return the path of the Python interpreter to use for the named completion entry.

    :type args: EnvironmentConfig
    :type configs: dict[str, dict[str, str]]
    :type name: str
    """
    if args.python_interpreter:
        return args.python_interpreter

    config = configs.get(name, {})

    if not config:
        guess = 'python%s' % args.python if args.python else 'python'

        display.warning('Using "%s" as the Python interpreter. '
                        'Use completion config or the --python-interpreter option to specify the path.' % guess, unique=True)

        return guess

    python_version = get_python_version(args, configs, name)

    # default to <python_dir>/pythonX.Y unless the config names an explicit interpreter for this version
    default_interpreter = os.path.join(config.get('python_dir', '/usr/bin'), 'python%s' % python_version)

    return config.get('python%s' % python_version, default_interpreter)
class EnvironmentDescription:
"""Description of current running environment."""
def __init__(self, args):
"""Initialize snapshot of environment configuration.
:type args: IntegrationConfig
"""
self.args = args
if self.args.explain:
self.data = {}
return
warnings = []
versions = ['']
versions += SUPPORTED_PYTHON_VERSIONS
versions += list(set(v.split('.')[0] for v in SUPPORTED_PYTHON_VERSIONS))
version_check = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'versions.py')
python_paths = dict((v, find_executable('python%s' % v, required=False)) for v in sorted(versions))
pip_paths = dict((v, find_executable('pip%s' % v, required=False)) for v in sorted(versions))
program_versions = dict((v, self.get_version([python_paths[v], version_check], warnings)) for v in sorted(python_paths) if python_paths[v])
pip_interpreters = dict((v, self.get_shebang(pip_paths[v])) for v in sorted(pip_paths) if pip_paths[v])
known_hosts_hash = get_hash(os.path.expanduser('~/.ssh/known_hosts'))
for version in sorted(versions):
self.check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings)
for warning in warnings:
display.warning(warning, unique=True)
self.data = dict(
python_paths=python_paths,
pip_paths=pip_paths,
program_versions=program_versions,
pip_interpreters=pip_interpreters,
known_hosts_hash=known_hosts_hash,
warnings=warnings,
)
@staticmethod
def check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings):
"""
:type version: str
:param python_paths: dict[str, str]
:param pip_paths: dict[str, str]
:param pip_interpreters: dict[str, str]
:param warnings: list[str]
"""
python_label = 'Python%s' % (' %s' % version if version else '')
pip_path = pip_paths.get(version)
python_path = python_paths.get(version)
if not python_path or not pip_path:
# skip checks when either python or pip are missing for this version
return
pip_shebang = pip_interpreters.get(version)
match = re.search(r'#!\s*(?P<command>[^\s]+)', pip_shebang)
if not match:
warnings.append('A %s pip was found at "%s", but it does not have a valid shebang: %s' % (python_label, pip_path, pip_shebang))
return
pip_interpreter = os.path.realpath(match.group('command'))
python_interpreter = os.path.realpath(python_path)
if pip_interpreter == python_interpreter:
return
try:
identical = filecmp.cmp(pip_interpreter, python_interpreter)
except OSError:
identical = False
if identical:
return
warnings.append('A %s pip was found at "%s", but it uses interpreter "%s" instead of "%s".' % (
python_label, pip_path, pip_interpreter, python_interpreter))
def __str__(self):
"""
:rtype: str
"""
return json.dumps(self.data, sort_keys=True, indent=4)
def validate(self, target_name, throw):
"""
:type target_name: str
:type throw: bool
:rtype: bool
"""
current = EnvironmentDescription(self.args)
return self.check(self, current, target_name, throw)
@staticmethod
def check(original, current, target_name, throw):
    """Report whether two environment descriptions match, complaining when they do not.

    When the environments differ, either raises (``throw`` is true) or logs an
    error (``throw`` is false) that includes both snapshots and a unified diff.

    :type original: EnvironmentDescription
    :type current: EnvironmentDescription
    :type target_name: str
    :type throw: bool
    :rtype: bool
    """
    before = str(original)
    after = str(current)
    if before == after:
        return True
    diff_lines = difflib.unified_diff(
        a=before.splitlines(),
        b=after.splitlines(),
        fromfile='original.json',
        tofile='current.json',
        lineterm='',
    )
    message = ('Test target "%s" has changed the test environment!\n'
               'If these changes are necessary, they must be reverted before the test finishes.\n'
               '>>> Original Environment\n'
               '%s\n'
               '>>> Current Environment\n'
               '%s\n'
               '>>> Environment Diff\n'
               '%s'
               % (target_name, before, after, '\n'.join(diff_lines)))
    if throw:
        raise ApplicationError(message)
    display.error(message)
    return False
@staticmethod
def get_version(command, warnings):
    """Run a version-reporting command and return its output lines, or None on failure.

    Any failure is recorded in ``warnings`` rather than raised.

    :type command: list[str]
    :type warnings: list[text]
    :rtype: list[str]
    """
    try:
        stdout, stderr = raw_command(command, capture=True, cmd_verbosity=2)
    except SubprocessError as ex:
        warnings.append(u'%s' % ex)
        return None  # all failures are equal, we don't care why it failed, only that it did
    # Combine both streams before splitting, matching the original behavior.
    combined = (stdout or '').strip() + (stderr or '').strip()
    return [line.strip() for line in combined.splitlines()]
@staticmethod
def get_shebang(path):
    """Return the first line of the script at ``path``, stripped of whitespace.

    :type path: str
    :rtype: str
    """
    with open_text_file(path) as script_fd:
        first_line = script_fd.readline()
    return first_line.strip()
class NoChangesDetected(ApplicationWarning):
    """Warning raised when change detection ran but found nothing changed."""
    def __init__(self):
        # py2-compatible super call, matching the rest of the file.
        super(NoChangesDetected, self).__init__('No changes detected.')
class NoTestsForChanges(ApplicationWarning):
    """Warning raised when changes were detected but they trigger no tests."""
    def __init__(self):
        # py2-compatible super call, matching the rest of the file.
        super(NoTestsForChanges, self).__init__('No tests found for detected changes.')
class Delegate(Exception):
    """Raised to request delegation of the current command to another environment."""
    def __init__(self, exclude=None, require=None, integration_targets=None):
        """
        :type exclude: list[str] | None
        :type require: list[str] | None
        :type integration_targets: tuple[IntegrationTarget] | None
        """
        super(Delegate, self).__init__()
        # Normalize omitted/empty arguments to empty containers so callers can
        # iterate these attributes without None checks.
        self.exclude = exclude if exclude else []
        self.require = require if require else []
        self.integration_targets = integration_targets if integration_targets else tuple()
class AllTargetsSkipped(ApplicationWarning):
    """Warning raised when every selected target was skipped."""
    def __init__(self):
        # py2-compatible super call, matching the rest of the file.
        super(AllTargetsSkipped, self).__init__('All targets skipped.')
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,146 |
docker_swarm contains deprecated call to be removed in 2.12
|
##### SUMMARY
docker_swarm contains a call to Display.deprecated or AnsibleModule.deprecate that is scheduled for removal
```
test/support/integration/plugins/modules/docker_swarm.py:451:12: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
test/support/integration/plugins/modules/docker_swarm.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74146
|
https://github.com/ansible/ansible/pull/74203
|
9c506031fa8340dd6d6ba68f66d32154fcb90d80
|
8c413749fc1062f293a77482710f22c234dd3ebd
| 2021-04-05T20:34:09Z |
python
| 2021-04-10T03:38:33Z |
test/sanity/ignore.txt
|
docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst no-smart-quotes
examples/play.yml shebang
examples/scripts/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/my_test.py shebang # example module but not in a normal module location
examples/scripts/my_test_facts.py shebang # example module but not in a normal module location
examples/scripts/my_test_info.py shebang # example module but not in a normal module location
examples/scripts/upgrade_to_ps3.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/upgrade_to_ps3.ps1 pslint:PSUseApprovedVerbs
hacking/build-ansible.py shebang # only run by release engineers, Python 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.6!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.7!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-3.5!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.6!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.7!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-3.5!skip # release and docs process only, 3.6+ required
lib/ansible/cli/console.py pylint:blacklisted-name
lib/ansible/cli/galaxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/cli/scripts/ansible_cli_stub.py pylint:ansible-deprecated-version
lib/ansible/cli/scripts/ansible_cli_stub.py shebang
lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang
lib/ansible/config/base.yml no-unwanted-files
lib/ansible/executor/playbook_executor.py pylint:blacklisted-name
lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/task_queue_manager.py pylint:blacklisted-name
lib/ansible/galaxy/collection/__init__.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/collection/galaxy_api_proxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/dataclasses.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/providers.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/keyword_desc.yml no-unwanted-files
lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py pylint:blacklisted-name
lib/ansible/module_utils/compat/selinux.py import-2.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-2.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.5!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/distro/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py no-assert
lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
lib/ansible/module_utils/facts/__init__.py empty-init # breaks namespacing, deprecate and eventually remove
lib/ansible/module_utils/facts/network/linux.py pylint:blacklisted-name
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.ArgvParser.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSProvideCommentHelp # need to agree on best format for comment location
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSProvideCommentHelp
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.LinkUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/pycompat24.py no-get-exception
lib/ansible/module_utils/six/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/six/__init__.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py no-basestring
lib/ansible/module_utils/six/__init__.py no-dict-iteritems
lib/ansible/module_utils/six/__init__.py no-dict-iterkeys
lib/ansible/module_utils/six/__init__.py no-dict-itervalues
lib/ansible/module_utils/six/__init__.py pylint:self-assigning-variable
lib/ansible/module_utils/six/__init__.py replace-urlopen
lib/ansible/module_utils/urls.py pylint:blacklisted-name
lib/ansible/module_utils/urls.py replace-urlopen
lib/ansible/modules/apt.py validate-modules:parameter-invalid
lib/ansible/modules/apt_key.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/apt_repository.py validate-modules:parameter-invalid
lib/ansible/modules/assemble.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/async_status.py use-argspec-type-path
lib/ansible/modules/async_status.py validate-modules!skip
lib/ansible/modules/async_wrapper.py ansible-doc!skip # not an actual module
lib/ansible/modules/async_wrapper.py pylint:ansible-bad-function # ignore, required
lib/ansible/modules/async_wrapper.py use-argspec-type-path
lib/ansible/modules/blockinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/blockinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/command.py validate-modules:doc-default-does-not-match-spec # _uses_shell is undocumented
lib/ansible/modules/command.py validate-modules:doc-missing-type
lib/ansible/modules/command.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/command.py validate-modules:undocumented-parameter
lib/ansible/modules/copy.py pylint:blacklisted-name
lib/ansible/modules/copy.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/copy.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/copy.py validate-modules:undocumented-parameter
lib/ansible/modules/dnf.py validate-modules:doc-required-mismatch
lib/ansible/modules/dnf.py validate-modules:parameter-invalid
lib/ansible/modules/file.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/file.py validate-modules:undocumented-parameter
lib/ansible/modules/find.py use-argspec-type-path # fix needed
lib/ansible/modules/git.py pylint:blacklisted-name
lib/ansible/modules/git.py use-argspec-type-path
lib/ansible/modules/git.py validate-modules:doc-missing-type
lib/ansible/modules/git.py validate-modules:doc-required-mismatch
lib/ansible/modules/hostname.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/iptables.py pylint:blacklisted-name
lib/ansible/modules/lineinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/package_facts.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/pip.py pylint:blacklisted-name
lib/ansible/modules/pip.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/replace.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:use-run-command-not-popen
lib/ansible/modules/stat.py validate-modules:doc-default-does-not-match-spec # get_md5 is undocumented
lib/ansible/modules/stat.py validate-modules:parameter-invalid
lib/ansible/modules/stat.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/stat.py validate-modules:undocumented-parameter
lib/ansible/modules/systemd.py validate-modules:parameter-invalid
lib/ansible/modules/systemd.py validate-modules:return-syntax-error
lib/ansible/modules/sysvinit.py validate-modules:return-syntax-error
lib/ansible/modules/unarchive.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/uri.py pylint:blacklisted-name
lib/ansible/modules/uri.py validate-modules:doc-required-mismatch
lib/ansible/modules/user.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/user.py validate-modules:doc-default-incompatible-type
lib/ansible/modules/user.py validate-modules:use-run-command-not-popen
lib/ansible/modules/yum.py pylint:blacklisted-name
lib/ansible/modules/yum.py validate-modules:parameter-invalid
lib/ansible/modules/yum_repository.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/yum_repository.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/yum_repository.py validate-modules:undocumented-parameter
lib/ansible/parsing/vault/__init__.py pylint:blacklisted-name
lib/ansible/playbook/base.py pylint:blacklisted-name
lib/ansible/playbook/collectionsearch.py required-and-default-attributes # https://github.com/ansible/ansible/issues/61460
lib/ansible/playbook/conditional.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:blacklisted-name
lib/ansible/playbook/play_context.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/async_status.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/normal.py action-plugin-docs # default action plugin for modules without a dedicated action plugin
lib/ansible/plugins/cache/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/cache/base.py ansible-doc!skip # not a plugin, but a stub for backwards compatibility
lib/ansible/plugins/inventory/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/inventory/script.py pylint:ansible-deprecated-version
lib/ansible/plugins/lookup/sequence.py pylint:blacklisted-name
lib/ansible/plugins/strategy/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/strategy/__init__.py pylint:blacklisted-name
lib/ansible/plugins/strategy/linear.py pylint:blacklisted-name
lib/ansible/vars/fact_cache.py pylint:ansible-deprecated-version
lib/ansible/vars/hostvars.py pylint:blacklisted-name
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-function # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import-from # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/collections_plugin_namespace/collection_root/ansible_collections/my_ns/my_col/plugins/lookup/lookup_no_future_boilerplate.py future-import-boilerplate # testing Python 2.x implicit relative imports
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util2.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util3.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/my_module.py pylint:relative-beyond-top-level
test/integration/targets/gathering_facts/library/bogus_facts shebang
test/integration/targets/gathering_facts/library/facts_one shebang
test/integration/targets/gathering_facts/library/facts_two shebang
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps1 pslint!skip
test/integration/targets/incidental_win_reboot/templates/post_reboot.ps1 pslint!skip
test/integration/targets/json_cleanup/library/bad_json shebang
test/integration/targets/lookup_csvfile/files/crlf.csv line-endings
test/integration/targets/lookup_ini/lookup-8859-15.ini no-smart-quotes
test/integration/targets/module_precedence/lib_with_extension/a.ini shebang
test/integration/targets/module_precedence/lib_with_extension/ping.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/a.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/ping.ini shebang
test/integration/targets/module_utils/library/test.py future-import-boilerplate # allow testing of Python 2.x implicit relative imports
test/integration/targets/module_utils/module_utils/bar0/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/bar.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/yak/zebra/foo.py pylint:blacklisted-name
test/integration/targets/old_style_modules_posix/library/helloworld.sh shebang
test/integration/targets/template/files/encoding_1252_utf-8.expected no-smart-quotes
test/integration/targets/template/files/encoding_1252_windows-1252.expected no-smart-quotes
test/integration/targets/template/files/foo.dos.txt line-endings
test/integration/targets/template/templates/encoding_1252.j2 no-smart-quotes
test/integration/targets/unicode/unicode.yml no-smart-quotes
test/integration/targets/win_exec_wrapper/library/test_fail.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_exec_wrapper/tasks/main.yml no-smart-quotes # We are explicitly testing smart quote support for env vars
test/integration/targets/win_fetch/tasks/main.yml no-smart-quotes # We are explicitly testing smart quotes in the file name to fetch
test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_script/files/test_script.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_creates_file.ps1 pslint:PSAvoidUsingCmdletAliases
test/integration/targets/win_script/files/test_script_removes_file.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_script/files/test_script_with_args.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip
test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt test-constraints
test/lib/ansible_test/_data/requirements/sanity.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py use-compat-six
test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
test/lib/ansible_test/_data/setup/windows-httptester.ps1 pslint:PSCustomUseLiteralPath
test/support/integration/plugins/module_utils/aws/core.py pylint:property-with-parameters
test/support/integration/plugins/module_utils/cloud.py future-import-boilerplate
test/support/integration/plugins/module_utils/cloud.py metaclass-boilerplate
test/support/integration/plugins/module_utils/cloud.py pylint:isinstance-second-argument-not-valid-type
test/support/integration/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/integration/plugins/module_utils/database.py future-import-boilerplate
test/support/integration/plugins/module_utils/database.py metaclass-boilerplate
test/support/integration/plugins/module_utils/mysql.py future-import-boilerplate
test/support/integration/plugins/module_utils/mysql.py metaclass-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/integration/plugins/module_utils/postgres.py future-import-boilerplate
test/support/integration/plugins/module_utils/postgres.py metaclass-boilerplate
test/support/integration/plugins/modules/docker_swarm.py pylint:ansible-deprecated-version
test/support/integration/plugins/modules/lvg.py pylint:blacklisted-name
test/support/integration/plugins/modules/timezone.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py pep8:E203
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py pep8:E501
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pep8:E231
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py metaclass-boilerplate
test/support/windows-integration/plugins/modules/async_status.ps1 pslint!skip
test/support/windows-integration/plugins/modules/setup.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_copy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_dsc.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_feature.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_find.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_lineinfile.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_regedit.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_security_policy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_shell.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_wait_for.ps1 pslint!skip
test/units/executor/test_play_iterator.py pylint:blacklisted-name
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-no-version
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-version
test/units/module_utils/basic/test_run_command.py pylint:blacklisted-name
test/units/module_utils/urls/fixtures/multipart.txt line-endings # Fixture for HTTP tests that use CRLF
test/units/module_utils/urls/test_Request.py replace-urlopen
test/units/module_utils/urls/test_fetch_url.py replace-urlopen
test/units/modules/test_apt.py pylint:blacklisted-name
test/units/parsing/vault/test_vault.py pylint:blacklisted-name
test/units/playbook/role/test_role.py pylint:blacklisted-name
test/units/plugins/test_plugins.py pylint:blacklisted-name
test/units/template/test_templar.py pylint:blacklisted-name
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py pylint:relative-beyond-top-level
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py future-import-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py metaclass-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/test_collection_loader.py pylint:undefined-variable # magic runtime local var splatting
test/utils/shippable/check_matrix.py replace-urlopen
test/utils/shippable/timing.py shebang
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,146 |
docker_swarm contains deprecated call to be removed in 2.12
|
##### SUMMARY
docker_swarm contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
test/support/integration/plugins/modules/docker_swarm.py:451:12: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
test/support/integration/plugins/modules/docker_swarm.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74146
|
https://github.com/ansible/ansible/pull/74203
|
9c506031fa8340dd6d6ba68f66d32154fcb90d80
|
8c413749fc1062f293a77482710f22c234dd3ebd
| 2021-04-05T20:34:09Z |
python
| 2021-04-10T03:38:33Z |
test/support/integration/plugins/modules/docker_swarm.py
|
#!/usr/bin/python
# Copyright 2016 Red Hat | Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: docker_swarm
short_description: Manage Swarm cluster
version_added: "2.7"
description:
- Create a new Swarm cluster.
- Add/Remove nodes or managers to an existing cluster.
options:
advertise_addr:
description:
- Externally reachable address advertised to other nodes.
- This can either be an address/port combination
in the form C(192.168.1.1:4567), or an interface followed by a
port number, like C(eth0:4567).
- If the port number is omitted,
the port number from the listen address is used.
- If I(advertise_addr) is not specified, it will be automatically
detected when possible.
- Only used when swarm is initialised or joined. Because of this it's not
considered for idempotency checking.
type: str
default_addr_pool:
description:
- Default address pool in CIDR format.
- Only used when swarm is initialised. Because of this it's not considered
for idempotency checking.
- Requires API version >= 1.39.
type: list
elements: str
version_added: "2.8"
subnet_size:
description:
- Default address pool subnet mask length.
- Only used when swarm is initialised. Because of this it's not considered
for idempotency checking.
- Requires API version >= 1.39.
type: int
version_added: "2.8"
listen_addr:
description:
- Listen address used for inter-manager communication.
- This can either be an address/port combination in the form
C(192.168.1.1:4567), or an interface followed by a port number,
like C(eth0:4567).
- If the port number is omitted, the default swarm listening port
is used.
- Only used when swarm is initialised or joined. Because of this it's not
considered for idempotency checking.
type: str
default: 0.0.0.0:2377
force:
description:
- Use with state C(present) to force creating a new Swarm, even if already part of one.
- Use with state C(absent) to Leave the swarm even if this node is a manager.
type: bool
default: no
state:
description:
- Set to C(present), to create/update a new cluster.
- Set to C(join), to join an existing cluster.
- Set to C(absent), to leave an existing cluster.
- Set to C(remove), to remove an absent node from the cluster.
Note that removing requires Docker SDK for Python >= 2.4.0.
- Set to C(inspect) to display swarm informations.
type: str
default: present
choices:
- present
- join
- absent
- remove
- inspect
node_id:
description:
- Swarm id of the node to remove.
- Used with I(state=remove).
type: str
join_token:
description:
- Swarm token used to join a swarm cluster.
- Used with I(state=join).
type: str
remote_addrs:
description:
- Remote address of one or more manager nodes of an existing Swarm to connect to.
- Used with I(state=join).
type: list
elements: str
task_history_retention_limit:
description:
- Maximum number of tasks history stored.
- Docker default value is C(5).
type: int
snapshot_interval:
description:
- Number of logs entries between snapshot.
- Docker default value is C(10000).
type: int
keep_old_snapshots:
description:
- Number of snapshots to keep beyond the current snapshot.
- Docker default value is C(0).
type: int
log_entries_for_slow_followers:
description:
- Number of log entries to keep around to sync up slow followers after a snapshot is created.
type: int
heartbeat_tick:
description:
- Amount of ticks (in seconds) between each heartbeat.
- Docker default value is C(1s).
type: int
election_tick:
description:
- Amount of ticks (in seconds) needed without a leader to trigger a new election.
- Docker default value is C(10s).
type: int
dispatcher_heartbeat_period:
description:
- The delay for an agent to send a heartbeat to the dispatcher.
- Docker default value is C(5s).
type: int
node_cert_expiry:
description:
- Automatic expiry for nodes certificates.
- Docker default value is C(3months).
type: int
name:
description:
- The name of the swarm.
type: str
labels:
description:
- User-defined key/value metadata.
- Label operations in this module apply to the docker swarm cluster.
Use M(docker_node) module to add/modify/remove swarm node labels.
- Requires API version >= 1.32.
type: dict
signing_ca_cert:
description:
- The desired signing CA certificate for all swarm node TLS leaf certificates, in PEM format.
- This must not be a path to a certificate, but the contents of the certificate.
- Requires API version >= 1.30.
type: str
signing_ca_key:
description:
- The desired signing CA key for all swarm node TLS leaf certificates, in PEM format.
- This must not be a path to a key, but the contents of the key.
- Requires API version >= 1.30.
type: str
ca_force_rotate:
description:
- An integer whose purpose is to force swarm to generate a new signing CA certificate and key,
if none have been specified.
- Docker default value is C(0).
- Requires API version >= 1.30.
type: int
autolock_managers:
description:
- If set, generate a key and use it to lock data stored on the managers.
- Docker default value is C(no).
- M(docker_swarm_info) can be used to retrieve the unlock key.
type: bool
rotate_worker_token:
description: Rotate the worker join token.
type: bool
default: no
rotate_manager_token:
description: Rotate the manager join token.
type: bool
default: no
extends_documentation_fragment:
- docker
- docker.docker_py_1_documentation
requirements:
- "L(Docker SDK for Python,https://docker-py.readthedocs.io/en/stable/) >= 1.10.0 (use L(docker-py,https://pypi.org/project/docker-py/) for Python 2.6)"
- Docker API >= 1.25
author:
- Thierry Bouvet (@tbouvet)
- Piotr Wojciechowski (@WojciechowskiPiotr)
'''
EXAMPLES = '''
- name: Init a new swarm with default parameters
docker_swarm:
state: present
- name: Update swarm configuration
docker_swarm:
state: present
election_tick: 5
- name: Add nodes
docker_swarm:
state: join
advertise_addr: 192.168.1.2
join_token: SWMTKN-1--xxxxx
remote_addrs: [ '192.168.1.1:2377' ]
- name: Leave swarm for a node
docker_swarm:
state: absent
- name: Remove a swarm manager
docker_swarm:
state: absent
force: true
- name: Remove node from swarm
docker_swarm:
state: remove
node_id: mynode
- name: Inspect swarm
docker_swarm:
state: inspect
register: swarm_info
'''
RETURN = '''
swarm_facts:
description: Informations about swarm.
returned: success
type: dict
contains:
JoinTokens:
description: Tokens to connect to the Swarm.
returned: success
type: dict
contains:
Worker:
description: Token to create a new *worker* node
returned: success
type: str
example: SWMTKN-1--xxxxx
Manager:
description: Token to create a new *manager* node
returned: success
type: str
example: SWMTKN-1--xxxxx
UnlockKey:
description: The swarm unlock-key if I(autolock_managers) is C(true).
returned: on success if I(autolock_managers) is C(true)
and swarm is initialised, or if I(autolock_managers) has changed.
type: str
example: SWMKEY-1-xxx
actions:
description: Provides the actions done on the swarm.
returned: when action failed.
type: list
elements: str
example: "['This cluster is already a swarm cluster']"
'''
import json
import traceback
try:
from docker.errors import DockerException, APIError
except ImportError:
# missing Docker SDK for Python handled in ansible.module_utils.docker.common
pass
from ansible.module_utils.docker.common import (
DockerBaseClass,
DifferenceTracker,
RequestException,
)
from ansible.module_utils.docker.swarm import AnsibleDockerSwarmClient
from ansible.module_utils._text import to_native
class TaskParameters(DockerBaseClass):
    """Container for the swarm options this module accepts.

    Every option starts as None; values are filled in either from the
    Ansible task parameters (from_ansible_params) or from the live swarm
    configuration (update_from_swarm_info), so unspecified options fall
    back to whatever is currently active in the cluster.
    """

    def __init__(self):
        super(TaskParameters, self).__init__()
        # Join/init-time options (not part of the idempotency comparison).
        self.advertise_addr = None
        self.listen_addr = None
        self.remote_addrs = None
        self.join_token = None

        # Spec
        self.snapshot_interval = None
        self.task_history_retention_limit = None
        self.keep_old_snapshots = None
        self.log_entries_for_slow_followers = None
        self.heartbeat_tick = None
        self.election_tick = None
        self.dispatcher_heartbeat_period = None
        self.node_cert_expiry = None
        self.name = None
        self.labels = None
        self.log_driver = None
        self.signing_ca_cert = None
        self.signing_ca_key = None
        self.ca_force_rotate = None
        self.autolock_managers = None
        self.rotate_worker_token = None
        self.rotate_manager_token = None
        self.default_addr_pool = None
        self.subnet_size = None

    @staticmethod
    def from_ansible_params(client):
        """Build a TaskParameters instance from the module parameters.

        Only module parameters whose names match an attribute initialised
        in __init__ are copied; the swarm spec is then created from them.
        """
        result = TaskParameters()
        for key, value in client.module.params.items():
            if key in result.__dict__:
                setattr(result, key, value)

        result.update_parameters(client)
        return result

    def update_from_swarm_info(self, swarm_info):
        """Fill attributes that are still None from the active swarm Spec.

        This keeps the later comparison idempotent: options the user did
        not set retain the values currently active in the cluster.
        """
        spec = swarm_info['Spec']

        ca_config = spec.get('CAConfig') or dict()
        if self.node_cert_expiry is None:
            self.node_cert_expiry = ca_config.get('NodeCertExpiry')
        if self.ca_force_rotate is None:
            self.ca_force_rotate = ca_config.get('ForceRotate')

        dispatcher = spec.get('Dispatcher') or dict()
        if self.dispatcher_heartbeat_period is None:
            self.dispatcher_heartbeat_period = dispatcher.get('HeartbeatPeriod')

        raft = spec.get('Raft') or dict()
        if self.snapshot_interval is None:
            self.snapshot_interval = raft.get('SnapshotInterval')
        if self.keep_old_snapshots is None:
            self.keep_old_snapshots = raft.get('KeepOldSnapshots')
        if self.heartbeat_tick is None:
            self.heartbeat_tick = raft.get('HeartbeatTick')
        if self.log_entries_for_slow_followers is None:
            self.log_entries_for_slow_followers = raft.get('LogEntriesForSlowFollowers')
        if self.election_tick is None:
            self.election_tick = raft.get('ElectionTick')

        orchestration = spec.get('Orchestration') or dict()
        if self.task_history_retention_limit is None:
            self.task_history_retention_limit = orchestration.get('TaskHistoryRetentionLimit')

        encryption_config = spec.get('EncryptionConfig') or dict()
        if self.autolock_managers is None:
            self.autolock_managers = encryption_config.get('AutoLockManagers')

        if self.name is None:
            self.name = spec['Name']
        if self.labels is None:
            self.labels = spec.get('Labels') or {}

        if 'LogDriver' in spec['TaskDefaults']:
            self.log_driver = spec['TaskDefaults']['LogDriver']

    def update_parameters(self, client):
        """Create self.spec from the currently-set attributes.

        Options whose minimal Docker SDK/API version is not supported by
        the connected daemon are skipped; None values are omitted so the
        daemon's own defaults apply.
        """
        assign = dict(
            snapshot_interval='snapshot_interval',
            task_history_retention_limit='task_history_retention_limit',
            keep_old_snapshots='keep_old_snapshots',
            log_entries_for_slow_followers='log_entries_for_slow_followers',
            heartbeat_tick='heartbeat_tick',
            election_tick='election_tick',
            dispatcher_heartbeat_period='dispatcher_heartbeat_period',
            node_cert_expiry='node_cert_expiry',
            name='name',
            labels='labels',
            signing_ca_cert='signing_ca_cert',
            signing_ca_key='signing_ca_key',
            ca_force_rotate='ca_force_rotate',
            autolock_managers='autolock_managers',
            log_driver='log_driver',
        )
        params = dict()
        for dest, source in assign.items():
            if not client.option_minimal_versions[source]['supported']:
                continue
            value = getattr(self, source)
            if value is not None:
                params[dest] = value
        self.spec = client.create_swarm_spec(**params)

    def compare_to_active(self, other, client, differences):
        """Record differences between these parameters and the active ones.

        Attributes that only matter at init/join time (addresses, tokens,
        address pool, spec) are excluded from the comparison; the token
        rotation flags always count as a difference when set.
        """
        for k in self.__dict__:
            if k in ('advertise_addr', 'listen_addr', 'remote_addrs', 'join_token',
                     'rotate_worker_token', 'rotate_manager_token', 'spec',
                     'default_addr_pool', 'subnet_size'):
                continue
            if not client.option_minimal_versions[k]['supported']:
                continue
            value = getattr(self, k)
            if value is None:
                continue
            other_value = getattr(other, k)
            if value != other_value:
                differences.add(k, parameter=value, active=other_value)
        if self.rotate_worker_token:
            differences.add('rotate_worker_token', parameter=True, active=False)
        if self.rotate_manager_token:
            differences.add('rotate_manager_token', parameter=True, active=False)
        return differences
class SwarmManager(DockerBaseClass):
    """Dispatch and perform the requested swarm operation.

    Calling an instance runs the handler matching the module's ``state``
    parameter (present/join/absent/remove/inspect) and accumulates the
    results, actions and differences into ``self.results``.
    """

    def __init__(self, client, results):
        super(SwarmManager, self).__init__()

        self.client = client
        self.results = results
        self.check_mode = self.client.check_mode
        self.swarm_info = {}

        self.state = client.module.params['state']
        self.force = client.module.params['force']
        self.node_id = client.module.params['node_id']

        self.differences = DifferenceTracker()
        self.parameters = TaskParameters.from_ansible_params(client)

        # Set to True by init_swarm() when a new cluster was created;
        # used to decide whether the unlock key must be fetched.
        self.created = False

    def __call__(self):
        """Run the handler for the requested state and collect diff data."""
        choice_map = {
            "present": self.init_swarm,
            "join": self.join,
            "absent": self.leave,
            "remove": self.remove,
            "inspect": self.inspect_swarm
        }

        if self.state == 'inspect':
            # NOTE(review): this deprecation targets version '2.12' and is
            # flagged by the ansible-deprecated-version sanity check.
            self.client.module.deprecate(
                "The 'inspect' state is deprecated, please use 'docker_swarm_info' to inspect swarm cluster",
                version='2.12', collection_name='ansible.builtin')

        choice_map.get(self.state)()

        if self.client.module._diff or self.parameters.debug:
            diff = dict()
            diff['before'], diff['after'] = self.differences.get_before_after()
            self.results['diff'] = diff

    def inspect_swarm(self):
        """Store the swarm facts (plus unlock key) in the results.

        API errors are deliberately swallowed: inspection is best-effort
        and also used internally after init/update.
        """
        try:
            data = self.client.inspect_swarm()
            json_str = json.dumps(data, ensure_ascii=False)
            self.swarm_info = json.loads(json_str)

            self.results['changed'] = False
            self.results['swarm_facts'] = self.swarm_info

            unlock_key = self.get_unlock_key()
            self.swarm_info.update(unlock_key)
        except APIError:
            return

    def get_unlock_key(self):
        """Return {'UnlockKey': ...}; None unless the lock state changed."""
        default = {'UnlockKey': None}
        if not self.has_swarm_lock_changed():
            return default
        try:
            return self.client.get_unlock_key() or default
        except APIError:
            return default

    def has_swarm_lock_changed(self):
        # Only relevant when autolock is requested and the cluster was just
        # created or the autolock_managers option changed.
        return self.parameters.autolock_managers and (
            self.created or self.differences.has_difference_for('autolock_managers')
        )

    def init_swarm(self):
        """Create a new swarm cluster, or update the existing one.

        Without ``force``, an existing manager is updated in place instead
        of re-initialising the cluster.
        """
        if not self.force and self.client.check_if_swarm_manager():
            self.__update_swarm()
            return

        if not self.check_mode:
            init_arguments = {
                'advertise_addr': self.parameters.advertise_addr,
                'listen_addr': self.parameters.listen_addr,
                'force_new_cluster': self.force,
                'swarm_spec': self.parameters.spec,
            }
            if self.parameters.default_addr_pool is not None:
                init_arguments['default_addr_pool'] = self.parameters.default_addr_pool
            if self.parameters.subnet_size is not None:
                init_arguments['subnet_size'] = self.parameters.subnet_size
            try:
                self.client.init_swarm(**init_arguments)
            except APIError as exc:
                self.client.fail("Can not create a new Swarm Cluster: %s" % to_native(exc))

        if not self.client.check_if_swarm_manager():
            if not self.check_mode:
                self.client.fail("Swarm not created or other error!")

        self.created = True
        self.inspect_swarm()
        self.results['actions'].append("New Swarm cluster created: %s" % (self.swarm_info.get('ID')))
        self.differences.add('state', parameter='present', active='absent')
        self.results['changed'] = True
        self.results['swarm_facts'] = {
            'JoinTokens': self.swarm_info.get('JoinTokens'),
            'UnlockKey': self.swarm_info.get('UnlockKey')
        }

    def __update_swarm(self):
        """Apply parameter changes to an existing swarm (idempotent)."""
        try:
            self.inspect_swarm()
            version = self.swarm_info['Version']['Index']
            self.parameters.update_from_swarm_info(self.swarm_info)
            old_parameters = TaskParameters()
            old_parameters.update_from_swarm_info(self.swarm_info)
            self.parameters.compare_to_active(old_parameters, self.client, self.differences)
            if self.differences.empty:
                self.results['actions'].append("No modification")
                self.results['changed'] = False
                return
            update_parameters = TaskParameters.from_ansible_params(self.client)
            update_parameters.update_parameters(self.client)
            if not self.check_mode:
                self.client.update_swarm(
                    version=version, swarm_spec=update_parameters.spec,
                    rotate_worker_token=self.parameters.rotate_worker_token,
                    rotate_manager_token=self.parameters.rotate_manager_token)
        except APIError as exc:
            self.client.fail("Can not update a Swarm Cluster: %s" % to_native(exc))
            return

        self.inspect_swarm()
        self.results['actions'].append("Swarm cluster updated")
        self.results['changed'] = True

    def join(self):
        """Join this node to an existing swarm (no-op if already joined)."""
        if self.client.check_if_swarm_node():
            self.results['actions'].append("This node is already part of a swarm.")
            return
        if not self.check_mode:
            try:
                self.client.join_swarm(
                    remote_addrs=self.parameters.remote_addrs, join_token=self.parameters.join_token,
                    listen_addr=self.parameters.listen_addr, advertise_addr=self.parameters.advertise_addr)
            except APIError as exc:
                self.client.fail("Can not join the Swarm Cluster: %s" % to_native(exc))
        self.results['actions'].append("New node is added to swarm cluster")
        self.differences.add('joined', parameter=True, active=False)
        self.results['changed'] = True

    def leave(self):
        """Make this node leave the swarm (no-op if not a member)."""
        if not self.client.check_if_swarm_node():
            self.results['actions'].append("This node is not part of a swarm.")
            return
        if not self.check_mode:
            try:
                self.client.leave_swarm(force=self.force)
            except APIError as exc:
                self.client.fail("This node can not leave the Swarm Cluster: %s" % to_native(exc))
        self.results['actions'].append("Node has left the swarm cluster")
        self.differences.add('joined', parameter='absent', active='present')
        self.results['changed'] = True

    def remove(self):
        """Remove a (down) node from the cluster; must run on a manager."""
        if not self.client.check_if_swarm_manager():
            self.client.fail("This node is not a manager.")

        try:
            status_down = self.client.check_if_swarm_node_is_down(node_id=self.node_id, repeat_check=5)
        except APIError:
            return

        if not status_down:
            self.client.fail("Can not remove the node. The status node is ready and not down.")

        if not self.check_mode:
            try:
                self.client.remove_node(node_id=self.node_id, force=self.force)
            except APIError as exc:
                self.client.fail("Can not remove the node from the Swarm Cluster: %s" % to_native(exc))
        self.results['actions'].append("Node is removed from swarm cluster.")
        self.differences.add('joined', parameter=False, active=True)
        self.results['changed'] = True
def _detect_remove_operation(client):
return client.module.params['state'] == 'remove'
def main():
    """Module entry point: build the client, run SwarmManager, exit.

    Defines the argument spec, per-option minimal Docker SDK/API version
    requirements, and conditional requirements for join/remove states.
    """
    argument_spec = dict(
        advertise_addr=dict(type='str'),
        state=dict(type='str', default='present', choices=['present', 'join', 'absent', 'remove', 'inspect']),
        force=dict(type='bool', default=False),
        listen_addr=dict(type='str', default='0.0.0.0:2377'),
        remote_addrs=dict(type='list', elements='str'),
        join_token=dict(type='str'),
        snapshot_interval=dict(type='int'),
        task_history_retention_limit=dict(type='int'),
        keep_old_snapshots=dict(type='int'),
        log_entries_for_slow_followers=dict(type='int'),
        heartbeat_tick=dict(type='int'),
        election_tick=dict(type='int'),
        dispatcher_heartbeat_period=dict(type='int'),
        node_cert_expiry=dict(type='int'),
        name=dict(type='str'),
        labels=dict(type='dict'),
        signing_ca_cert=dict(type='str'),
        signing_ca_key=dict(type='str'),
        ca_force_rotate=dict(type='int'),
        autolock_managers=dict(type='bool'),
        node_id=dict(type='str'),
        rotate_worker_token=dict(type='bool', default=False),
        rotate_manager_token=dict(type='bool', default=False),
        default_addr_pool=dict(type='list', elements='str'),
        subnet_size=dict(type='int'),
    )

    # state=join needs connection details; state=remove needs the node id.
    required_if = [
        ('state', 'join', ['advertise_addr', 'remote_addrs', 'join_token']),
        ('state', 'remove', ['node_id'])
    ]

    # Minimal Docker SDK for Python / Docker API versions per option.
    option_minimal_versions = dict(
        labels=dict(docker_py_version='2.6.0', docker_api_version='1.32'),
        signing_ca_cert=dict(docker_py_version='2.6.0', docker_api_version='1.30'),
        signing_ca_key=dict(docker_py_version='2.6.0', docker_api_version='1.30'),
        ca_force_rotate=dict(docker_py_version='2.6.0', docker_api_version='1.30'),
        autolock_managers=dict(docker_py_version='2.6.0'),
        log_driver=dict(docker_py_version='2.6.0'),
        remove_operation=dict(
            docker_py_version='2.4.0',
            detect_usage=_detect_remove_operation,
            usage_msg='remove swarm nodes'
        ),
        default_addr_pool=dict(docker_py_version='4.0.0', docker_api_version='1.39'),
        subnet_size=dict(docker_py_version='4.0.0', docker_api_version='1.39'),
    )

    client = AnsibleDockerSwarmClient(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=required_if,
        min_docker_version='1.10.0',
        min_docker_api_version='1.25',
        option_minimal_versions=option_minimal_versions,
    )

    try:
        results = dict(
            changed=False,
            result='',
            actions=[]
        )

        SwarmManager(client, results)()
        client.module.exit_json(**results)
    except DockerException as e:
        client.fail('An unexpected docker error occurred: {0}'.format(e), exception=traceback.format_exc())
    except RequestException as e:
        client.fail('An unexpected requests error occurred when docker-py tried to talk to the docker daemon: {0}'.format(e), exception=traceback.format_exc())


if __name__ == '__main__':
    main()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,284 |
Yum module only reports upgraded x86_64 packages when i686 packages have also been upgraded
|
<!--- Verify first that your issue is not already reported on GitHub -->
<!--- Also test if the latest release and devel branch are affected too -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
I have created an Ansible playbook that invokes the `yum` module with the arguments `name="*" state=latest update_cache=yes update_only=yes`. The managed node is a 64-bit CentOS 7 system containing some i686 packages installed for compatibility with third-party 32-bit applications.
If 32-bit packages are available for upgrade, the `yum` module upgrades them alongside the available 64-bit packages; however, it only reports that the 64-bit packages were upgraded in the transaction. It should report the upgraded 32-bit packages as well.
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
yum module
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible 2.10.4
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/amg1127/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.9/site-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.1 (default, Dec 13 2020, 11:55:53) [GCC 10.2.0]
```
##### CONFIGURATION
<!--- Paste verbatim output from "ansible-config dump --only-changed" between quotes -->
```paste below
# (no output)
```
##### OS / ENVIRONMENT
<!--- Provide all relevant information below, e.g. target OS versions, network device firmware, etc. -->
Controller node: Arch Linux
Managed node: CentOS 7
##### STEPS TO REPRODUCE
<!--- Describe exactly how to reproduce the problem, using a minimal test-case -->
The issue can be reproduced using the below Vagrantfile and Ansible playbook.
```ruby
# Vagrantfile
Vagrant.configure("2") do |config|
config.vm.box = "centos/7"
config.vm.box_version = "1809.1"
config.vm.provider "virtualbox" do |vb|
vb.gui = true
vb.memory = "1024"
end
config.vm.synced_folder ".", "/vagrant"
config.vm.provision "ansible" do |ansible|
ansible.playbook = "./ansible_playbook.yml"
end
end
```
<!--- Paste example playbooks or commands between quotes below -->
```yaml
# ansible_playbook.yml
---
- hosts: all
gather_facts: no
vars:
packages:
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/glibc;2.17-260.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/glibc-common;2.17-260.el7.x86_64.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/libgcc;4.8.5-36.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/libstdc++;4.8.5-36.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/ncurses-libs;5.9-14.20130511.el7_4.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nspr;4.19.0-1.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-softokn;3.36.0-5.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-softokn-freebl;3.36.0-5.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-util;3.36.0-1.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/readline;6.2-10.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/sqlite;3.7.17-8.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/zlib;1.2.7-18.el7.i686.rpm'
tasks:
- name: Clean yum cache
shell: 'yum clean all; rm -Rfv /var/cache/yum'
become: yes
- name: Install some i686 packages alongside x86_64 ones
command:
argv: "{{ ['yum', '-y', 'localinstall', '--disablerepo=*'] + (packages | union(
packages | map('regex_replace', '\\.i686\\.rpm$', '.x86_64.rpm') | list) | map('replace', ';', '-') | list) }}"
become: yes
- name: Update all packages to the latest version
yum:
name: '*'
state: latest
update_cache: yes
update_only: yes
register: updated_packages
retries: 5
delay: 1
until: 'updated_packages is success'
become: yes
- name: Report the list of updated packages
debug:
msg:
- 'The packages below were updated:'
- "{{ updated_packages.changes.updated | default([]) | selectattr(0, 'in', (
packages | map('urlsplit', 'path') | map('basename') |
map('regex_replace', '^([^;]+);.*$', '\\1') | list
)) | list }}"
```
Place those files under an empty folder, install `Vagrant` and `VirtualBox` and launch the command line `vagrant up --provision`.
<!--- HINT: You can paste gist.github.com links for larger files -->
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
`yum` module should provide the below output:
```
TASK [Report the list of updated packages] *************************************
ok: [default] => {
"msg": [
"The packages below were updated:",
[
[
"nspr",
"4.25.0-2.el7_9.i686 from updates"
],
[
"nspr",
"4.25.0-2.el7_9.x86_64 from updates"
],
[
"glibc",
"2.17-317.el7.i686 from base"
],
[
"glibc",
"2.17-317.el7.x86_64 from base"
],
[
"glibc-common",
"2.17-317.el7.x86_64 from base"
],
[
"nss-softokn",
"3.53.1-6.el7_9.i686 from updates"
],
[
"nss-softokn",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"libstdc++",
"4.8.5-44.el7.i686 from base"
],
[
"libstdc++",
"4.8.5-44.el7.x86_64 from base"
],
[
"nss-util",
"3.53.1-1.el7_9.i686 from updates"
],
[
"nss-util",
"3.53.1-1.el7_9.x86_64 from updates"
],
[
"libgcc",
"4.8.5-44.el7.i686 from base"
],
[
"libgcc",
"4.8.5-44.el7.x86_64 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.i686 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.x86_64 from base"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.i686 from updates"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"readline",
"6.2-11.el7.i686 from base"
],
[
"readline",
"6.2-11.el7.x86_64 from base"
]
]
]
}
```
##### ACTUAL RESULTS
<!--- Describe what actually happened. If possible run with extra verbosity (-vvvv) -->
`yum` module provides the below output:
<!--- Paste verbatim command output between quotes -->
```
TASK [Report the list of updated packages] *************************************
ok: [default] => {
"msg": [
"The packages below were updated:",
[
[
"nspr",
"4.25.0-2.el7_9.x86_64 from updates"
],
[
"glibc",
"2.17-317.el7.x86_64 from base"
],
[
"glibc-common",
"2.17-317.el7.x86_64 from base"
],
[
"nss-softokn",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"libstdc++",
"4.8.5-44.el7.x86_64 from base"
],
[
"nss-util",
"3.53.1-1.el7_9.x86_64 from updates"
],
[
"libgcc",
"4.8.5-44.el7.x86_64 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.x86_64 from base"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"readline",
"6.2-11.el7.x86_64 from base"
]
]
]
}
```
|
https://github.com/ansible/ansible/issues/73284
|
https://github.com/ansible/ansible/pull/73548
|
8c413749fc1062f293a77482710f22c234dd3ebd
|
3504f4c45fc044b3ffd3fc96f02a9f261ec87048
| 2021-01-19T08:22:32Z |
python
| 2021-04-10T07:36:20Z |
changelogs/fragments/73284-yum-multiarch.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,284 |
Yum module only reports upgraded x86_64 packages when i686 packages have also been upgraded
|
<!--- Verify first that your issue is not already reported on GitHub -->
<!--- Also test if the latest release and devel branch are affected too -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
I have created an Ansible playbook that invokes the `yum` module with the arguments `name="*" state=latest update_cache=yes update_only=yes`. The managed node is a 64-bit CentOS 7 system containing some i686 packages installed for compatibility with third-party 32-bit applications.
If 32-bit packages are available for upgrade, the `yum` module upgrades them alongside the available 64-bit packages; however, it only reports that the 64-bit packages were upgraded in the transaction. It should report the upgraded 32-bit packages as well.
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
yum module
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible 2.10.4
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/amg1127/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.9/site-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.1 (default, Dec 13 2020, 11:55:53) [GCC 10.2.0]
```
##### CONFIGURATION
<!--- Paste verbatim output from "ansible-config dump --only-changed" between quotes -->
```paste below
# (no output)
```
##### OS / ENVIRONMENT
<!--- Provide all relevant information below, e.g. target OS versions, network device firmware, etc. -->
Controller node: Arch Linux
Managed node: CentOS 7
##### STEPS TO REPRODUCE
<!--- Describe exactly how to reproduce the problem, using a minimal test-case -->
The issue can be reproduced using the below Vagrantfile and Ansible playbook.
```ruby
# Vagrantfile
Vagrant.configure("2") do |config|
config.vm.box = "centos/7"
config.vm.box_version = "1809.1"
config.vm.provider "virtualbox" do |vb|
vb.gui = true
vb.memory = "1024"
end
config.vm.synced_folder ".", "/vagrant"
config.vm.provision "ansible" do |ansible|
ansible.playbook = "./ansible_playbook.yml"
end
end
```
<!--- Paste example playbooks or commands between quotes below -->
```yaml
# ansible_playbook.yml
---
- hosts: all
gather_facts: no
vars:
packages:
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/glibc;2.17-260.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/glibc-common;2.17-260.el7.x86_64.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/libgcc;4.8.5-36.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/libstdc++;4.8.5-36.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/ncurses-libs;5.9-14.20130511.el7_4.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nspr;4.19.0-1.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-softokn;3.36.0-5.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-softokn-freebl;3.36.0-5.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-util;3.36.0-1.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/readline;6.2-10.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/sqlite;3.7.17-8.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/zlib;1.2.7-18.el7.i686.rpm'
tasks:
- name: Clean yum cache
shell: 'yum clean all; rm -Rfv /var/cache/yum'
become: yes
- name: Install some i686 packages alongside x86_64 ones
command:
argv: "{{ ['yum', '-y', 'localinstall', '--disablerepo=*'] + (packages | union(
packages | map('regex_replace', '\\.i686\\.rpm$', '.x86_64.rpm') | list) | map('replace', ';', '-') | list) }}"
become: yes
- name: Update all packages to the latest version
yum:
name: '*'
state: latest
update_cache: yes
update_only: yes
register: updated_packages
retries: 5
delay: 1
until: 'updated_packages is success'
become: yes
- name: Report the list of updated packages
debug:
msg:
- 'The packages below were updated:'
- "{{ updated_packages.changes.updated | default([]) | selectattr(0, 'in', (
packages | map('urlsplit', 'path') | map('basename') |
map('regex_replace', '^([^;]+);.*$', '\\1') | list
)) | list }}"
```
Place those files under an empty folder, install `Vagrant` and `VirtualBox` and launch the command line `vagrant up --provision`.
<!--- HINT: You can paste gist.github.com links for larger files -->
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
`yum` module should provide the below output:
```
TASK [Report the list of updated packages] *************************************
ok: [default] => {
"msg": [
"The packages below were updated:",
[
[
"nspr",
"4.25.0-2.el7_9.i686 from updates"
],
[
"nspr",
"4.25.0-2.el7_9.x86_64 from updates"
],
[
"glibc",
"2.17-317.el7.i686 from base"
],
[
"glibc",
"2.17-317.el7.x86_64 from base"
],
[
"glibc-common",
"2.17-317.el7.x86_64 from base"
],
[
"nss-softokn",
"3.53.1-6.el7_9.i686 from updates"
],
[
"nss-softokn",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"libstdc++",
"4.8.5-44.el7.i686 from base"
],
[
"libstdc++",
"4.8.5-44.el7.x86_64 from base"
],
[
"nss-util",
"3.53.1-1.el7_9.i686 from updates"
],
[
"nss-util",
"3.53.1-1.el7_9.x86_64 from updates"
],
[
"libgcc",
"4.8.5-44.el7.i686 from base"
],
[
"libgcc",
"4.8.5-44.el7.x86_64 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.i686 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.x86_64 from base"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.i686 from updates"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"readline",
"6.2-11.el7.i686 from base"
],
[
"readline",
"6.2-11.el7.x86_64 from base"
]
]
]
}
```
##### ACTUAL RESULTS
<!--- Describe what actually happened. If possible run with extra verbosity (-vvvv) -->
`yum` module provides the below output:
<!--- Paste verbatim command output between quotes -->
```
TASK [Report the list of updated packages] *************************************
ok: [default] => {
"msg": [
"The packages below were updated:",
[
[
"nspr",
"4.25.0-2.el7_9.x86_64 from updates"
],
[
"glibc",
"2.17-317.el7.x86_64 from base"
],
[
"glibc-common",
"2.17-317.el7.x86_64 from base"
],
[
"nss-softokn",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"libstdc++",
"4.8.5-44.el7.x86_64 from base"
],
[
"nss-util",
"3.53.1-1.el7_9.x86_64 from updates"
],
[
"libgcc",
"4.8.5-44.el7.x86_64 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.x86_64 from base"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"readline",
"6.2-11.el7.x86_64 from base"
]
]
]
}
```
|
https://github.com/ansible/ansible/issues/73284
|
https://github.com/ansible/ansible/pull/73548
|
8c413749fc1062f293a77482710f22c234dd3ebd
|
3504f4c45fc044b3ffd3fc96f02a9f261ec87048
| 2021-01-19T08:22:32Z |
python
| 2021-04-10T07:36:20Z |
lib/ansible/modules/yum.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Red Hat, Inc
# Written by Seth Vidal <skvidal at fedoraproject.org>
# Copyright: (c) 2014, Epic Games, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: yum
version_added: historical
short_description: Manages packages with the I(yum) package manager
description:
- Installs, upgrade, downgrades, removes, and lists packages and groups with the I(yum) package manager.
- This module only works on Python 2. If you require Python 3 support see the M(ansible.builtin.dnf) module.
options:
use_backend:
description:
- This module supports C(yum) (as it always has), this is known as C(yum3)/C(YUM3)/C(yum-deprecated) by
upstream yum developers. As of Ansible 2.7+, this module also supports C(YUM4), which is the
"new yum" and it has an C(dnf) backend.
- By default, this module will select the backend based on the C(ansible_pkg_mgr) fact.
default: "auto"
choices: [ auto, yum, yum4, dnf ]
type: str
version_added: "2.7"
name:
description:
- A package name or package specifier with version, like C(name-1.0).
- Comparison operators for package version are valid here C(>), C(<), C(>=), C(<=). Example - C(name>=1.0)
- If a previous version is specified, the task also needs to turn C(allow_downgrade) on.
See the C(allow_downgrade) documentation for caveats with downgrading packages.
- When using state=latest, this can be C('*') which means run C(yum -y update).
- You can also pass a url or a local path to a rpm file (using state=present).
To operate on several packages this can accept a comma separated string of packages or (as of 2.0) a list of packages.
aliases: [ pkg ]
type: list
elements: str
exclude:
description:
- Package name(s) to exclude when state=present, or latest
type: list
elements: str
version_added: "2.0"
list:
description:
- "Package name to run the equivalent of yum list --show-duplicates <package> against. In addition to listing packages,
use can also list the following: C(installed), C(updates), C(available) and C(repos)."
- This parameter is mutually exclusive with C(name).
type: str
state:
description:
- Whether to install (C(present) or C(installed), C(latest)), or remove (C(absent) or C(removed)) a package.
- C(present) and C(installed) will simply ensure that a desired package is installed.
- C(latest) will update the specified package if it's not of the latest available version.
- C(absent) and C(removed) will remove the specified package.
- Default is C(None), however in effect the default action is C(present) unless the C(autoremove) option is
enabled for this module, then C(absent) is inferred.
type: str
choices: [ absent, installed, latest, present, removed ]
enablerepo:
description:
- I(Repoid) of repositories to enable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a C(",").
- As of Ansible 2.7, this can alternatively be a list instead of C(",")
separated string
type: list
elements: str
version_added: "0.9"
disablerepo:
description:
- I(Repoid) of repositories to disable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a C(",").
- As of Ansible 2.7, this can alternatively be a list instead of C(",")
separated string
type: list
elements: str
version_added: "0.9"
conf_file:
description:
- The remote yum configuration file to use for the transaction.
type: str
version_added: "0.6"
disable_gpg_check:
description:
- Whether to disable the GPG checking of signatures of packages being
installed. Has an effect only if state is I(present) or I(latest).
type: bool
default: "no"
version_added: "1.2"
skip_broken:
description:
- Skip packages with broken dependencies (depsolve) that are causing problems.
type: bool
default: "no"
version_added: "2.3"
update_cache:
description:
- Force yum to check if cache is out of date and redownload if needed.
Has an effect only if state is I(present) or I(latest).
type: bool
default: "no"
aliases: [ expire-cache ]
version_added: "1.9"
validate_certs:
description:
- This only applies if using a https url as the source of the rpm. e.g. for localinstall. If set to C(no), the SSL certificates will not be validated.
- This should only be set to C(no) when used on personally controlled sites using self-signed certificates, as it avoids verifying the source site.
- Prior to 2.1 the code worked as if this was set to C(yes).
type: bool
default: "yes"
version_added: "2.1"
update_only:
description:
- When using latest, only update installed packages. Do not install packages.
- Has an effect only if state is I(latest)
default: "no"
type: bool
version_added: "2.5"
installroot:
description:
- Specifies an alternative installroot, relative to which all packages
will be installed.
default: "/"
type: str
version_added: "2.3"
security:
description:
- If set to C(yes), and C(state=latest) then only installs updates that have been marked security related.
type: bool
default: "no"
version_added: "2.4"
bugfix:
description:
- If set to C(yes), and C(state=latest) then only installs updates that have been marked bugfix related.
default: "no"
type: bool
version_added: "2.6"
allow_downgrade:
description:
- Specify if the named package and version is allowed to downgrade
a maybe already installed higher version of that package.
Note that setting allow_downgrade=True can make this module
behave in a non-idempotent way. The task could end up with a set
of packages that does not match the complete list of specified
packages to install (because dependencies between the downgraded
package and others can cause changes to the packages which were
in the earlier transaction).
type: bool
default: "no"
version_added: "2.4"
enable_plugin:
description:
- I(Plugin) name to enable for the install/update operation.
The enabled plugin will not persist beyond the transaction.
type: list
elements: str
version_added: "2.5"
disable_plugin:
description:
- I(Plugin) name to disable for the install/update operation.
The disabled plugins will not persist beyond the transaction.
type: list
elements: str
version_added: "2.5"
releasever:
description:
- Specifies an alternative release from which all packages will be
installed.
type: str
version_added: "2.7"
autoremove:
description:
- If C(yes), removes all "leaf" packages from the system that were originally
installed as dependencies of user-installed packages but which are no longer
required by any such package. Should be used alone or when state is I(absent)
- "NOTE: This feature requires yum >= 3.4.3 (RHEL/CentOS 7+)"
type: bool
default: "no"
version_added: "2.7"
disable_excludes:
description:
- Disable the excludes defined in YUM config files.
- If set to C(all), disables all excludes.
- If set to C(main), disable excludes defined in [main] in yum.conf.
- If set to C(repoid), disable excludes defined for given repo id.
type: str
version_added: "2.7"
download_only:
description:
- Only download the packages, do not install them.
default: "no"
type: bool
version_added: "2.7"
lock_timeout:
description:
- Amount of time to wait for the yum lockfile to be freed.
required: false
default: 30
type: int
version_added: "2.8"
install_weak_deps:
description:
- Will also install all packages linked by a weak dependency relation.
- "NOTE: This feature requires yum >= 4 (RHEL/CentOS 8+)"
type: bool
default: "yes"
version_added: "2.8"
download_dir:
description:
- Specifies an alternate directory to store packages.
- Has an effect only if I(download_only) is specified.
type: str
version_added: "2.8"
install_repoquery:
description:
- If repoquery is not available, install yum-utils. If the system is
registered to RHN or an RHN Satellite, repoquery allows for querying
all channels assigned to the system. It is also required to use the
'list' parameter.
- "NOTE: This will run and be logged as a separate yum transation which
takes place before any other installation or removal."
- "NOTE: This will use the system's default enabled repositories without
regard for disablerepo/enablerepo given to the module."
required: false
version_added: "1.5"
default: "yes"
type: bool
notes:
- When used with a `loop:` each package will be processed individually,
it is much more efficient to pass the list directly to the `name` option.
- In versions prior to 1.9.2 this module installed and removed each package
given to the yum module separately. This caused problems when packages
specified by filename or url had to be installed or removed together. In
1.9.2 this was fixed so that packages are installed in one yum
transaction. However, if one of the packages adds a new yum repository
that the other packages come from (such as epel-release) then that package
needs to be installed in a separate task. This mimics yum's command line
behaviour.
- 'Yum itself has two types of groups. "Package groups" are specified in the
rpm itself while "environment groups" are specified in a separate file
(usually by the distribution). Unfortunately, this division becomes
apparent to ansible users because ansible needs to operate on the group
of packages in a single transaction and yum requires groups to be specified
in different ways when used in that way. Package groups are specified as
"@development-tools" and environment groups are "@^gnome-desktop-environment".
Use the "yum group list hidden ids" command to see which category of group the group
you want to install falls into.'
- 'The yum module does not support clearing yum cache in an idempotent way, so it
was decided not to implement it, the only method is to use command and call the yum
command directly, namely "command: yum clean all"
https://github.com/ansible/ansible/pull/31450#issuecomment-352889579'
# informational: requirements for nodes
requirements:
- yum
author:
- Ansible Core Team
- Seth Vidal (@skvidal)
- Eduard Snesarev (@verm666)
- Berend De Schouwer (@berenddeschouwer)
- Abhijeet Kasurde (@Akasurde)
- Adam Miller (@maxamillion)
'''
EXAMPLES = '''
- name: Install the latest version of Apache
yum:
name: httpd
state: latest
- name: Install Apache >= 2.4
yum:
name: httpd>=2.4
state: present
- name: Install a list of packages (suitable replacement for 2.11 loop deprecation warning)
yum:
name:
- nginx
- postgresql
- postgresql-server
state: present
- name: Install a list of packages with a list variable
yum:
name: "{{ packages }}"
vars:
packages:
- httpd
- httpd-tools
- name: Remove the Apache package
yum:
name: httpd
state: absent
- name: Install the latest version of Apache from the testing repo
yum:
name: httpd
enablerepo: testing
state: present
- name: Install one specific version of Apache
yum:
name: httpd-2.2.29-1.4.amzn1
state: present
- name: Upgrade all packages
yum:
name: '*'
state: latest
- name: Upgrade all packages, excluding kernel & foo related packages
yum:
name: '*'
state: latest
exclude: kernel*,foo*
- name: Install the nginx rpm from a remote repo
yum:
name: http://nginx.org/packages/centos/6/noarch/RPMS/nginx-release-centos-6-0.el6.ngx.noarch.rpm
state: present
- name: Install nginx rpm from a local file
yum:
name: /usr/local/src/nginx-release-centos-6-0.el6.ngx.noarch.rpm
state: present
- name: Install the 'Development tools' package group
yum:
name: "@Development tools"
state: present
- name: Install the 'Gnome desktop' environment group
yum:
name: "@^gnome-desktop-environment"
state: present
- name: List ansible packages and register result to print with debug later
yum:
list: ansible
register: result
- name: Install package with multiple repos enabled
yum:
name: sos
enablerepo: "epel,ol7_latest"
- name: Install package with multiple repos disabled
yum:
name: sos
disablerepo: "epel,ol7_latest"
- name: Download the nginx package but do not install it
yum:
name:
- nginx
state: latest
download_only: true
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.respawn import has_respawned, respawn_module
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.urls import fetch_url
from ansible.module_utils.yumdnf import YumDnf, yumdnf_argument_spec
import errno
import os
import re
import sys
import tempfile
# Feature detection: these imports decide which backend code paths are usable
# on the managed host. The module must still import cleanly when they are
# missing so it can report a useful error instead of a traceback.
try:
    import rpm
    HAS_RPM_PYTHON = True
except ImportError:
    HAS_RPM_PYTHON = False

try:
    import yum
    HAS_YUM_PYTHON = True
except ImportError:
    HAS_YUM_PYTHON = False

try:
    from yum.misc import find_unfinished_transactions, find_ts_remaining
    from rpmUtils.miscutils import splitFilename, compareEVR
    transaction_helpers = True
except ImportError:
    # Without these helpers transaction_exists() cannot inspect unfinished
    # yum transactions and simply reports no conflicts.
    transaction_helpers = False

from contextlib import contextmanager
from ansible.module_utils.urls import fetch_file

# Default rpm/repoquery queryformat: epoch:name-version-release.arch
def_qf = "%{epoch}:%{name}-%{version}-%{release}.%{arch}"
rpmbin = None  # lazily-resolved path to the `rpm` binary (see is_installed)
class YumModule(YumDnf):
"""
Yum Ansible module back-end implementation
"""
    def __init__(self, module):
        """Initialize the yum back-end.

        :param module: the AnsibleModule instance; the YumDnf base class
            populates an instance attribute for every argument-spec
            parameter (state, name, enablerepo, ...).
        """
        # state=installed name=pkgspec
        # state=removed name=pkgspec
        # state=latest name=pkgspec
        #
        # informational commands:
        #   list=installed
        #   list=updates
        #   list=available
        #   list=repos
        #   list=pkgspec

        # This populates instance vars for all argument spec params
        super(YumModule, self).__init__(module)

        self.pkg_mgr_name = "yum"
        self.lockfile = '/var/run/yum.pid'
        # Backing store for the lazily-constructed YumBase (see the yum_base
        # property); stays None until first access.
        self._yum_base = None
def _enablerepos_with_error_checking(self):
# NOTE: This seems unintuitive, but it mirrors yum's CLI behavior
if len(self.enablerepo) == 1:
try:
self.yum_base.repos.enableRepo(self.enablerepo[0])
except yum.Errors.YumBaseError as e:
if u'repository not found' in to_text(e):
self.module.fail_json(msg="Repository %s not found." % self.enablerepo[0])
else:
raise e
else:
for rid in self.enablerepo:
try:
self.yum_base.repos.enableRepo(rid)
except yum.Errors.YumBaseError as e:
if u'repository not found' in to_text(e):
self.module.warn("Repository %s not found." % rid)
else:
raise e
def is_lockfile_pid_valid(self):
try:
try:
with open(self.lockfile, 'r') as f:
oldpid = int(f.readline())
except ValueError:
# invalid data
os.unlink(self.lockfile)
return False
if oldpid == os.getpid():
# that's us?
os.unlink(self.lockfile)
return False
try:
with open("/proc/%d/stat" % oldpid, 'r') as f:
stat = f.readline()
if stat.split()[2] == 'Z':
# Zombie
os.unlink(self.lockfile)
return False
except IOError:
# either /proc is not mounted or the process is already dead
try:
# check the state of the process
os.kill(oldpid, 0)
except OSError as e:
if e.errno == errno.ESRCH:
# No such process
os.unlink(self.lockfile)
return False
self.module.fail_json(msg="Unable to check PID %s in %s: %s" % (oldpid, self.lockfile, to_native(e)))
except (IOError, OSError) as e:
# lockfile disappeared?
return False
# another copy seems to be running
return True
    @property
    def yum_base(self):
        """Lazily build and cache a configured yum.YumBase instance.

        Configuration happens exactly once; subsequent accesses return the
        cached object. Order matters here: all ``preconf`` settings must be
        assigned before ``conf`` is touched, because reading ``conf``
        triggers config loading and plugin discovery.
        """
        if self._yum_base:
            return self._yum_base
        else:
            # Only init once
            self._yum_base = yum.YumBase()
            self._yum_base.preconf.debuglevel = 0
            self._yum_base.preconf.errorlevel = 0
            self._yum_base.preconf.plugins = True
            self._yum_base.preconf.enabled_plugins = self.enable_plugin
            self._yum_base.preconf.disabled_plugins = self.disable_plugin
            if self.releasever:
                self._yum_base.preconf.releasever = self.releasever
            if self.installroot != '/':
                # do not setup installroot by default, because of error
                # CRITICAL:yum.cli:Config Error: Error accessing file for config file:////etc/yum.conf
                # in old yum version (like in CentOS 6.6)
                self._yum_base.preconf.root = self.installroot
                self._yum_base.conf.installroot = self.installroot
            if self.conf_file and os.path.exists(self.conf_file):
                self._yum_base.preconf.fn = self.conf_file
            if os.geteuid() != 0:
                # Unprivileged user: redirect the cache somewhere writable.
                if hasattr(self._yum_base, 'setCacheDir'):
                    self._yum_base.setCacheDir()
                else:
                    cachedir = yum.misc.getCacheDir()
                    self._yum_base.repos.setCacheDir(cachedir)
                    self._yum_base.conf.cache = 0
            if self.disable_excludes:
                self._yum_base.conf.disable_excludes = self.disable_excludes

            # A sideeffect of accessing conf is that the configuration is
            # loaded and plugins are discovered
            self.yum_base.conf

            try:
                # Apply per-task repo toggles after config/plugins are loaded.
                for rid in self.disablerepo:
                    self.yum_base.repos.disableRepo(rid)

                self._enablerepos_with_error_checking()
            except Exception as e:
                self.module.fail_json(msg="Failure talking to yum: %s" % to_native(e))

        return self._yum_base
def po_to_envra(self, po):
if hasattr(po, 'ui_envra'):
return po.ui_envra
return '%s:%s-%s-%s.%s' % (po.epoch, po.name, po.version, po.release, po.arch)
def is_group_env_installed(self, name):
name_lower = name.lower()
if yum.__version_info__ >= (3, 4):
groups_list = self.yum_base.doGroupLists(return_evgrps=True)
else:
groups_list = self.yum_base.doGroupLists()
# list of the installed groups on the first index
groups = groups_list[0]
for group in groups:
if name_lower.endswith(group.name.lower()) or name_lower.endswith(group.groupid.lower()):
return True
if yum.__version_info__ >= (3, 4):
# list of the installed env_groups on the third index
envs = groups_list[2]
for env in envs:
if name_lower.endswith(env.name.lower()) or name_lower.endswith(env.environmentid.lower()):
return True
return False
    def is_installed(self, repoq, pkgspec, qf=None, is_pkg=False):
        """Return the installed packages matching *pkgspec* as envra strings.

        With no ``repoq`` command the yum Python API (rpmdb) is queried;
        otherwise the ``rpm`` binary is run and its output screen-scraped.
        Unless ``is_pkg`` is set, a spec that matches no package name is
        retried as a provides/dependency lookup.
        """
        if qf is None:
            qf = "%{epoch}:%{name}-%{version}-%{release}.%{arch}\n"

        if not repoq:
            pkgs = []
            try:
                # Exact and glob name matches against the installed rpmdb.
                e, m, _ = self.yum_base.rpmdb.matchPackageNames([pkgspec])
                pkgs = e + m
                if not pkgs and not is_pkg:
                    # No name match: treat the spec as a capability/dep.
                    pkgs.extend(self.yum_base.returnInstalledPackagesByDep(pkgspec))
            except Exception as e:
                self.module.fail_json(msg="Failure talking to yum: %s" % to_native(e))

            return [self.po_to_envra(p) for p in pkgs]

        else:
            global rpmbin
            if not rpmbin:
                # Resolve the rpm binary once and cache it at module level.
                rpmbin = self.module.get_bin_path('rpm', required=True)

            cmd = [rpmbin, '-q', '--qf', qf, pkgspec]
            if self.installroot != '/':
                cmd.extend(['--root', self.installroot])
            # rpm localizes messages and we're screen scraping so make sure we use
            # the C locale
            lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
            rc, out, err = self.module.run_command(cmd, environ_update=lang_env)
            if rc != 0 and 'is not installed' not in out:
                self.module.fail_json(msg='Error from rpm: %s: %s' % (cmd, err))
            if 'is not installed' in out:
                out = ''

            # '(none)' is rpm's rendering of an unset epoch; normalize to 0.
            pkgs = [p for p in out.replace('(none)', '0').split('\n') if p.strip()]
            if not pkgs and not is_pkg:
                # No name match: fall back to a --whatprovides lookup.
                cmd = [rpmbin, '-q', '--qf', qf, '--whatprovides', pkgspec]
                if self.installroot != '/':
                    cmd.extend(['--root', self.installroot])
                rc2, out2, err2 = self.module.run_command(cmd, environ_update=lang_env)
            else:
                rc2, out2, err2 = (0, '', '')

            if rc2 != 0 and 'no package provides' not in out2:
                self.module.fail_json(msg='Error from rpm: %s: %s' % (cmd, err + err2))
            if 'no package provides' in out2:
                out2 = ''
            pkgs += [p for p in out2.replace('(none)', '0').split('\n') if p.strip()]
            return pkgs

        # Unreachable: both branches above return; kept for safety.
        return []
def is_available(self, repoq, pkgspec, qf=def_qf):
if not repoq:
pkgs = []
try:
e, m, _ = self.yum_base.pkgSack.matchPackageNames([pkgspec])
pkgs = e + m
if not pkgs:
pkgs.extend(self.yum_base.returnPackagesByDep(pkgspec))
except Exception as e:
self.module.fail_json(msg="Failure talking to yum: %s" % to_native(e))
return [self.po_to_envra(p) for p in pkgs]
else:
myrepoq = list(repoq)
r_cmd = ['--disablerepo', ','.join(self.disablerepo)]
myrepoq.extend(r_cmd)
r_cmd = ['--enablerepo', ','.join(self.enablerepo)]
myrepoq.extend(r_cmd)
if self.releasever:
myrepoq.extend('--releasever=%s' % self.releasever)
cmd = myrepoq + ["--qf", qf, pkgspec]
rc, out, err = self.module.run_command(cmd)
if rc == 0:
return [p for p in out.split('\n') if p.strip()]
else:
self.module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err))
return []
def is_update(self, repoq, pkgspec, qf=def_qf):
if not repoq:
pkgs = []
updates = []
try:
pkgs = self.yum_base.returnPackagesByDep(pkgspec) + \
self.yum_base.returnInstalledPackagesByDep(pkgspec)
if not pkgs:
e, m, _ = self.yum_base.pkgSack.matchPackageNames([pkgspec])
pkgs = e + m
updates = self.yum_base.doPackageLists(pkgnarrow='updates').updates
except Exception as e:
self.module.fail_json(msg="Failure talking to yum: %s" % to_native(e))
retpkgs = (pkg for pkg in pkgs if pkg in updates)
return set(self.po_to_envra(p) for p in retpkgs)
else:
myrepoq = list(repoq)
r_cmd = ['--disablerepo', ','.join(self.disablerepo)]
myrepoq.extend(r_cmd)
r_cmd = ['--enablerepo', ','.join(self.enablerepo)]
myrepoq.extend(r_cmd)
if self.releasever:
myrepoq.extend('--releasever=%s' % self.releasever)
cmd = myrepoq + ["--pkgnarrow=updates", "--qf", qf, pkgspec]
rc, out, err = self.module.run_command(cmd)
if rc == 0:
return set(p for p in out.split('\n') if p.strip())
else:
self.module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err))
return set()
def what_provides(self, repoq, req_spec, qf=def_qf):
if not repoq:
pkgs = []
try:
try:
pkgs = self.yum_base.returnPackagesByDep(req_spec) + \
self.yum_base.returnInstalledPackagesByDep(req_spec)
except Exception as e:
# If a repo with `repo_gpgcheck=1` is added and the repo GPG
# key was never accepted, querying this repo will throw an
# error: 'repomd.xml signature could not be verified'. In that
# situation we need to run `yum -y makecache` which will accept
# the key and try again.
if 'repomd.xml signature could not be verified' in to_native(e):
if self.releasever:
self.module.run_command(self.yum_basecmd + ['makecache'] + ['--releasever=%s' % self.releasever])
else:
self.module.run_command(self.yum_basecmd + ['makecache'])
pkgs = self.yum_base.returnPackagesByDep(req_spec) + \
self.yum_base.returnInstalledPackagesByDep(req_spec)
else:
raise
if not pkgs:
e, m, _ = self.yum_base.pkgSack.matchPackageNames([req_spec])
pkgs.extend(e)
pkgs.extend(m)
e, m, _ = self.yum_base.rpmdb.matchPackageNames([req_spec])
pkgs.extend(e)
pkgs.extend(m)
except Exception as e:
self.module.fail_json(msg="Failure talking to yum: %s" % to_native(e))
return set(self.po_to_envra(p) for p in pkgs)
else:
myrepoq = list(repoq)
r_cmd = ['--disablerepo', ','.join(self.disablerepo)]
myrepoq.extend(r_cmd)
r_cmd = ['--enablerepo', ','.join(self.enablerepo)]
myrepoq.extend(r_cmd)
if self.releasever:
myrepoq.extend('--releasever=%s' % self.releasever)
cmd = myrepoq + ["--qf", qf, "--whatprovides", req_spec]
rc, out, err = self.module.run_command(cmd)
cmd = myrepoq + ["--qf", qf, req_spec]
rc2, out2, err2 = self.module.run_command(cmd)
if rc == 0 and rc2 == 0:
out += out2
pkgs = set([p for p in out.split('\n') if p.strip()])
if not pkgs:
pkgs = self.is_installed(repoq, req_spec, qf=qf)
return pkgs
else:
self.module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err + err2))
return set()
def transaction_exists(self, pkglist):
"""
checks the package list to see if any packages are
involved in an incomplete transaction
"""
conflicts = []
if not transaction_helpers:
return conflicts
# first, we create a list of the package 'nvreas'
# so we can compare the pieces later more easily
pkglist_nvreas = (splitFilename(pkg) for pkg in pkglist)
# next, we build the list of packages that are
# contained within an unfinished transaction
unfinished_transactions = find_unfinished_transactions()
for trans in unfinished_transactions:
steps = find_ts_remaining(trans)
for step in steps:
# the action is install/erase/etc., but we only
# care about the package spec contained in the step
(action, step_spec) = step
(n, v, r, e, a) = splitFilename(step_spec)
# and see if that spec is in the list of packages
# requested for installation/updating
for pkg in pkglist_nvreas:
# if the name and arch match, we're going to assume
# this package is part of a pending transaction
# the label is just for display purposes
label = "%s-%s" % (n, a)
if n == pkg[0] and a == pkg[4]:
if label not in conflicts:
conflicts.append("%s-%s" % (n, a))
break
return conflicts
def local_envra(self, path):
"""return envra of a local rpm passed in"""
ts = rpm.TransactionSet()
ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
fd = os.open(path, os.O_RDONLY)
try:
header = ts.hdrFromFdno(fd)
except rpm.error as e:
return None
finally:
os.close(fd)
return '%s:%s-%s-%s.%s' % (
header[rpm.RPMTAG_EPOCH] or '0',
header[rpm.RPMTAG_NAME],
header[rpm.RPMTAG_VERSION],
header[rpm.RPMTAG_RELEASE],
header[rpm.RPMTAG_ARCH]
)
    @contextmanager
    def set_env_proxy(self):
        """Context manager that exports yum's proxy settings as
        http_proxy/https_proxy environment variables for the duration of
        the block, then restores the previous environment."""
        # setting system proxy environment and saving old, if exists
        namepass = ""
        scheme = ["http", "https"]
        old_proxy_env = [os.getenv("http_proxy"), os.getenv("https_proxy")]
        try:
            # "_none_" is a special value to disable proxy in yum.conf/*.repo
            if self.yum_base.conf.proxy and self.yum_base.conf.proxy not in ("_none_",):
                if self.yum_base.conf.proxy_username:
                    # Credentials configured separately: build user[:pass]
                    # and inject it into the proxy URL below.
                    namepass = namepass + self.yum_base.conf.proxy_username
                    proxy_url = self.yum_base.conf.proxy
                    if self.yum_base.conf.proxy_password:
                        namepass = namepass + ":" + self.yum_base.conf.proxy_password
                elif '@' in self.yum_base.conf.proxy:
                    # Credentials embedded in the proxy URL: split them out
                    # so they can be re-inserted uniformly.
                    namepass = self.yum_base.conf.proxy.split('@')[0].split('//')[-1]
                    proxy_url = self.yum_base.conf.proxy.replace("{0}@".format(namepass), "")

                if namepass:
                    namepass = namepass + '@'
                    for item in scheme:
                        os.environ[item + "_proxy"] = re.sub(
                            r"(http://)",
                            r"\g<1>" + namepass, proxy_url
                        )
                else:
                    for item in scheme:
                        os.environ[item + "_proxy"] = self.yum_base.conf.proxy
            yield
        except yum.Errors.YumBaseError:
            raise
        finally:
            # revert back to previously system configuration
            for item in scheme:
                if os.getenv("{0}_proxy".format(item)):
                    del os.environ["{0}_proxy".format(item)]
            if old_proxy_env[0]:
                os.environ["http_proxy"] = old_proxy_env[0]
            if old_proxy_env[1]:
                os.environ["https_proxy"] = old_proxy_env[1]
def pkg_to_dict(self, pkgstr):
if pkgstr.strip() and pkgstr.count('|') == 5:
n, e, v, r, a, repo = pkgstr.split('|')
else:
return {'error_parsing': pkgstr}
d = {
'name': n,
'arch': a,
'epoch': e,
'release': r,
'version': v,
'repo': repo,
'envra': '%s:%s-%s-%s.%s' % (e, n, v, r, a)
}
if repo == 'installed':
d['yumstate'] = 'installed'
else:
d['yumstate'] = 'available'
return d
def repolist(self, repoq, qf="%{repoid}"):
    """Return the set of repo ids reported by repoquery.

    Mirrors the historical behaviour of returning an empty *list* when the
    repoquery command fails.
    """
    query_cmd = list(repoq) + ["--qf", qf, "-a"]
    if self.releasever:
        query_cmd.append('--releasever=%s' % self.releasever)
    rc, out, _ = self.module.run_command(query_cmd)
    if rc != 0:
        return []
    return {line for line in out.split('\n') if line.strip()}
def list_stuff(self, repoquerybin, stuff):
    """Implement ``list=<installed|updates|available|repos|pkgspec>``.

    Returns a list of package dicts (via pkg_to_dict) or, for
    ``list=repos``, a list of ``{'repoid': ..., 'state': 'enabled'}`` dicts.
    """
    qf = "%{name}|%{epoch}|%{version}|%{release}|%{arch}|%{repoid}"
    # is_installed goes through rpm instead of repoquery so it needs a slightly different format
    is_installed_qf = "%{name}|%{epoch}|%{version}|%{release}|%{arch}|installed\n"

    repoq = [repoquerybin, '--show-duplicates', '--plugins', '--quiet']
    if self.disablerepo:
        repoq += ['--disablerepo', ','.join(self.disablerepo)]
    if self.enablerepo:
        repoq += ['--enablerepo', ','.join(self.enablerepo)]
    if self.installroot != '/':
        repoq += ['--installroot', self.installroot]
    if self.conf_file and os.path.exists(self.conf_file):
        repoq += ['-c', self.conf_file]

    if stuff == 'repos':
        return [dict(repoid=name, state='enabled')
                for name in sorted(self.repolist(repoq)) if name.strip()]

    if stuff == 'installed':
        lines = self.is_installed(repoq, '-a', qf=is_installed_qf)
    elif stuff == 'updates':
        lines = self.is_update(repoq, '-a', qf=qf)
    elif stuff == 'available':
        lines = self.is_available(repoq, '-a', qf=qf)
    else:
        # concrete pkgspec: report both installed and available matches
        lines = self.is_installed(repoq, stuff, qf=is_installed_qf) + self.is_available(repoq, stuff, qf=qf)
    return [self.pkg_to_dict(line) for line in sorted(lines) if line.strip()]
def exec_install(self, items, action, pkgs, res):
    """Run ``yum <action> <pkgs>`` (action is 'install' or 'downgrade') and
    fold the outcome into *res*, calling fail_json on fatal errors.
    """
    cmd = self.yum_basecmd + [action] + pkgs
    if self.releasever:
        cmd.extend(['--releasever=%s' % self.releasever])

    # In check mode only report what would change; exit_json does not return.
    if self.module.check_mode:
        self.module.exit_json(changed=True, results=res['results'], changes=dict(installed=pkgs))
    else:
        res['changes'] = dict(installed=pkgs)

    # Force the C locale so the English messages matched below are stable.
    lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
    rc, out, err = self.module.run_command(cmd, environ_update=lang_env)

    if rc == 1:
        for spec in items:
            # Fail on invalid urls:
            if ('://' in spec and ('No package %s available.' % spec in out or 'Cannot open: %s. Skipping.' % spec in err)):
                err = 'Package at %s could not be installed' % spec
                self.module.fail_json(changed=False, msg=err, rc=rc)

    res['rc'] = rc
    res['results'].append(out)
    res['msg'] += err
    res['changed'] = True

    # yum may exit 0 with nothing done; report those runs as unchanged.
    if ('Nothing to do' in out and rc == 0) or ('does not have any packages' in err):
        res['changed'] = False

    if rc != 0:
        res['changed'] = False
        self.module.fail_json(**res)

    # Fail if yum prints 'No space left on device' because that means some
    # packages failed executing their post install scripts because of lack of
    # free space (e.g. kernel package couldn't generate initramfs). Note that
    # yum can still exit with rc=0 even if some post scripts didn't execute
    # correctly.
    if 'No space left on device' in (out or err):
        res['changed'] = False
        res['msg'] = 'No space left on device'
        self.module.fail_json(**res)

    # FIXME - if we did an install - go and check the rpmdb to see if it actually installed
    # look for each pkg in rpmdb
    # look for each pkg via obsoletes

    return res
def install(self, items, repoq):
    """Handle state=present/installed.

    Classifies each spec (local rpm, URL, @group, or package name/provides),
    skips anything already installed, and hands the remainder to
    exec_install() — using a separate 'downgrade' transaction for specs that
    name an older version when allow_downgrade is set.

    :param items: list of package specs as given by the user
    :param repoq: base repoquery command line (list) or None
    :return: result dict (results/msg/rc/changed, plus changes)
    """
    pkgs = []
    downgrade_pkgs = []
    res = {}
    res['results'] = []
    res['msg'] = ''
    res['rc'] = 0
    res['changed'] = False

    for spec in items:
        pkg = None
        downgrade_candidate = False

        # check if pkgspec is installed (if possible for idempotence)
        if spec.endswith('.rpm') or '://' in spec:
            if '://' not in spec and not os.path.exists(spec):
                res['msg'] += "No RPM file matching '%s' found on system" % spec
                res['results'].append("No RPM file matching '%s' found on system" % spec)
                res['rc'] = 127  # Ensure the task fails in with-loop
                self.module.fail_json(**res)

            if '://' in spec:
                with self.set_env_proxy():
                    package = fetch_file(self.module, spec)
                if not package.endswith('.rpm'):
                    # yum requires a local file to have the extension of .rpm and we
                    # can not guarantee that from an URL (redirects, proxies, etc)
                    new_package_path = '%s.rpm' % package
                    os.rename(package, new_package_path)
                    package = new_package_path
            else:
                package = spec

            # most common case is the pkg is already installed
            envra = self.local_envra(package)
            if envra is None:
                self.module.fail_json(msg="Failed to get nevra information from RPM package: %s" % spec)
            installed_pkgs = self.is_installed(repoq, envra)
            if installed_pkgs:
                res['results'].append('%s providing %s is already installed' % (installed_pkgs[0], package))
                continue

            (name, ver, rel, epoch, arch) = splitFilename(envra)
            installed_pkgs = self.is_installed(repoq, name)

            # case for two same envr but different archs like x86_64 and i686
            if len(installed_pkgs) == 2:
                (cur_name0, cur_ver0, cur_rel0, cur_epoch0, cur_arch0) = splitFilename(installed_pkgs[0])
                (cur_name1, cur_ver1, cur_rel1, cur_epoch1, cur_arch1) = splitFilename(installed_pkgs[1])

                # missing epoch means 0
                cur_epoch0 = cur_epoch0 or '0'
                cur_epoch1 = cur_epoch1 or '0'
                compare = compareEVR((cur_epoch0, cur_ver0, cur_rel0), (cur_epoch1, cur_ver1, cur_rel1))
                if compare == 0 and cur_arch0 != cur_arch1:
                    # same EVR on both arches: compare only against the one
                    # matching the rpm file's arch
                    for installed_pkg in installed_pkgs:
                        if installed_pkg.endswith(arch):
                            installed_pkgs = [installed_pkg]

            if len(installed_pkgs) == 1:
                installed_pkg = installed_pkgs[0]
                (cur_name, cur_ver, cur_rel, cur_epoch, cur_arch) = splitFilename(installed_pkg)
                cur_epoch = cur_epoch or '0'
                compare = compareEVR((cur_epoch, cur_ver, cur_rel), (epoch, ver, rel))

                # compare > 0 -> higher version is installed
                # compare == 0 -> exact version is installed
                # compare < 0 -> lower version is installed
                if compare > 0 and self.allow_downgrade:
                    downgrade_candidate = True
                elif compare >= 0:
                    continue

            # else: if there are more installed packages with the same name, that would mean
            # kernel, gpg-pubkey or like, so just let yum deal with it and try to install it
            pkg = package

        # groups
        elif spec.startswith('@'):
            if self.is_group_env_installed(spec):
                continue

            pkg = spec

        # range requires or file-requires or pkgname :(
        else:
            # most common case is the pkg is already installed and done
            # short circuit all the bs - and search for it as a pkg in is_installed
            # if you find it then we're done
            if not set(['*', '?']).intersection(set(spec)):
                installed_pkgs = self.is_installed(repoq, spec, is_pkg=True)
                if installed_pkgs:
                    res['results'].append('%s providing %s is already installed' % (installed_pkgs[0], spec))
                    continue

            # look up what pkgs provide this
            pkglist = self.what_provides(repoq, spec)
            if not pkglist:
                res['msg'] += "No package matching '%s' found available, installed or updated" % spec
                res['results'].append("No package matching '%s' found available, installed or updated" % spec)
                res['rc'] = 126  # Ensure the task fails in with-loop
                self.module.fail_json(**res)

            # if any of the packages are involved in a transaction, fail now
            # so that we don't hang on the yum operation later
            conflicts = self.transaction_exists(pkglist)
            if conflicts:
                res['msg'] += "The following packages have pending transactions: %s" % ", ".join(conflicts)
                res['rc'] = 125  # Ensure the task fails in with-loop
                self.module.fail_json(**res)

            # if any of them are installed
            # then nothing to do
            found = False
            for this in pkglist:
                if self.is_installed(repoq, this, is_pkg=True):
                    found = True
                    res['results'].append('%s providing %s is already installed' % (this, spec))
                    break

            # if the version of the pkg you have installed is not in ANY repo, but there are
            # other versions in the repos (both higher and lower) then the previous checks won't work.
            # so we check one more time. This really only works for pkgname - not for file provides or virt provides
            # but virt provides should be all caught in what_provides on its own.
            # highly irritating
            if not found:
                if self.is_installed(repoq, spec):
                    found = True
                    res['results'].append('package providing %s is already installed' % (spec))

            if found:
                continue

            # Downgrade - The yum install command will only install or upgrade to a spec version, it will
            # not install an older version of an RPM even if specified by the install spec. So we need to
            # determine if this is a downgrade, and then use the yum downgrade command to install the RPM.
            if self.allow_downgrade:
                for package in pkglist:
                    # Get the NEVRA of the requested package using pkglist instead of spec because pkglist
                    # contains consistently-formatted package names returned by yum, rather than user input
                    # that is often not parsed correctly by splitFilename().
                    (name, ver, rel, epoch, arch) = splitFilename(package)

                    # Check if any version of the requested package is installed
                    inst_pkgs = self.is_installed(repoq, name, is_pkg=True)
                    if inst_pkgs:
                        (cur_name, cur_ver, cur_rel, cur_epoch, cur_arch) = splitFilename(inst_pkgs[0])
                        compare = compareEVR((cur_epoch, cur_ver, cur_rel), (epoch, ver, rel))
                        if compare > 0:
                            downgrade_candidate = True
                        else:
                            downgrade_candidate = False
                            break

            # If package needs to be installed/upgraded/downgraded, then pass in the spec
            # we could get here if nothing provides it but that's not
            # the error we're catching here
            pkg = spec

        if downgrade_candidate and self.allow_downgrade:
            downgrade_pkgs.append(pkg)
        else:
            pkgs.append(pkg)

    if downgrade_pkgs:
        res = self.exec_install(items, 'downgrade', downgrade_pkgs, res)

    if pkgs:
        res = self.exec_install(items, 'install', pkgs, res)

    return res
def remove(self, items, repoq):
    """Handle state=absent/removed (and autoremove).

    Only specs that are actually installed are passed to yum; after the
    transaction the rpmdb is re-checked so that a package yum silently
    failed to remove still fails the task.
    """
    pkgs = []
    res = {}
    res['results'] = []
    res['msg'] = ''
    res['changed'] = False
    res['rc'] = 0

    for pkg in items:
        if pkg.startswith('@'):
            installed = self.is_group_env_installed(pkg)
        else:
            installed = self.is_installed(repoq, pkg)

        if installed:
            pkgs.append(pkg)
        else:
            res['results'].append('%s is not installed' % pkg)

    if pkgs:
        if self.module.check_mode:
            self.module.exit_json(changed=True, results=res['results'], changes=dict(removed=pkgs))
        else:
            res['changes'] = dict(removed=pkgs)

        # run an actual yum transaction
        if self.autoremove:
            cmd = self.yum_basecmd + ["autoremove"] + pkgs
        else:
            cmd = self.yum_basecmd + ["remove"] + pkgs
        rc, out, err = self.module.run_command(cmd)

        res['rc'] = rc
        res['results'].append(out)
        res['msg'] = err

        if rc != 0:
            if self.autoremove and 'No such command' in out:
                self.module.fail_json(msg='Version of YUM too old for autoremove: Requires yum 3.4.3 (RHEL/CentOS 7+)')
            else:
                self.module.fail_json(**res)

        # compile the results into one batch. If anything is changed
        # then mark changed
        # at the end - if we've end up failed then fail out of the rest
        # of the process

        # at this point we check to see if the pkg is no longer present
        self._yum_base = None  # previous YumBase package index is now invalid
        for pkg in pkgs:
            if pkg.startswith('@'):
                installed = self.is_group_env_installed(pkg)
            else:
                installed = self.is_installed(repoq, pkg, is_pkg=True)

            if installed:
                # Return a message so it's obvious to the user why yum failed
                # and which package couldn't be removed. More details:
                # https://github.com/ansible/ansible/issues/35672
                res['msg'] = "Package '%s' couldn't be removed!" % pkg
                self.module.fail_json(**res)

        res['changed'] = True

    return res
def run_check_update(self):
    """Run ``yum check-update`` and return its (rc, stdout, stderr).

    rc semantics follow yum: 0 = nothing pending, 100 = updates available,
    1 = error.
    """
    check_cmd = self.yum_basecmd + ['check-update']
    if self.releasever:
        check_cmd += ['--releasever=%s' % self.releasever]
    return self.module.run_command(check_cmd)
@staticmethod
def parse_check_update(check_update_output):
    """Parse ``yum check-update`` output into (updates, obsoletes) dicts.

    Both dicts map a package name to {'version', 'dist', 'repo'}; obsolete
    entries come from the trailing three columns of six-column rows.
    """
    # yum wraps long lines, pushing the repo column onto the next line:
    #   some_looooooooong_package_name 1:1.2.3-1.el7
    #                                        some-repo-label
    # Re-join such continuation lines (without catching informational lines
    # like "Loading mirror speeds from cached hostfile") before splitting
    # the output into rows.
    flattened = re.sub(r'\n[^\w]\W+(.*)', r' \1', check_update_output)

    updates = {}
    obsoletes = {}
    for raw_line in flattened.split('\n'):
        cols = raw_line.split()
        # Skip mirror-list noise ('* base: ...'), junk and anything that is
        # not a 3-column update row or a 6-column obsoletes row; the first
        # column must look like 'name.arch'.
        if '*' in cols or len(cols) not in (3, 6) or '.' not in cols[0]:
            continue

        name, dist = cols[0].rsplit('.', 1)
        updates[name] = {'version': cols[1], 'dist': dist, 'repo': cols[2]}

        if len(cols) == 6:
            obs_name, obs_dist = cols[3].rsplit('.', 1)
            obsoletes[obs_name] = {'version': cols[4], 'dist': obs_dist, 'repo': cols[5]}
    return updates, obsoletes
def latest(self, items, repoq):
    """Handle state=latest (and update_only).

    Works out which of *items* need installing or updating (``'*'`` means
    update everything), reports the planned changes in check mode, and
    otherwise runs the corresponding yum transaction(s).

    :param items: list of package specs, @group specs, local rpm paths or
        URLs; may contain '*' to request a full update
    :param repoq: base repoquery command line (list) or None
    :return: result dict (results/msg/rc/changed, plus changes/obsoletes)
    """
    res = {}
    res['results'] = []
    res['msg'] = ''
    res['changed'] = False
    res['rc'] = 0
    pkgs = {}
    pkgs['update'] = []
    pkgs['install'] = []
    updates = {}
    obsoletes = {}
    update_all = False
    cmd = None

    # determine if we're doing an update all
    if '*' in items:
        update_all = True

    rc, out, err = self.run_check_update()

    # yum check-update rc: 0 = nothing pending, 100 = updates available,
    # 1 = error
    if rc == 0 and update_all:
        res['results'].append('Nothing to do here, all packages are up to date')
        return res
    elif rc == 100:
        updates, obsoletes = self.parse_check_update(out)
    elif rc == 1:
        res['msg'] = err
        res['rc'] = rc
        self.module.fail_json(**res)

    if update_all:
        cmd = self.yum_basecmd + ['update']
        will_update = set(updates.keys())
        will_update_from_other_package = dict()
    else:
        will_update = set()
        will_update_from_other_package = dict()
        for spec in items:
            # some guess work involved with groups. update @<group> will install the group if missing
            if spec.startswith('@'):
                pkgs['update'].append(spec)
                will_update.add(spec)
                continue

            # check if pkgspec is installed (if possible for idempotence)
            # localpkg
            if spec.endswith('.rpm') and '://' not in spec:
                if not os.path.exists(spec):
                    res['msg'] += "No RPM file matching '%s' found on system" % spec
                    res['results'].append("No RPM file matching '%s' found on system" % spec)
                    res['rc'] = 127  # Ensure the task fails in with-loop
                    self.module.fail_json(**res)

                # get the pkg e:name-v-r.arch
                envra = self.local_envra(spec)
                if envra is None:
                    self.module.fail_json(msg="Failed to get nevra information from RPM package: %s" % spec)

                # local rpm files can't be updated
                if self.is_installed(repoq, envra):
                    pkgs['update'].append(spec)
                else:
                    pkgs['install'].append(spec)
                continue

            # URL
            if '://' in spec:
                # download package so that we can check if it's already installed
                with self.set_env_proxy():
                    package = fetch_file(self.module, spec)
                envra = self.local_envra(package)
                if envra is None:
                    self.module.fail_json(msg="Failed to get nevra information from RPM package: %s" % spec)

                # local rpm files can't be updated
                if self.is_installed(repoq, envra):
                    pkgs['update'].append(spec)
                else:
                    pkgs['install'].append(spec)
                continue

            # dep/pkgname - find it
            if self.is_installed(repoq, spec):
                pkgs['update'].append(spec)
            else:
                pkgs['install'].append(spec)
            pkglist = self.what_provides(repoq, spec)
            # FIXME..? may not be desirable to throw an exception here if a single package is missing
            if not pkglist:
                res['msg'] += "No package matching '%s' found available, installed or updated" % spec
                res['results'].append("No package matching '%s' found available, installed or updated" % spec)
                res['rc'] = 126  # Ensure the task fails in with-loop
                self.module.fail_json(**res)

            nothing_to_do = True
            for pkg in pkglist:
                if spec in pkgs['install'] and self.is_available(repoq, pkg):
                    nothing_to_do = False
                    break

                # this contains the full NVR and spec could contain wildcards
                # or virtual provides (like "python-*" or "smtp-daemon") while
                # updates contains name only.
                pkgname, _, _, _, _ = splitFilename(pkg)
                if spec in pkgs['update'] and pkgname in updates:
                    nothing_to_do = False
                    will_update.add(spec)
                    # Massage the updates list
                    if spec != pkgname:
                        # For reporting what packages would be updated more
                        # succinctly
                        will_update_from_other_package[spec] = pkgname
                    break

            if not self.is_installed(repoq, spec) and self.update_only:
                res['results'].append("Packages providing %s not installed due to update_only specified" % spec)
                continue

            if nothing_to_do:
                res['results'].append("All packages providing %s are up to date" % spec)
                continue

            # if any of the packages are involved in a transaction, fail now
            # so that we don't hang on the yum operation later
            conflicts = self.transaction_exists(pkglist)
            if conflicts:
                res['msg'] += "The following packages have pending transactions: %s" % ", ".join(conflicts)
                res['results'].append("The following packages have pending transactions: %s" % ", ".join(conflicts))
                res['rc'] = 128  # Ensure the task fails in with-loop
                self.module.fail_json(**res)

    # check_mode output
    to_update = []
    for w in will_update:
        if w.startswith('@'):
            # groups: no version information to report
            to_update.append((w, None))
        elif w not in updates:
            # the spec matched via a differently-named providing package
            other_pkg = will_update_from_other_package[w]
            to_update.append(
                (
                    w,
                    'because of (at least) %s-%s.%s from %s' % (
                        other_pkg,
                        updates[other_pkg]['version'],
                        updates[other_pkg]['dist'],
                        updates[other_pkg]['repo']
                    )
                )
            )
        else:
            to_update.append((w, '%s.%s from %s' % (updates[w]['version'], updates[w]['dist'], updates[w]['repo'])))

    if self.update_only:
        res['changes'] = dict(installed=[], updated=to_update)
    else:
        res['changes'] = dict(installed=pkgs['install'], updated=to_update)

    if obsoletes:
        res['obsoletes'] = obsoletes

    # return results before we actually execute stuff
    if self.module.check_mode:
        if will_update or pkgs['install']:
            res['changed'] = True
        return res

    # BUGFIX: 'cmd' is only built for the update-all case.  Previously this
    # called cmd.extend() unconditionally and raised AttributeError when a
    # releasever was given together with explicit package names (cmd is
    # still None then).  The branches below build their own command lines
    # from yum_basecmd, which already carries --releasever when one was
    # requested (see ensure()).
    if cmd and self.releasever:
        cmd.extend(['--releasever=%s' % self.releasever])

    # run commands
    if cmd:     # update all
        rc, out, err = self.module.run_command(cmd)
        res['changed'] = True
    elif self.update_only:
        if pkgs['update']:
            cmd = self.yum_basecmd + ['update'] + pkgs['update']
            # force C locale so the English messages matched below are stable
            lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
            rc, out, err = self.module.run_command(cmd, environ_update=lang_env)
            out_lower = out.strip().lower()
            if not out_lower.endswith("no packages marked for update") and \
                    not out_lower.endswith("nothing to do"):
                res['changed'] = True
        else:
            rc, out, err = [0, '', '']
    elif pkgs['install'] or will_update and not self.update_only:
        cmd = self.yum_basecmd + ['install'] + pkgs['install'] + pkgs['update']
        lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
        rc, out, err = self.module.run_command(cmd, environ_update=lang_env)
        out_lower = out.strip().lower()
        if not out_lower.endswith("no packages marked for update") and \
                not out_lower.endswith("nothing to do"):
            res['changed'] = True
    else:
        rc, out, err = [0, '', '']

    res['rc'] = rc
    res['msg'] += err
    res['results'].append(out)

    if rc:
        res['failed'] = True

    return res
def ensure(self, repoq):
    """Build the common yum command line from the module options and
    dispatch to install()/remove()/latest() based on ``state``.

    :param repoq: base repoquery command line (list) or None
    :return: result dict produced by the dispatched handler
    """
    pkgs = self.names

    # autoremove was provided without `name`: behave like state=absent
    # with an empty package list (plain 'yum autoremove')
    if not self.names and self.autoremove:
        pkgs = []
        self.state = 'absent'

    if self.conf_file and os.path.exists(self.conf_file):
        self.yum_basecmd += ['-c', self.conf_file]

        if repoq:
            repoq += ['-c', self.conf_file]

    if self.skip_broken:
        self.yum_basecmd.extend(['--skip-broken'])

    if self.disablerepo:
        self.yum_basecmd.extend(['--disablerepo=%s' % ','.join(self.disablerepo)])

    if self.enablerepo:
        self.yum_basecmd.extend(['--enablerepo=%s' % ','.join(self.enablerepo)])

    if self.enable_plugin:
        self.yum_basecmd.extend(['--enableplugin', ','.join(self.enable_plugin)])

    if self.disable_plugin:
        self.yum_basecmd.extend(['--disableplugin', ','.join(self.disable_plugin)])

    if self.exclude:
        e_cmd = ['--exclude=%s' % ','.join(self.exclude)]
        self.yum_basecmd.extend(e_cmd)

    if self.disable_excludes:
        self.yum_basecmd.extend(['--disableexcludes=%s' % self.disable_excludes])

    if self.download_only:
        self.yum_basecmd.extend(['--downloadonly'])

        # download_dir only makes sense together with download_only
        if self.download_dir:
            self.yum_basecmd.extend(['--downloaddir=%s' % self.download_dir])

    if self.releasever:
        self.yum_basecmd.extend(['--releasever=%s' % self.releasever])

    if self.installroot != '/':
        # do not setup installroot by default, because of error
        # CRITICAL:yum.cli:Config Error: Error accessing file for config file:////etc/yum.conf
        # in old yum version (like in CentOS 6.6)
        e_cmd = ['--installroot=%s' % self.installroot]
        self.yum_basecmd.extend(e_cmd)

    if self.state in ('installed', 'present', 'latest'):
        """ The need of this entire if conditional has to be changed
            this function is the ensure function that is called
            in the main section.

            This conditional tends to disable/enable repo for
            install present latest action, same actually
            can be done for remove and absent action

            As solution I would advice to cal
            try: self.yum_base.repos.disableRepo(disablerepo)
            and
            try: self.yum_base.repos.enableRepo(enablerepo)
            right before any yum_cmd is actually called regardless
            of yum action.

            Please note that enable/disablerepo options are general
            options, this means that we can call those with any action
            option.  https://linux.die.net/man/8/yum

            This docstring will be removed together when issue: #21619
            will be solved.

            This has been triggered by: #19587
        """

        if self.update_cache:
            self.module.run_command(self.yum_basecmd + ['clean', 'expire-cache'])

        # touching the repo objects forces yum to validate repo access early
        try:
            current_repos = self.yum_base.repos.repos.keys()
            if self.enablerepo:
                try:
                    new_repos = self.yum_base.repos.repos.keys()
                    for i in new_repos:
                        if i not in current_repos:
                            rid = self.yum_base.repos.getRepo(i)
                            a = rid.repoXML.repoid  # nopep8 - https://github.com/ansible/ansible/pull/21475#pullrequestreview-22404868
                    current_repos = new_repos
                except yum.Errors.YumBaseError as e:
                    self.module.fail_json(msg="Error setting/accessing repos: %s" % to_native(e))
        except yum.Errors.YumBaseError as e:
            self.module.fail_json(msg="Error accessing repos: %s" % to_native(e))
    if self.state == 'latest' or self.update_only:
        if self.disable_gpg_check:
            self.yum_basecmd.append('--nogpgcheck')
        if self.security:
            self.yum_basecmd.append('--security')
        if self.bugfix:
            self.yum_basecmd.append('--bugfix')
        res = self.latest(pkgs, repoq)
    elif self.state in ('installed', 'present'):
        if self.disable_gpg_check:
            self.yum_basecmd.append('--nogpgcheck')
        res = self.install(pkgs, repoq)
    elif self.state in ('removed', 'absent'):
        res = self.remove(pkgs, repoq)
    else:
        # should be caught by AnsibleModule argument_spec
        self.module.fail_json(
            msg="we should never get here unless this all failed",
            changed=False,
            results='',
            errors='unexpected state'
        )
    return res
@staticmethod
def has_yum():
    # True when the Python 2 'yum' bindings were importable at module load.
    return HAS_YUM_PYTHON
def run(self):
    """
    actually execute the module code backend

    Respawns under /usr/bin/python if the rpm/yum bindings are missing,
    sets up the base yum command line, handles pure cache updates and
    ``list=`` queries, and otherwise delegates to ensure().  Always exits
    via exit_json()/fail_json().
    """
    if (not HAS_RPM_PYTHON or not HAS_YUM_PYTHON) and sys.executable != '/usr/bin/python' and not has_respawned():
        respawn_module('/usr/bin/python')
        # end of the line for this process; we'll exit here once the respawned module has completed

    error_msgs = []
    if not HAS_RPM_PYTHON:
        error_msgs.append('The Python 2 bindings for rpm are needed for this module. If you require Python 3 support use the `dnf` Ansible module instead.')
    if not HAS_YUM_PYTHON:
        error_msgs.append('The Python 2 yum module is needed for this module. If you require Python 3 support use the `dnf` Ansible module instead.')

    self.wait_for_lock()

    if error_msgs:
        self.module.fail_json(msg='. '.join(error_msgs))

    # fedora will redirect yum to dnf, which has incompatibilities
    # with how this module expects yum to operate. If yum-deprecated
    # is available, use that instead to emulate the old behaviors.
    if self.module.get_bin_path('yum-deprecated'):
        yumbin = self.module.get_bin_path('yum-deprecated')
    else:
        yumbin = self.module.get_bin_path('yum')

    # need debug level 2 to get 'Nothing to do' for groupinstall.
    self.yum_basecmd = [yumbin, '-d', '2', '-y']

    # update_cache on its own (no name/list): just refresh and exit
    if self.update_cache and not self.names and not self.list:
        rc, stdout, stderr = self.module.run_command(self.yum_basecmd + ['clean', 'expire-cache'])
        if rc == 0:
            self.module.exit_json(
                changed=False,
                msg="Cache updated",
                rc=rc,
                results=[]
            )
        else:
            self.module.exit_json(
                changed=False,
                msg="Failed to update cache",
                rc=rc,
                results=[stderr],
            )

    repoquerybin = self.module.get_bin_path('repoquery', required=False)

    # optionally bootstrap yum-utils to obtain repoquery
    if self.install_repoquery and not repoquerybin and not self.module.check_mode:
        yum_path = self.module.get_bin_path('yum')
        if yum_path:
            if self.releasever:
                self.module.run_command('%s -y install yum-utils --releasever %s' % (yum_path, self.releasever))
            else:
                self.module.run_command('%s -y install yum-utils' % yum_path)
        repoquerybin = self.module.get_bin_path('repoquery', required=False)

    if self.list:
        if not repoquerybin:
            self.module.fail_json(msg="repoquery is required to use list= with this module. Please install the yum-utils package.")
        results = {'results': self.list_stuff(repoquerybin, self.list)}
    else:
        # If rhn-plugin is installed and no rhn-certificate is available on
        # the system then users will see an error message using the yum API.
        # Use repoquery in those cases.
        repoquery = None
        try:
            yum_plugins = self.yum_base.plugins._plugins
        except AttributeError:
            pass
        else:
            if 'rhnplugin' in yum_plugins:
                if repoquerybin:
                    repoquery = [repoquerybin, '--show-duplicates', '--plugins', '--quiet']
                    if self.installroot != '/':
                        repoquery.extend(['--installroot', self.installroot])

                    if self.disable_excludes:
                        # repoquery does not support --disableexcludes,
                        # so make a temp copy of yum.conf and get rid of the 'exclude=' line there
                        try:
                            with open('/etc/yum.conf', 'r') as f:
                                content = f.readlines()

                            tmp_conf_file = tempfile.NamedTemporaryFile(dir=self.module.tmpdir, delete=False)
                            self.module.add_cleanup_file(tmp_conf_file.name)

                            tmp_conf_file.writelines([c for c in content if not c.startswith("exclude=")])
                            tmp_conf_file.close()
                        except Exception as e:
                            self.module.fail_json(msg="Failure setting up repoquery: %s" % to_native(e))

                        repoquery.extend(['-c', tmp_conf_file.name])

        results = self.ensure(repoquery)
        if repoquery:
            results['msg'] = '%s %s' % (
                results.get('msg', ''),
                'Warning: Due to potential bad behaviour with rhnplugin and certificates, used slower repoquery calls instead of Yum API.'
            )

    self.module.exit_json(**results)
def main():
    """Module entry point.

    Package operations:
        state=installed|removed|latest name=pkgspec

    Informational commands:
        list=installed|updates|available|repos|pkgspec
    """
    # The yum backend additionally understands the use_backend switch.
    yumdnf_argument_spec['argument_spec']['use_backend'] = dict(
        default='auto', choices=['auto', 'yum', 'yum4', 'dnf'])

    module = AnsibleModule(**yumdnf_argument_spec)

    YumModule(module).run()
# Allow direct execution as a standalone AnsibleModule script.
if __name__ == '__main__':
    main()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,284 |
Yum module only reports upgraded x86_64 packages when i686 packages have also been upgraded
|
<!--- Verify first that your issue is not already reported on GitHub -->
<!--- Also test if the latest release and devel branch are affected too -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
I have created an Ansible playbook that invokes the `yum` module with the arguments `name="*" state=latest update_cache=yes update_only=yes`. The managed node is a 64-bit CentOS 7 system containing some i686 packages installed for compatibility with third-party 32-bit applications.
If 32-bit packages are available for upgrade, the `yum` module upgrades them alongside available 64-bit packages; however, it only reports that 64-bit packages have been upgraded in the transaction. It should report upgraded 32-bit packages as well.
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
yum module
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible 2.10.4
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/amg1127/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.9/site-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.1 (default, Dec 13 2020, 11:55:53) [GCC 10.2.0]
```
##### CONFIGURATION
<!--- Paste verbatim output from "ansible-config dump --only-changed" between quotes -->
```paste below
# (no output)
```
##### OS / ENVIRONMENT
<!--- Provide all relevant information below, e.g. target OS versions, network device firmware, etc. -->
Controller node: Arch Linux
Managed node: CentOS 7
##### STEPS TO REPRODUCE
<!--- Describe exactly how to reproduce the problem, using a minimal test-case -->
The issue can be reproduced using the below Vagrantfile and Ansible playbook.
```ruby
# Vagrantfile
Vagrant.configure("2") do |config|
config.vm.box = "centos/7"
config.vm.box_version = "1809.1"
config.vm.provider "virtualbox" do |vb|
vb.gui = true
vb.memory = "1024"
end
config.vm.synced_folder ".", "/vagrant"
config.vm.provision "ansible" do |ansible|
ansible.playbook = "./ansible_playbook.yml"
end
end
```
<!--- Paste example playbooks or commands between quotes below -->
```yaml
# ansible_playbook.yml
---
- hosts: all
gather_facts: no
vars:
packages:
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/glibc;2.17-260.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/glibc-common;2.17-260.el7.x86_64.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/libgcc;4.8.5-36.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/libstdc++;4.8.5-36.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/ncurses-libs;5.9-14.20130511.el7_4.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nspr;4.19.0-1.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-softokn;3.36.0-5.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-softokn-freebl;3.36.0-5.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-util;3.36.0-1.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/readline;6.2-10.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/sqlite;3.7.17-8.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/zlib;1.2.7-18.el7.i686.rpm'
tasks:
- name: Clean yum cache
shell: 'yum clean all; rm -Rfv /var/cache/yum'
become: yes
- name: Install some i686 packages alongside x86_64 ones
command:
argv: "{{ ['yum', '-y', 'localinstall', '--disablerepo=*'] + (packages | union(
packages | map('regex_replace', '\\.i686\\.rpm$', '.x86_64.rpm') | list) | map('replace', ';', '-') | list) }}"
become: yes
- name: Update all packages to the latest version
yum:
name: '*'
state: latest
update_cache: yes
update_only: yes
register: updated_packages
retries: 5
delay: 1
until: 'updated_packages is success'
become: yes
- name: Report the list of updated packages
debug:
msg:
- 'The packages below were updated:'
- "{{ updated_packages.changes.updated | default([]) | selectattr(0, 'in', (
packages | map('urlsplit', 'path') | map('basename') |
map('regex_replace', '^([^;]+);.*$', '\\1') | list
)) | list }}"
```
Place those files under an empty folder, install `Vagrant` and `VirtualBox` and launch the command line `vagrant up --provision`.
<!--- HINT: You can paste gist.github.com links for larger files -->
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
`yum` module should provide the below output:
```
TASK [Report the list of updated packages] *************************************
ok: [default] => {
"msg": [
"The packages below were updated:",
[
[
"nspr",
"4.25.0-2.el7_9.i686 from updates"
],
[
"nspr",
"4.25.0-2.el7_9.x86_64 from updates"
],
[
"glibc",
"2.17-317.el7.i686 from base"
],
[
"glibc",
"2.17-317.el7.x86_64 from base"
],
[
"glibc-common",
"2.17-317.el7.x86_64 from base"
],
[
"nss-softokn",
"3.53.1-6.el7_9.i686 from updates"
],
[
"nss-softokn",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"libstdc++",
"4.8.5-44.el7.i686 from base"
],
[
"libstdc++",
"4.8.5-44.el7.x86_64 from base"
],
[
"nss-util",
"3.53.1-1.el7_9.i686 from updates"
],
[
"nss-util",
"3.53.1-1.el7_9.x86_64 from updates"
],
[
"libgcc",
"4.8.5-44.el7.i686 from base"
],
[
"libgcc",
"4.8.5-44.el7.x86_64 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.i686 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.x86_64 from base"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.i686 from updates"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"readline",
"6.2-11.el7.i686 from base"
],
[
"readline",
"6.2-11.el7.x86_64 from base"
]
]
]
}
```
##### ACTUAL RESULTS
<!--- Describe what actually happened. If possible run with extra verbosity (-vvvv) -->
`yum` module provides the below output:
<!--- Paste verbatim command output between quotes -->
```
TASK [Report the list of updated packages] *************************************
ok: [default] => {
"msg": [
"The packages below were updated:",
[
[
"nspr",
"4.25.0-2.el7_9.x86_64 from updates"
],
[
"glibc",
"2.17-317.el7.x86_64 from base"
],
[
"glibc-common",
"2.17-317.el7.x86_64 from base"
],
[
"nss-softokn",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"libstdc++",
"4.8.5-44.el7.x86_64 from base"
],
[
"nss-util",
"3.53.1-1.el7_9.x86_64 from updates"
],
[
"libgcc",
"4.8.5-44.el7.x86_64 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.x86_64 from base"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"readline",
"6.2-11.el7.x86_64 from base"
]
]
]
}
```
|
https://github.com/ansible/ansible/issues/73284
|
https://github.com/ansible/ansible/pull/73548
|
8c413749fc1062f293a77482710f22c234dd3ebd
|
3504f4c45fc044b3ffd3fc96f02a9f261ec87048
| 2021-01-19T08:22:32Z |
python
| 2021-04-10T07:36:20Z |
test/integration/targets/yum/filter_plugins/filter_list_of_tuples_by_first_param.py
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,284 |
Yum module only reports upgraded x86_64 packages when i686 packages have also been upgraded
|
<!--- Verify first that your issue is not already reported on GitHub -->
<!--- Also test if the latest release and devel branch are affected too -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
I have created an Ansible playbook that invokes the `yum` module with the arguments `name="*" state=latest update_cache=yes update_only=yes`. The managed node is a 64-bit CentOS 7 system containing some i686 packages installed for compatibility with third-party 32-bit applications.
If 32-bit packages are available for upgrade, `yum` modules upgrades them alongside available 64-bit packages, however it only reports that 64-bit packages have been upgraded in the transaction. It should report upgraded 32-bit packages as well.
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
yum module
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible 2.10.4
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/amg1127/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.9/site-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.1 (default, Dec 13 2020, 11:55:53) [GCC 10.2.0]
```
##### CONFIGURATION
<!--- Paste verbatim output from "ansible-config dump --only-changed" between quotes -->
```paste below
# (no output)
```
##### OS / ENVIRONMENT
<!--- Provide all relevant information below, e.g. target OS versions, network device firmware, etc. -->
Controller node: Arch Linux
Managed node: CentOS 7
##### STEPS TO REPRODUCE
<!--- Describe exactly how to reproduce the problem, using a minimal test-case -->
The issue can be reproduced using the below Vagrantfile and Ansible playbook.
```ruby
# Vagrantfile
Vagrant.configure("2") do |config|
config.vm.box = "centos/7"
config.vm.box_version = "1809.1"
config.vm.provider "virtualbox" do |vb|
vb.gui = true
vb.memory = "1024"
end
config.vm.synced_folder ".", "/vagrant"
config.vm.provision "ansible" do |ansible|
ansible.playbook = "./ansible_playbook.yml"
end
end
```
<!--- Paste example playbooks or commands between quotes below -->
```yaml
# ansible_playbook.yml
---
- hosts: all
gather_facts: no
vars:
packages:
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/glibc;2.17-260.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/glibc-common;2.17-260.el7.x86_64.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/libgcc;4.8.5-36.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/libstdc++;4.8.5-36.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/ncurses-libs;5.9-14.20130511.el7_4.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nspr;4.19.0-1.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-softokn;3.36.0-5.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-softokn-freebl;3.36.0-5.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-util;3.36.0-1.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/readline;6.2-10.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/sqlite;3.7.17-8.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/zlib;1.2.7-18.el7.i686.rpm'
tasks:
- name: Clean yum cache
shell: 'yum clean all; rm -Rfv /var/cache/yum'
become: yes
- name: Install some i686 packages alongside x86_64 ones
command:
argv: "{{ ['yum', '-y', 'localinstall', '--disablerepo=*'] + (packages | union(
packages | map('regex_replace', '\\.i686\\.rpm$', '.x86_64.rpm') | list) | map('replace', ';', '-') | list) }}"
become: yes
- name: Update all packages to the latest version
yum:
name: '*'
state: latest
update_cache: yes
update_only: yes
register: updated_packages
retries: 5
delay: 1
until: 'updated_packages is success'
become: yes
- name: Report the list of updated packages
debug:
msg:
- 'The packages below were updated:'
- "{{ updated_packages.changes.updated | default([]) | selectattr(0, 'in', (
packages | map('urlsplit', 'path') | map('basename') |
map('regex_replace', '^([^;]+);.*$', '\\1') | list
)) | list }}"
```
Place those files under an empty folder, install `Vagrant` and `VirtualBox` and launch the command line `vagrant up --provision`.
<!--- HINT: You can paste gist.github.com links for larger files -->
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
`yum` module should provide the below output:
```
TASK [Report the list of updated packages] *************************************
ok: [default] => {
"msg": [
"The packages below were updated:",
[
[
"nspr",
"4.25.0-2.el7_9.i686 from updates"
],
[
"nspr",
"4.25.0-2.el7_9.x86_64 from updates"
],
[
"glibc",
"2.17-317.el7.i686 from base"
],
[
"glibc",
"2.17-317.el7.x86_64 from base"
],
[
"glibc-common",
"2.17-317.el7.x86_64 from base"
],
[
"nss-softokn",
"3.53.1-6.el7_9.i686 from updates"
],
[
"nss-softokn",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"libstdc++",
"4.8.5-44.el7.i686 from base"
],
[
"libstdc++",
"4.8.5-44.el7.x86_64 from base"
],
[
"nss-util",
"3.53.1-1.el7_9.i686 from updates"
],
[
"nss-util",
"3.53.1-1.el7_9.x86_64 from updates"
],
[
"libgcc",
"4.8.5-44.el7.i686 from base"
],
[
"libgcc",
"4.8.5-44.el7.x86_64 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.i686 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.x86_64 from base"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.i686 from updates"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"readline",
"6.2-11.el7.i686 from base"
],
[
"readline",
"6.2-11.el7.x86_64 from base"
]
]
]
}
```
##### ACTUAL RESULTS
<!--- Describe what actually happened. If possible run with extra verbosity (-vvvv) -->
`yum` module provides the below output:
<!--- Paste verbatim command output between quotes -->
```
TASK [Report the list of updated packages] *************************************
ok: [default] => {
"msg": [
"The packages below were updated:",
[
[
"nspr",
"4.25.0-2.el7_9.x86_64 from updates"
],
[
"glibc",
"2.17-317.el7.x86_64 from base"
],
[
"glibc-common",
"2.17-317.el7.x86_64 from base"
],
[
"nss-softokn",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"libstdc++",
"4.8.5-44.el7.x86_64 from base"
],
[
"nss-util",
"3.53.1-1.el7_9.x86_64 from updates"
],
[
"libgcc",
"4.8.5-44.el7.x86_64 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.x86_64 from base"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"readline",
"6.2-11.el7.x86_64 from base"
]
]
]
}
```
|
https://github.com/ansible/ansible/issues/73284
|
https://github.com/ansible/ansible/pull/73548
|
8c413749fc1062f293a77482710f22c234dd3ebd
|
3504f4c45fc044b3ffd3fc96f02a9f261ec87048
| 2021-01-19T08:22:32Z |
python
| 2021-04-10T07:36:20Z |
test/integration/targets/yum/tasks/main.yml
|
# (c) 2014, James Tanner <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Note: We install the yum package onto Fedora so that this will work on dnf systems
# We want to test that for people who don't want to upgrade their systems.
- block:
- name: ensure test packages are removed before starting
yum:
name:
- sos
state: absent
- import_tasks: yum.yml
always:
- name: remove installed packages
yum:
name:
- sos
state: absent
- name: remove installed group
yum:
name: "@Custom Group"
state: absent
- name: On Fedora 28 the above won't remove the group which results in a failure in repo.yml below
yum:
name: dinginessentail
state: absent
when:
- ansible_distribution in ['Fedora']
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux', 'Fedora']
- block:
- import_tasks: repo.yml
- import_tasks: yum_group_remove.yml
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux']
always:
- yum_repository:
name: "{{ item }}"
state: absent
loop: "{{ repos }}"
- command: yum clean metadata
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux', 'Fedora']
- import_tasks: yuminstallroot.yml
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux', 'Fedora']
- import_tasks: proxy.yml
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux', 'Fedora']
- import_tasks: check_mode_consistency.yml
when:
- (ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux'] and ansible_distribution_major_version|int == 7)
- import_tasks: lock.yml
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux']
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,284 |
Yum module only reports upgraded x86_64 packages when i686 packages have also been upgraded
|
<!--- Verify first that your issue is not already reported on GitHub -->
<!--- Also test if the latest release and devel branch are affected too -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
I have created an Ansible playbook that invokes the `yum` module with the arguments `name="*" state=latest update_cache=yes update_only=yes`. The managed node is a 64-bit CentOS 7 system containing some i686 packages installed for compatibility with third-party 32-bit applications.
If 32-bit packages are available for upgrade, `yum` modules upgrades them alongside available 64-bit packages, however it only reports that 64-bit packages have been upgraded in the transaction. It should report upgraded 32-bit packages as well.
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
yum module
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible 2.10.4
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/amg1127/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.9/site-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.1 (default, Dec 13 2020, 11:55:53) [GCC 10.2.0]
```
##### CONFIGURATION
<!--- Paste verbatim output from "ansible-config dump --only-changed" between quotes -->
```paste below
# (no output)
```
##### OS / ENVIRONMENT
<!--- Provide all relevant information below, e.g. target OS versions, network device firmware, etc. -->
Controller node: Arch Linux
Managed node: CentOS 7
##### STEPS TO REPRODUCE
<!--- Describe exactly how to reproduce the problem, using a minimal test-case -->
The issue can be reproduced using the below Vagrantfile and Ansible playbook.
```ruby
# Vagrantfile
Vagrant.configure("2") do |config|
config.vm.box = "centos/7"
config.vm.box_version = "1809.1"
config.vm.provider "virtualbox" do |vb|
vb.gui = true
vb.memory = "1024"
end
config.vm.synced_folder ".", "/vagrant"
config.vm.provision "ansible" do |ansible|
ansible.playbook = "./ansible_playbook.yml"
end
end
```
<!--- Paste example playbooks or commands between quotes below -->
```yaml
# ansible_playbook.yml
---
- hosts: all
gather_facts: no
vars:
packages:
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/glibc;2.17-260.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/glibc-common;2.17-260.el7.x86_64.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/libgcc;4.8.5-36.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/libstdc++;4.8.5-36.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/ncurses-libs;5.9-14.20130511.el7_4.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nspr;4.19.0-1.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-softokn;3.36.0-5.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-softokn-freebl;3.36.0-5.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-util;3.36.0-1.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/readline;6.2-10.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/sqlite;3.7.17-8.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/zlib;1.2.7-18.el7.i686.rpm'
tasks:
- name: Clean yum cache
shell: 'yum clean all; rm -Rfv /var/cache/yum'
become: yes
- name: Install some i686 packages alongside x86_64 ones
command:
argv: "{{ ['yum', '-y', 'localinstall', '--disablerepo=*'] + (packages | union(
packages | map('regex_replace', '\\.i686\\.rpm$', '.x86_64.rpm') | list) | map('replace', ';', '-') | list) }}"
become: yes
- name: Update all packages to the latest version
yum:
name: '*'
state: latest
update_cache: yes
update_only: yes
register: updated_packages
retries: 5
delay: 1
until: 'updated_packages is success'
become: yes
- name: Report the list of updated packages
debug:
msg:
- 'The packages below were updated:'
- "{{ updated_packages.changes.updated | default([]) | selectattr(0, 'in', (
packages | map('urlsplit', 'path') | map('basename') |
map('regex_replace', '^([^;]+);.*$', '\\1') | list
)) | list }}"
```
Place those files under an empty folder, install `Vagrant` and `VirtualBox` and launch the command line `vagrant up --provision`.
<!--- HINT: You can paste gist.github.com links for larger files -->
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
`yum` module should provide the below output:
```
TASK [Report the list of updated packages] *************************************
ok: [default] => {
"msg": [
"The packages below were updated:",
[
[
"nspr",
"4.25.0-2.el7_9.i686 from updates"
],
[
"nspr",
"4.25.0-2.el7_9.x86_64 from updates"
],
[
"glibc",
"2.17-317.el7.i686 from base"
],
[
"glibc",
"2.17-317.el7.x86_64 from base"
],
[
"glibc-common",
"2.17-317.el7.x86_64 from base"
],
[
"nss-softokn",
"3.53.1-6.el7_9.i686 from updates"
],
[
"nss-softokn",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"libstdc++",
"4.8.5-44.el7.i686 from base"
],
[
"libstdc++",
"4.8.5-44.el7.x86_64 from base"
],
[
"nss-util",
"3.53.1-1.el7_9.i686 from updates"
],
[
"nss-util",
"3.53.1-1.el7_9.x86_64 from updates"
],
[
"libgcc",
"4.8.5-44.el7.i686 from base"
],
[
"libgcc",
"4.8.5-44.el7.x86_64 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.i686 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.x86_64 from base"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.i686 from updates"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"readline",
"6.2-11.el7.i686 from base"
],
[
"readline",
"6.2-11.el7.x86_64 from base"
]
]
]
}
```
##### ACTUAL RESULTS
<!--- Describe what actually happened. If possible run with extra verbosity (-vvvv) -->
`yum` module provides the below output:
<!--- Paste verbatim command output between quotes -->
```
TASK [Report the list of updated packages] *************************************
ok: [default] => {
"msg": [
"The packages below were updated:",
[
[
"nspr",
"4.25.0-2.el7_9.x86_64 from updates"
],
[
"glibc",
"2.17-317.el7.x86_64 from base"
],
[
"glibc-common",
"2.17-317.el7.x86_64 from base"
],
[
"nss-softokn",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"libstdc++",
"4.8.5-44.el7.x86_64 from base"
],
[
"nss-util",
"3.53.1-1.el7_9.x86_64 from updates"
],
[
"libgcc",
"4.8.5-44.el7.x86_64 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.x86_64 from base"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"readline",
"6.2-11.el7.x86_64 from base"
]
]
]
}
```
|
https://github.com/ansible/ansible/issues/73284
|
https://github.com/ansible/ansible/pull/73548
|
8c413749fc1062f293a77482710f22c234dd3ebd
|
3504f4c45fc044b3ffd3fc96f02a9f261ec87048
| 2021-01-19T08:22:32Z |
python
| 2021-04-10T07:36:20Z |
test/integration/targets/yum/tasks/multiarch.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,284 |
Yum module only reports upgraded x86_64 packages when i686 packages have also been upgraded
|
<!--- Verify first that your issue is not already reported on GitHub -->
<!--- Also test if the latest release and devel branch are affected too -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
I have created an Ansible playbook that invokes the `yum` module with the arguments `name="*" state=latest update_cache=yes update_only=yes`. The managed node is a 64-bit CentOS 7 system containing some i686 packages installed for compatibility with third-party 32-bit applications.
If 32-bit packages are available for upgrade, `yum` modules upgrades them alongside available 64-bit packages, however it only reports that 64-bit packages have been upgraded in the transaction. It should report upgraded 32-bit packages as well.
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
yum module
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible 2.10.4
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/amg1127/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.9/site-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.1 (default, Dec 13 2020, 11:55:53) [GCC 10.2.0]
```
##### CONFIGURATION
<!--- Paste verbatim output from "ansible-config dump --only-changed" between quotes -->
```paste below
# (no output)
```
##### OS / ENVIRONMENT
<!--- Provide all relevant information below, e.g. target OS versions, network device firmware, etc. -->
Controller node: Arch Linux
Managed node: CentOS 7
##### STEPS TO REPRODUCE
<!--- Describe exactly how to reproduce the problem, using a minimal test-case -->
The issue can be reproduced using the below Vagrantfile and Ansible playbook.
```ruby
# Vagrantfile
Vagrant.configure("2") do |config|
config.vm.box = "centos/7"
config.vm.box_version = "1809.1"
config.vm.provider "virtualbox" do |vb|
vb.gui = true
vb.memory = "1024"
end
config.vm.synced_folder ".", "/vagrant"
config.vm.provision "ansible" do |ansible|
ansible.playbook = "./ansible_playbook.yml"
end
end
```
<!--- Paste example playbooks or commands between quotes below -->
```yaml
# ansible_playbook.yml
---
- hosts: all
gather_facts: no
vars:
packages:
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/glibc;2.17-260.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/glibc-common;2.17-260.el7.x86_64.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/libgcc;4.8.5-36.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/libstdc++;4.8.5-36.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/ncurses-libs;5.9-14.20130511.el7_4.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nspr;4.19.0-1.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-softokn;3.36.0-5.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-softokn-freebl;3.36.0-5.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/nss-util;3.36.0-1.el7_5.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/readline;6.2-10.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/sqlite;3.7.17-8.el7.i686.rpm'
- 'https://vault.centos.org/7.6.1810/os/x86_64/Packages/zlib;1.2.7-18.el7.i686.rpm'
tasks:
- name: Clean yum cache
shell: 'yum clean all; rm -Rfv /var/cache/yum'
become: yes
- name: Install some i686 packages alongside x86_64 ones
command:
argv: "{{ ['yum', '-y', 'localinstall', '--disablerepo=*'] + (packages | union(
packages | map('regex_replace', '\\.i686\\.rpm$', '.x86_64.rpm') | list) | map('replace', ';', '-') | list) }}"
become: yes
- name: Update all packages to the latest version
yum:
name: '*'
state: latest
update_cache: yes
update_only: yes
register: updated_packages
retries: 5
delay: 1
until: 'updated_packages is success'
become: yes
- name: Report the list of updated packages
debug:
msg:
- 'The packages below were updated:'
- "{{ updated_packages.changes.updated | default([]) | selectattr(0, 'in', (
packages | map('urlsplit', 'path') | map('basename') |
map('regex_replace', '^([^;]+);.*$', '\\1') | list
)) | list }}"
```
Place those files under an empty folder, install `Vagrant` and `VirtualBox` and launch the command line `vagrant up --provision`.
<!--- HINT: You can paste gist.github.com links for larger files -->
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
`yum` module should provide the below output:
```
TASK [Report the list of updated packages] *************************************
ok: [default] => {
"msg": [
"The packages below were updated:",
[
[
"nspr",
"4.25.0-2.el7_9.i686 from updates"
],
[
"nspr",
"4.25.0-2.el7_9.x86_64 from updates"
],
[
"glibc",
"2.17-317.el7.i686 from base"
],
[
"glibc",
"2.17-317.el7.x86_64 from base"
],
[
"glibc-common",
"2.17-317.el7.x86_64 from base"
],
[
"nss-softokn",
"3.53.1-6.el7_9.i686 from updates"
],
[
"nss-softokn",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"libstdc++",
"4.8.5-44.el7.i686 from base"
],
[
"libstdc++",
"4.8.5-44.el7.x86_64 from base"
],
[
"nss-util",
"3.53.1-1.el7_9.i686 from updates"
],
[
"nss-util",
"3.53.1-1.el7_9.x86_64 from updates"
],
[
"libgcc",
"4.8.5-44.el7.i686 from base"
],
[
"libgcc",
"4.8.5-44.el7.x86_64 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.i686 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.x86_64 from base"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.i686 from updates"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"readline",
"6.2-11.el7.i686 from base"
],
[
"readline",
"6.2-11.el7.x86_64 from base"
]
]
]
}
```
##### ACTUAL RESULTS
<!--- Describe what actually happened. If possible run with extra verbosity (-vvvv) -->
`yum` module provides the below output:
<!--- Paste verbatim command output between quotes -->
```
TASK [Report the list of updated packages] *************************************
ok: [default] => {
"msg": [
"The packages below were updated:",
[
[
"nspr",
"4.25.0-2.el7_9.x86_64 from updates"
],
[
"glibc",
"2.17-317.el7.x86_64 from base"
],
[
"glibc-common",
"2.17-317.el7.x86_64 from base"
],
[
"nss-softokn",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"libstdc++",
"4.8.5-44.el7.x86_64 from base"
],
[
"nss-util",
"3.53.1-1.el7_9.x86_64 from updates"
],
[
"libgcc",
"4.8.5-44.el7.x86_64 from base"
],
[
"sqlite",
"3.7.17-8.el7_7.1.x86_64 from base"
],
[
"nss-softokn-freebl",
"3.53.1-6.el7_9.x86_64 from updates"
],
[
"readline",
"6.2-11.el7.x86_64 from base"
]
]
]
}
```
|
https://github.com/ansible/ansible/issues/73284
|
https://github.com/ansible/ansible/pull/73548
|
8c413749fc1062f293a77482710f22c234dd3ebd
|
3504f4c45fc044b3ffd3fc96f02a9f261ec87048
| 2021-01-19T08:22:32Z |
python
| 2021-04-10T07:36:20Z |
test/integration/targets/yum/vars/main.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,202 |
Differentiate Ansible vs ansible-core docsites via theme/color changes
|
<!--- Verify first that your improvement is not already reported on GitHub -->
<!--- Also test if the latest release and devel branch are affected too -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
<!--- Explain the problem briefly below, add suggestions to wording or structure -->
It would be very helpful if we could modify the look/feel of the ansible-core docsite once it's ready so that it's obvious to readers which docsite they are looking at.
This is a stretch goal for #72032
<!--- HINT: Did you know the documentation has an "Edit on GitHub" link on every page ? -->
##### ISSUE TYPE
- Documentation Report
##### COMPONENT NAME
<!--- Write the short name of the rst file, module, plugin, task or feature below, use your best guess if unsure -->
docs.ansible.com
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
```
##### CONFIGURATION
<!--- Paste verbatim output from "ansible-config dump --only-changed" between quotes -->
```paste below
```
##### OS / ENVIRONMENT
<!--- Provide all relevant information below, e.g. OS version, browser, etc. -->
##### ADDITIONAL INFORMATION
<!--- Describe how this improves the documentation, e.g. before/after situation or screenshots -->
<!--- HINT: You can paste gist.github.com links for larger files -->
|
https://github.com/ansible/ansible/issues/73202
|
https://github.com/ansible/ansible/pull/74200
|
b752d071633d244f98f327306e160f7915f38829
|
d7f826c987877ec2d050c80c886d167861795b5d
| 2021-01-12T19:58:26Z |
python
| 2021-04-12T20:39:13Z |
docs/docsite/_static/core.css
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,202 |
Differentiate Ansible vs ansible-core docsites via theme/color changes
|
<!--- Verify first that your improvement is not already reported on GitHub -->
<!--- Also test if the latest release and devel branch are affected too -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
<!--- Explain the problem briefly below, add suggestions to wording or structure -->
It would be very helpful if we could modify the look/feel of the ansible-core docsite once it's ready so that it's obvious to readers which docsite they are looking at.
This is a stretch goal for #72032
<!--- HINT: Did you know the documentation has an "Edit on GitHub" link on every page ? -->
##### ISSUE TYPE
- Documentation Report
##### COMPONENT NAME
<!--- Write the short name of the rst file, module, plugin, task or feature below, use your best guess if unsure -->
docs.ansible.com
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
```
##### CONFIGURATION
<!--- Paste verbatim output from "ansible-config dump --only-changed" between quotes -->
```paste below
```
##### OS / ENVIRONMENT
<!--- Provide all relevant information below, e.g. OS version, browser, etc. -->
##### ADDITIONAL INFORMATION
<!--- Describe how this improves the documentation, e.g. before/after situation or screenshots -->
<!--- HINT: You can paste gist.github.com links for larger files -->
|
https://github.com/ansible/ansible/issues/73202
|
https://github.com/ansible/ansible/pull/74200
|
b752d071633d244f98f327306e160f7915f38829
|
d7f826c987877ec2d050c80c886d167861795b5d
| 2021-01-12T19:58:26Z |
python
| 2021-04-12T20:39:13Z |
docs/docsite/sphinx_conf/core_conf.py
|
# -*- coding: utf-8 -*-
#
# documentation build configuration file, created by
# sphinx-quickstart on Sat Sep 27 13:23:22 2008-2009.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed
# automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import os
# pip install sphinx_rtd_theme
# import sphinx_rtd_theme
# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
# sys.path.append(os.path.abspath('some/directory'))
#
sys.path.insert(0, os.path.join('ansible', 'lib'))
sys.path.append(os.path.abspath(os.path.join('..', '_extensions')))
# We want sphinx to document the ansible modules contained in this repository,
# not those that may happen to be installed in the version
# of Python used to run sphinx. When sphinx loads in order to document,
# the repository version needs to be the one that is loaded:
sys.path.insert(0, os.path.abspath(os.path.join('..', '..', '..', 'lib')))
VERSION = 'devel'
AUTHOR = 'Ansible, Inc'
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings.
# They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# TEST: 'sphinxcontrib.fulltoc'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'pygments_lexer', 'notfound.extension']
# Later on, add 'sphinx.ext.viewcode' to the list if you want to have
# colorized code generated too for references.
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = 'Ansible'
copyright = "2021 Red Hat, Inc."
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# The short X.Y version.
version = VERSION
# The full version, including alpha/beta/rc tags.
release = VERSION
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
# unused_docs = []
# List of directories, relative to source directories, that shouldn't be
# searched for source files.
# exclude_dirs = []
# A list of glob-style patterns that should be excluded when looking
# for source files.
exclude_patterns = [
'2.10_index.rst',
'ansible_index.rst',
'core_index.rst',
'galaxy',
'network',
'scenario_guides',
'porting_guides/porting_guides.rst',
'porting_guides/porting_guide_2*',
'porting_guides/porting_guide_3.rst',
'porting_guides/porting_guide_4.rst',
'roadmap/index.rst',
'roadmap/ansible_roadmap_index.rst',
'roadmap/old_roadmap_index.rst',
'roadmap/ROADMAP_2_5.rst',
'roadmap/ROADMAP_2_6.rst',
'roadmap/ROADMAP_2_7.rst',
'roadmap/ROADMAP_2_8.rst',
'roadmap/ROADMAP_2_9.rst',
'roadmap/COLLECTIONS*'
]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
highlight_language = 'YAML+Jinja'
# Substitutions, variables, entities, & shortcuts for text which do not need to link to anything.
# For titles which should be a link, use the intersphinx anchors set at the index, chapter, and section levels, such as qi_start_:
# |br| is useful for formatting fields inside of tables
# |_| is a nonbreaking space; similarly useful inside of tables
rst_epilog = """
.. |br| raw:: html
<br>
.. |_| unicode:: 0xA0
:trim:
"""
# Options for HTML output
# -----------------------
html_theme_path = ['../_themes']
html_theme = 'sphinx_rtd_theme'
html_short_title = 'Ansible Core Documentation'
html_show_sphinx = False
html_theme_options = {
'canonical_url': "https://docs.ansible.com/ansible/latest/",
'vcs_pageview_mode': 'edit'
}
html_context = {
'display_github': 'True',
'github_user': 'ansible',
'github_repo': 'ansible',
'github_version': 'devel/docs/docsite/rst/',
'github_module_version': 'devel/lib/ansible/modules/',
'github_root_dir': 'devel/lib/ansible',
'github_cli_version': 'devel/lib/ansible/cli/',
'current_version': version,
'latest_version': '2.10',
# list specifically out of order to make latest work
'available_versions': ('2.10', 'devel',),
'css_files': ('_static/ansible.css', # overrides to the standard theme
),
}
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
# html_style = 'solar.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'Ansible Core Documentation'
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (within the static path) to place at the top of
# the sidebar.
# html_logo =
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = 'favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['../_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_use_modindex = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
html_copy_source = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = 'https://docs.ansible.com/ansible/latest'
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Poseidodoc'
# Configuration for sphinx-notfound-pages
# with no 'notfound_template' and no 'notfound_context' set,
# the extension builds 404.rst into a location-agnostic 404 page
#
# default is `en` - using this for the sub-site:
notfound_default_language = "ansible"
# default is `latest`:
# setting explicitly - docsite serves up /ansible/latest/404.html
# so keep this set to `latest` even on the `devel` branch
# then no maintenance is needed when we branch a new stable_x.x
notfound_default_version = "latest"
# makes default setting explicit:
notfound_no_urls_prefix = False
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class
# [howto/manual]).
latex_documents = [
('index', 'ansible.tex', 'Ansible 2.2 Documentation', AUTHOR, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# Additional stuff for the LaTeX preamble.
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_use_modindex = True
autoclass_content = 'both'
# Note: Our strategy for intersphinx mappings is to have the upstream build location as the
# canonical source and then cached copies of the mapping stored locally in case someone is building
# when disconnected from the internet. We then have a script to update the cached copies.
#
# Because of that, each entry in this mapping should have this format:
# name: ('http://UPSTREAM_URL', (None, 'path/to/local/cache.inv'))
#
# The update script depends on this format so deviating from this (for instance, adding a third
# location for the mappning to live) will confuse it.
intersphinx_mapping = {'python': ('https://docs.python.org/2/', (None, '../python2.inv')),
'python3': ('https://docs.python.org/3/', (None, '../python3.inv')),
'jinja2': ('http://jinja.palletsprojects.com/', (None, '../jinja2.inv')),
'ansible_2_10': ('https://docs.ansible.com/ansible/2.10/', (None, '../ansible_2_10.inv')),
'ansible_2_9': ('https://docs.ansible.com/ansible/2.9/', (None, '../ansible_2_9.inv')),
'ansible_2_8': ('https://docs.ansible.com/ansible/2.8/', (None, '../ansible_2_8.inv')),
'ansible_2_7': ('https://docs.ansible.com/ansible/2.7/', (None, '../ansible_2_7.inv')),
'ansible_2_6': ('https://docs.ansible.com/ansible/2.6/', (None, '../ansible_2_6.inv')),
'ansible_2_5': ('https://docs.ansible.com/ansible/2.5/', (None, '../ansible_2_5.inv')),
}
# linckchecker settings
linkcheck_ignore = [
r'http://irc\.freenode\.net',
]
linkcheck_workers = 25
# linkcheck_anchors = False
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,145 |
fact_cache contains deprecated call to be removed in 2.12
|
##### SUMMARY
fact_cache contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/vars/fact_cache.py:99:12: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/vars/fact_cache.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74145
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:08Z |
python
| 2021-04-13T15:08:20Z |
changelogs/fragments/cache-deprecations.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,145 |
fact_cache contains deprecated call to be removed in 2.12
|
##### SUMMARY
fact_cache contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/vars/fact_cache.py:99:12: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/vars/fact_cache.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74145
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:08Z |
python
| 2021-04-13T15:08:20Z |
lib/ansible/plugins/cache/__init__.py
|
# (c) 2014, Michael DeHaan <[email protected]>
# (c) 2018, Ansible Project
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import os
import time
import errno
from abc import ABCMeta, abstractmethod
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.six import with_metaclass
from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.plugins import AnsiblePlugin
from ansible.plugins.loader import cache_loader
from ansible.utils.collection_loader import resource_from_fqcr
from ansible.utils.display import Display
from ansible.vars.fact_cache import FactCache as RealFactCache
# Module-level Display instance shared by this module for warnings and
# deprecation messages.
display = Display()
class FactCache(RealFactCache):
    """
    This is for backwards compatibility.  Will be removed after deprecation.  It was removed as it
    wasn't actually part of the cache plugin API.  It's actually the code to make use of cache
    plugins, not the cache plugin itself.  Subclassing it wouldn't yield a usable Cache Plugin and
    there was no facility to use it as anything else.
    """
    def __init__(self, *args, **kwargs):
        # Warn once on construction, then behave exactly like the relocated
        # ansible.vars.fact_cache.FactCache.
        display.deprecated('ansible.plugins.cache.FactCache has been moved to'
                           ' ansible.vars.fact_cache.FactCache.  If you are looking for the class'
                           ' to subclass for a cache plugin, you want'
                           ' ansible.plugins.cache.BaseCacheModule or one of its subclasses.',
                           version='2.12', collection_name='ansible.builtin')
        super(FactCache, self).__init__(*args, **kwargs)
class BaseCacheModule(AnsiblePlugin):
    """Abstract base class that all cache plugins must subclass.

    Concrete plugins implement the key/value primitives below and are
    normally loaded through ansible.plugins.loader.cache_loader.
    """

    # Backwards compat only. Just import the global display instead
    _display = display

    def __init__(self, *args, **kwargs):
        # Third party code is not using cache_loader to load plugin - fall back to previous behavior
        if not hasattr(self, '_load_name'):
            display.deprecated('Rather than importing custom CacheModules directly, use ansible.plugins.loader.cache_loader',
                               version='2.14', collection_name='ansible.builtin')
            # resource_from_fqcr() yields the short plugin name for both plain
            # module paths and fully qualified collection references
            # (ns.coll.name -> name).  The previous intermediate assignment
            # from self.__module__.split('.')[-1] was dead code (immediately
            # overwritten) and has been removed.
            self._load_name = resource_from_fqcr(self.__module__)
        super(BaseCacheModule, self).__init__()
        self.set_options(var_options=args, direct=kwargs)

    @abstractmethod
    def get(self, key):
        """Return the cached value stored under ``key``."""
        pass

    @abstractmethod
    def set(self, key, value):
        """Store ``value`` under ``key``."""
        pass

    @abstractmethod
    def keys(self):
        """Return an iterable of all usable keys in the cache."""
        pass

    @abstractmethod
    def contains(self, key):
        """Return True if ``key`` is present in the cache."""
        pass

    @abstractmethod
    def delete(self, key):
        """Remove ``key`` from the cache."""
        pass

    @abstractmethod
    def flush(self):
        """Remove every entry from the cache."""
        pass

    @abstractmethod
    def copy(self):
        """Return a dict snapshot of the entire cache."""
        pass
class BaseFileCacheModule(BaseCacheModule):
    """
    A caching module backed by file based storage.

    Values are persisted one file per key under a configured cache directory,
    with an in-memory dict (self._cache) layered in front of the files.
    Entries expire based on the file's mtime and the configured timeout.
    """
    def __init__(self, *args, **kwargs):

        try:
            super(BaseFileCacheModule, self).__init__(*args, **kwargs)
            self._cache_dir = self._get_cache_connection(self.get_option('_uri'))
            self._timeout = float(self.get_option('_timeout'))
        except KeyError:
            # Plugin options were not registered; fall back to the global
            # cache settings from ansible.constants.
            self._cache_dir = self._get_cache_connection(C.CACHE_PLUGIN_CONNECTION)
            self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
        self.plugin_name = resource_from_fqcr(self.__module__)
        # In-memory layer: avoids re-reading (and re-expiring) keys that were
        # already fetched during this run.
        self._cache = {}
        self.validate_cache_connection()

    def _get_cache_connection(self, source):
        # Expand ~ and environment variables in the configured path.
        # Returns None when source is falsy or not path-like (TypeError).
        if source:
            try:
                return os.path.expanduser(os.path.expandvars(source))
            except TypeError:
                pass

    def validate_cache_connection(self):
        """Ensure the cache directory exists and is readable/writable/searchable."""
        if not self._cache_dir:
            raise AnsibleError("error, '%s' cache plugin requires the 'fact_caching_connection' config option "
                               "to be set (to a writeable directory path)" % self.plugin_name)

        if not os.path.exists(self._cache_dir):
            try:
                os.makedirs(self._cache_dir)
            except (OSError, IOError) as e:
                raise AnsibleError("error in '%s' cache plugin while trying to create cache dir %s : %s" % (self.plugin_name, self._cache_dir, to_bytes(e)))
        else:
            for x in (os.R_OK, os.W_OK, os.X_OK):
                if not os.access(self._cache_dir, x):
                    raise AnsibleError("error in '%s' cache, configured path (%s) does not have necessary permissions (rwx), disabling plugin" % (
                        self.plugin_name, self._cache_dir))

    def _get_cache_file_name(self, key):
        # On-disk filename is the key, optionally prepended with the
        # configured prefix.
        prefix = self.get_option('_prefix')
        if prefix:
            cachefile = "%s/%s%s" % (self._cache_dir, prefix, key)
        else:
            cachefile = "%s/%s" % (self._cache_dir, key)
        return cachefile

    def get(self, key):
        """ This checks the in memory cache first as the fact was not expired at 'gather time'
        and it would be problematic if the key did expire after some long running tasks and
        user gets 'undefined' error in the same play """

        if key not in self._cache:

            # Empty keys are rejected here as well, since "" would map to the
            # cache directory itself rather than a file.
            if self.has_expired(key) or key == "":
                raise KeyError

            cachefile = self._get_cache_file_name(key)
            try:
                value = self._load(cachefile)
                self._cache[key] = value
            except ValueError as e:
                # Unparseable content: treat as corruption, remove the file and
                # ask the user to re-run rather than silently losing data.
                display.warning("error in '%s' cache plugin while trying to read %s : %s. "
                                "Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
                self.delete(key)
                raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data. "
                                   "It has been removed, so you can re-run your command now." % cachefile)
            except (OSError, IOError) as e:
                display.warning("error in '%s' cache plugin while trying to read %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
                raise KeyError
            except Exception as e:
                raise AnsibleError("Error while decoding the cache file %s: %s" % (cachefile, to_bytes(e)))

        return self._cache.get(key)

    def set(self, key, value):
        # Update memory first, then best-effort persist; a failed disk write
        # only warns so the play can continue with the in-memory value.
        self._cache[key] = value

        cachefile = self._get_cache_file_name(key)
        try:
            self._dump(value, cachefile)
        except (OSError, IOError) as e:
            display.warning("error in '%s' cache plugin while trying to write to %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))

    def has_expired(self, key):
        """Return True when the on-disk entry for ``key`` is older than the timeout.

        A timeout of 0 disables expiry.  A missing file is reported as not
        expired (the subsequent read raises KeyError instead).  Expired keys
        are also evicted from the in-memory layer.
        """
        if self._timeout == 0:
            return False

        cachefile = self._get_cache_file_name(key)
        try:
            st = os.stat(cachefile)
        except (OSError, IOError) as e:
            if e.errno == errno.ENOENT:
                return False
            else:
                display.warning("error in '%s' cache plugin while trying to stat %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
                return False

        if time.time() - st.st_mtime <= self._timeout:
            return False

        if key in self._cache:
            del self._cache[key]
        return True

    def keys(self):
        # NOTE(review): filenames are returned as-is; when a prefix is
        # configured the prefix is part of the listed name, so has_expired()
        # re-prepends it -- confirm callers expect prefixed keys here.
        keys = []
        for k in os.listdir(self._cache_dir):
            if not (k.startswith('.') or self.has_expired(k)):
                keys.append(k)
        return keys

    def contains(self, key):
        cachefile = self._get_cache_file_name(key)

        if key in self._cache:
            return True

        if self.has_expired(key):
            return False
        try:
            os.stat(cachefile)
            return True
        except (OSError, IOError) as e:
            # NOTE(review): non-ENOENT stat errors fall through and return
            # None (falsy) after warning -- confirm that is intended.
            if e.errno == errno.ENOENT:
                return False
            else:
                display.warning("error in '%s' cache plugin while trying to stat %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))

    def delete(self, key):
        # Remove from both the memory layer and disk; missing entries are
        # not an error.
        try:
            del self._cache[key]
        except KeyError:
            pass
        try:
            os.remove(self._get_cache_file_name(key))
        except (OSError, IOError):
            pass  # TODO: only pass on non existing?

    def flush(self):
        self._cache = {}
        for key in self.keys():
            self.delete(key)

    def copy(self):
        # Materialize every (non-expired) key into a plain dict.
        ret = dict()
        for key in self.keys():
            ret[key] = self.get(key)
        return ret

    @abstractmethod
    def _load(self, filepath):
        """
        Read data from a filepath and return it as a value

        :arg filepath: The filepath to read from.
        :returns: The value stored in the filepath

        This method reads from the file on disk and takes care of any parsing
        and transformation of the data before returning it.  The value
        returned should be what Ansible would expect if it were uncached data.

        .. note:: Filehandles have advantages but calling code doesn't know
            whether this file is text or binary, should be decoded, or accessed via
            a library function.  Therefore the API uses a filepath and opens
            the file inside of the method.
        """
        pass

    @abstractmethod
    def _dump(self, value, filepath):
        """
        Write data to a filepath

        :arg value: The value to store
        :arg filepath: The filepath to store it at
        """
        pass
class CachePluginAdjudicator(MutableMapping):
    """
    Intermediary between a cache dictionary and a CacheModule

    Exposes the cache as a plain mutable mapping while lazily loading
    individual keys from the backing cache plugin and tracking which keys
    were retrieved, so only genuine changes are written back.
    """
    def __init__(self, plugin_name='memory', **kwargs):
        self._cache = {}
        self._retrieved = {}

        self._plugin = cache_loader.get(plugin_name, **kwargs)
        if not self._plugin:
            raise AnsibleError('Unable to load the cache plugin (%s).' % plugin_name)

        self._plugin_name = plugin_name

    def update_cache_if_changed(self):
        # Only push to the backing plugin when something changed since the
        # last retrieval/write-back.
        if self._retrieved != self._cache:
            self.set_cache()

    def set_cache(self):
        """Write every top-level key to the plugin and snapshot the state."""
        for top_level_cache_key in self._cache.keys():
            self._plugin.set(top_level_cache_key, self._cache[top_level_cache_key])
        self._retrieved = copy.deepcopy(self._cache)

    def load_whole_cache(self):
        """Populate the in-memory dict with every key the plugin knows about."""
        for key in self._plugin.keys():
            self._cache[key] = self._plugin.get(key)

    def __repr__(self):
        return to_text(self._cache)

    def __iter__(self):
        return iter(self.keys())

    def __len__(self):
        return len(self.keys())

    def _do_load_key(self, key):
        """Return True when ``key`` should be lazily fetched from the plugin.

        Uses short-circuit evaluation so the (potentially expensive) backend
        ``contains`` lookup only runs when the key is not already known
        locally.  The previous implementation built a list, which called
        ``self._plugin.contains`` on every access even for cached keys.
        """
        return (key not in self._cache and
                key not in self._retrieved and
                self._plugin_name != 'memory' and
                self._plugin.contains(key))

    def __getitem__(self, key):
        if self._do_load_key(key):
            try:
                self._cache[key] = self._plugin.get(key)
            except KeyError:
                # Key vanished between contains() and get(); the dict lookup
                # below raises KeyError for the caller.
                pass
            else:
                self._retrieved[key] = self._cache[key]
        return self._cache[key]

    def get(self, key, default=None):
        if self._do_load_key(key):
            try:
                self._cache[key] = self._plugin.get(key)
            except KeyError:
                # Same race as __getitem__; fall through to the default.
                pass
            else:
                self._retrieved[key] = self._cache[key]
        return self._cache.get(key, default)

    def items(self):
        return self._cache.items()

    def values(self):
        return self._cache.values()

    def keys(self):
        return self._cache.keys()

    def pop(self, key, *args):
        # Mirrors dict.pop: optional single positional default.
        if args:
            return self._cache.pop(key, args[0])
        return self._cache.pop(key)

    def __delitem__(self, key):
        del self._cache[key]

    def __setitem__(self, key, value):
        self._cache[key] = value

    def flush(self):
        """Flush the backing plugin and clear the in-memory dict."""
        self._plugin.flush()
        self._cache = {}

    def update(self, value):
        self._cache.update(value)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,145 |
fact_cache contains deprecated call to be removed in 2.12
|
##### SUMMARY
fact_cache contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/vars/fact_cache.py:99:12: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/vars/fact_cache.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74145
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:08Z |
python
| 2021-04-13T15:08:20Z |
lib/ansible/plugins/inventory/__init__.py
|
# (c) 2017, Red Hat, inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import hashlib
import os
import string
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.inventory.group import to_safe_group_name as original_safe
from ansible.parsing.utils.addresses import parse_address
from ansible.plugins import AnsiblePlugin
from ansible.plugins.cache import CachePluginAdjudicator as CacheObject
from ansible.module_utils._text import to_bytes, to_native
from ansible.module_utils.common._collections_compat import Mapping
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.six import string_types
from ansible.template import Templar
from ansible.utils.display import Display
from ansible.utils.vars import combine_vars, load_extra_vars
# Module-level Display instance shared by inventory plugins for warnings
# and deprecation messages.
display = Display()


# Helper methods
def to_safe_group_name(name):
    """Backwards-compatible wrapper around the canonical group-name sanitizer."""
    # Force sanitization and suppress the warning to mirror the historical
    # behavior this shim preserves.
    return original_safe(name, force=True, silent=True)
def detect_range(line=None):
    """Return True when *line* contains a ``[x:y]`` style range pattern.

    Only the presence of the opening bracket is checked here; the full
    pattern is validated by expand_hostname_range().
    """
    return '[' in line


def expand_hostname_range(line=None):
    """Expand a hostname pattern such as ``db[01:10:2]-node`` into a list.

    Supports numeric and alphabetic ranges, zero-padded bounds and an
    optional step; multiple bracketed ranges in one pattern are expanded
    recursively, left to right.

    References: https://docs.ansible.com/ansible/latest/user_guide/intro_inventory.html#hosts-and-groups
    """
    expanded = []
    if line:
        # Split off only the first [...] group; any later groups remain in
        # the suffix and are handled by the recursive call below.  The
        # brackets are swapped for '|' to keep the split simple.
        (prefix, range_spec, suffix) = line.replace('[', '|', 1).replace(']', '|', 1).split('|')

        parts = range_spec.split(":")
        if len(parts) != 2 and len(parts) != 3:
            raise AnsibleError("host range must be begin:end or begin:end:step")

        low = parts[0]
        high = parts[1]
        step = 1 if len(parts) == 2 else parts[2]

        if not low:
            low = "0"
        if not high:
            raise AnsibleError("host range must specify end value")

        if low[0] == '0' and len(low) > 1:
            # Zero-padded numeric range: both bounds must share the width.
            pad_width = len(low)
            if pad_width != len(high):
                raise AnsibleError("host range must specify equal-length begin and end formats")

            def render(value):
                return str(value).zfill(pad_width)  # keep the zero padding
        else:
            render = str

        try:
            start_idx = string.ascii_letters.index(low)
            stop_idx = string.ascii_letters.index(high)
            if start_idx > stop_idx:
                raise AnsibleError("host range must have begin <= end")
            sequence = list(string.ascii_letters[start_idx:stop_idx + 1:int(step)])
        except ValueError:  # bounds are not letters, so treat them as numbers
            sequence = range(int(low), int(high) + 1, int(step))

        for item in sequence:
            candidate = ''.join((prefix, render(item), suffix))
            if detect_range(candidate):
                expanded.extend(expand_hostname_range(candidate))
            else:
                expanded.append(candidate)

    return expanded
def get_cache_plugin(plugin_name, **kwargs):
    """Load a cache plugin wrapped in a CachePluginAdjudicator.

    :arg plugin_name: name of the cache plugin to load
    :kwarg kwargs: options passed through to the plugin (e.g. _uri, _timeout)
    :returns: a CachePluginAdjudicator wrapping the loaded plugin
    :raises AnsibleError: when the plugin cannot be configured for inventory use
    """
    try:
        cache = CacheObject(plugin_name, **kwargs)
    except AnsibleError as e:
        # Re-raise missing-connection errors with inventory-specific guidance.
        if 'fact_caching_connection' in to_native(e):
            raise AnsibleError("error, '%s' inventory cache plugin requires the one of the following to be set "
                               "to a writeable directory path:\nansible.cfg:\n[default]: fact_caching_connection,\n"
                               "[inventory]: cache_connection;\nEnvironment:\nANSIBLE_INVENTORY_CACHE_CONNECTION,\n"
                               "ANSIBLE_CACHE_PLUGIN_CONNECTION." % plugin_name)
        else:
            raise e

    # Plugins that do not register options cannot reconcile the passed
    # settings; refuse rather than silently ignoring user configuration.
    if plugin_name != 'memory' and kwargs and not getattr(cache._plugin, '_options', None):
        raise AnsibleError('Unable to use cache plugin {0} for inventory. Cache options were provided but may not reconcile '
                           'correctly unless set via set_options. Refer to the porting guide if the plugin derives user settings '
                           'from ansible.constants.'.format(plugin_name))
    return cache
class BaseInventoryPlugin(AnsiblePlugin):
    """ Parses an Inventory Source"""

    TYPE = 'generator'

    # 3rd party plugins redefine this to
    # use custom group name sanitization
    # since constructed features enforce
    # it by default.
    _sanitize_group_name = staticmethod(to_safe_group_name)

    def __init__(self):

        super(BaseInventoryPlugin, self).__init__()

        self._options = {}
        self.inventory = None
        self.display = display
        self._vars = {}

    def parse(self, inventory, loader, path, cache=True):
        ''' Populates inventory from the given data. Raises an error on any parse failure
            :arg inventory: a copy of the previously accumulated inventory data,
                 to be updated with any new data this plugin provides.
                 The inventory can be empty if no other source/plugin ran successfully.
            :arg loader: a reference to the DataLoader, which can read in YAML and JSON files,
                 it also has Vault support to automatically decrypt files.
            :arg path: the string that represents the 'inventory source',
                 normally a path to a configuration file for this inventory,
                 but it can also be a raw string for this plugin to consume
            :arg cache: a boolean that indicates if the plugin should use the cache or not
                 you can ignore if this plugin does not implement caching.
        '''

        self.loader = loader
        self.inventory = inventory
        self.templar = Templar(loader=loader)
        self._vars = load_extra_vars(loader)

    def verify_file(self, path):
        ''' Verify if file is usable by this plugin, base does minimal accessibility check

            :arg path: a string that was passed as an inventory source,
                 it normally is a path to a config file, but this is not a requirement,
                 it can also be parsed itself as the inventory data to process.
                 So only call this base class if you expect it to be a file.
        '''

        valid = False
        b_path = to_bytes(path, errors='surrogate_or_strict')
        if (os.path.exists(b_path) and os.access(b_path, os.R_OK)):
            valid = True
        else:
            self.display.vvv('Skipping due to inventory source not existing or not being readable by the current user')
        return valid

    def _populate_host_vars(self, hosts, variables, group=None, port=None):
        # Add each host to the inventory (optionally into a group/with a port)
        # and attach the given host variables.
        if not isinstance(variables, Mapping):
            raise AnsibleParserError("Invalid data from file, expected dictionary and got:\n\n%s" % to_native(variables))

        for host in hosts:
            self.inventory.add_host(host, group=group, port=port)
            for k in variables:
                self.inventory.set_variable(host, k, variables[k])

    def _read_config_data(self, path):
        ''' validate config and set options as appropriate
            :arg path: path to common yaml format config file for this plugin
        '''

        config = {}
        try:
            # avoid loader cache so meta: refresh_inventory can pick up config changes
            # if we read more than once, fs cache should be good enough
            config = self.loader.load_from_file(path, cache=False)
        except Exception as e:
            raise AnsibleParserError(to_native(e))

        # a plugin can be loaded via many different names with redirection- if so, we want to accept any of those names
        valid_names = getattr(self, '_redirected_names') or [self.NAME]

        if not config:
            # no data
            raise AnsibleParserError("%s is empty" % (to_native(path)))
        elif config.get('plugin') not in valid_names:
            # this is not my config file
            raise AnsibleParserError("Incorrect plugin name in file: %s" % config.get('plugin', 'none found'))
        elif not isinstance(config, Mapping):
            # configs are dictionaries
            raise AnsibleParserError('inventory source has invalid structure, it should be a dictionary, got: %s' % type(config))

        self.set_options(direct=config, var_options=self._vars)
        if 'cache' in self._options and self.get_option('cache'):
            # Translate the plugin's cache options into the internal names the
            # cache plugin expects (_uri/_timeout/_prefix), dropping unset ones.
            cache_option_keys = [('_uri', 'cache_connection'), ('_timeout', 'cache_timeout'), ('_prefix', 'cache_prefix')]
            cache_options = dict((opt[0], self.get_option(opt[1])) for opt in cache_option_keys if self.get_option(opt[1]) is not None)
            self._cache = get_cache_plugin(self.get_option('cache_plugin'), **cache_options)

        return config

    def _consume_options(self, data):
        ''' update existing options from alternate configuration sources not normally used by Ansible.
            Many API libraries already have existing configuration sources, this allows plugin author to leverage them.
            :arg data: key/value pairs that correspond to configuration options for this plugin
        '''

        for k in self._options:
            if k in data:
                self._options[k] = data.pop(k)

    def _expand_hostpattern(self, hostpattern):
        '''
        Takes a single host pattern and returns a list of hostnames and an
        optional port number that applies to all of them.
        '''
        # Can the given hostpattern be parsed as a host with an optional port
        # specification?

        try:
            (pattern, port) = parse_address(hostpattern, allow_ranges=True)
        except Exception:
            # not a recognizable host pattern
            pattern = hostpattern
            port = None

        # Once we have separated the pattern, we expand it into list of one or
        # more hostnames, depending on whether it contains any [x:y] ranges.

        if detect_range(pattern):
            hostnames = expand_hostname_range(pattern)
        else:
            hostnames = [pattern]

        return (hostnames, port)
class BaseFileInventoryPlugin(BaseInventoryPlugin):
    """ Parses a File based Inventory Source"""

    # File-backed sources are 'storage' rather than 'generator' type.
    TYPE = 'storage'

    def __init__(self):

        super(BaseFileInventoryPlugin, self).__init__()
class DeprecatedCache(object):
    """Proxy that warns on legacy ``self.cache`` access and forwards to ``self._cache``."""

    def __init__(self, real_cacheable):
        # The Cacheable instance whose _cache dict backs this proxy.
        self.real_cacheable = real_cacheable

    def get(self, key):
        display.deprecated('InventoryModule should utilize self._cache as a dict instead of self.cache. '
                           'When expecting a KeyError, use self._cache[key] instead of using self.cache.get(key). '
                           'self._cache is a dictionary and will return a default value instead of raising a KeyError '
                           'when the key does not exist', version='2.12', collection_name='ansible.builtin')
        return self.real_cacheable._cache[key]

    def set(self, key, value):
        display.deprecated('InventoryModule should utilize self._cache as a dict instead of self.cache. '
                           'To set the self._cache dictionary, use self._cache[key] = value instead of self.cache.set(key, value). '
                           'To force update the underlying cache plugin with the contents of self._cache before parse() is complete, '
                           'call self.set_cache_plugin and it will use the self._cache dictionary to update the cache plugin',
                           version='2.12', collection_name='ansible.builtin')
        self.real_cacheable._cache[key] = value
        self.real_cacheable.set_cache_plugin()

    def __getattr__(self, name):
        # Any other attribute access falls through to the underlying dict,
        # still emitting a deprecation warning.
        display.deprecated('InventoryModule should utilize self._cache instead of self.cache',
                           version='2.12', collection_name='ansible.builtin')
        return self.real_cacheable._cache.__getattribute__(name)
class Cacheable(object):
    """Mixin that equips a plugin with a pluggable cache backend."""

    # Default cache object; replaced by load_cache_plugin() with the real
    # backend configured via the 'cache_plugin' option.
    _cache = CacheObject()

    @property
    def cache(self):
        # Deprecated accessor retained for backwards compatibility; the proxy
        # warns callers to use self._cache directly.
        return DeprecatedCache(self)

    def load_cache_plugin(self):
        """Instantiate the configured cache plugin and attach it to self."""
        plugin_name = self.get_option('cache_plugin')
        # (constructor kwarg, plugin option) pairs feeding the cache backend.
        option_map = (('_uri', 'cache_connection'), ('_timeout', 'cache_timeout'), ('_prefix', 'cache_prefix'))
        cache_options = {}
        for kwarg, option in option_map:
            if self.get_option(option) is not None:
                cache_options[kwarg] = self.get_option(option)
        self._cache = get_cache_plugin(plugin_name, **cache_options)

    def get_cache_key(self, path):
        """Build a cache key combining the plugin name and a source prefix."""
        return "{0}_{1}".format(self.NAME, self._get_cache_prefix(path))

    def _get_cache_prefix(self, path):
        """Create a predictable unique prefix for this plugin/inventory pair."""
        name_hash = hashlib.sha1()
        name_hash.update(to_bytes(self.NAME, errors='surrogate_or_strict'))
        path_hash = hashlib.sha1()
        path_hash.update(to_bytes(path, errors='surrogate_or_strict'))
        # Five hex characters of each digest, joined with the literal 's_'.
        return 's_'.join([name_hash.hexdigest()[:5], path_hash.hexdigest()[:5]])

    def clear_cache(self):
        """Remove every entry from the backing cache."""
        self._cache.flush()

    def update_cache_if_changed(self):
        # Delegate to the backing cache object.
        self._cache.update_cache_if_changed()

    def set_cache_plugin(self):
        # Delegate to the backing cache object.
        self._cache.set_cache()
class Constructable(object):
    # Mixin giving inventory plugins the 'compose', 'groups' and 'keyed_groups'
    # features: Jinja2-derived host variables and dynamically constructed
    # groups based on host variable values or conditionals.

    def _compose(self, template, variables):
        ''' helper method for plugins to compose variables for Ansible based on jinja2 expression and inventory vars'''
        t = self.templar
        # 'use_extra_vars' may not be a declared option for every plugin;
        # treat any failure to read it as "disabled".
        try:
            use_extra = self.get_option('use_extra_vars')
        except Exception:
            use_extra = False
        if use_extra:
            # also expose self._vars (extra vars) to the template
            t.available_variables = combine_vars(variables, self._vars)
        else:
            t.available_variables = variables
        # Wrap the bare expression in the templar's variable delimiters and
        # render it; lookups are disabled on purpose.
        return t.template('%s%s%s' % (t.environment.variable_start_string, template, t.environment.variable_end_string), disable_lookups=True)

    def _set_composite_vars(self, compose, variables, host, strict=False):
        ''' loops over compose entries to create vars for hosts '''
        if compose and isinstance(compose, dict):
            for varname in compose:
                try:
                    composite = self._compose(compose[varname], variables)
                except Exception as e:
                    if strict:
                        raise AnsibleError("Could not set %s for host %s: %s" % (varname, host, to_native(e)))
                    # non-strict mode: silently skip entries that fail to template
                    continue
                self.inventory.set_variable(host, varname, composite)

    def _add_host_to_composed_groups(self, groups, variables, host, strict=False, fetch_hostvars=True):
        ''' helper to create complex groups for plugins based on jinja2 conditionals, hosts that meet the conditional are added to group'''
        # process each 'group entry'
        if groups and isinstance(groups, dict):
            if fetch_hostvars:
                # merge in any vars already recorded for this host in inventory
                variables = combine_vars(variables, self.inventory.get_host(host).get_vars())
            self.templar.available_variables = variables
            for group_name in groups:
                # turn the bare expression into an explicit True/False template
                conditional = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % groups[group_name]
                group_name = self._sanitize_group_name(group_name)
                try:
                    result = boolean(self.templar.template(conditional))
                except Exception as e:
                    if strict:
                        raise AnsibleParserError("Could not add host %s to group %s: %s" % (host, group_name, to_native(e)))
                    # non-strict mode: skip groups whose conditional errors out
                    continue
                if result:
                    # ensure group exists, use sanitized name
                    group_name = self.inventory.add_group(group_name)
                    # add host to group
                    self.inventory.add_child(group_name, host)

    def _add_host_to_keyed_groups(self, keys, variables, host, strict=False, fetch_hostvars=True):
        ''' helper to create groups for plugins based on variable values and add the corresponding hosts to it'''
        if keys and isinstance(keys, list):
            for keyed in keys:
                if keyed and isinstance(keyed, dict):
                    if fetch_hostvars:
                        # merge in any vars already recorded for this host
                        variables = combine_vars(variables, self.inventory.get_host(host).get_vars())
                    try:
                        # evaluate the 'key' expression against the host's vars
                        key = self._compose(keyed.get('key'), variables)
                    except Exception as e:
                        if strict:
                            raise AnsibleParserError("Could not generate group for host %s from %s entry: %s" % (host, keyed.get('key'), to_native(e)))
                        continue
                    if key:
                        prefix = keyed.get('prefix', '')
                        sep = keyed.get('separator', '_')
                        raw_parent_name = keyed.get('parent_group', None)
                        if raw_parent_name:
                            # parent_group itself may be a template
                            try:
                                raw_parent_name = self.templar.template(raw_parent_name)
                            except AnsibleError as e:
                                if strict:
                                    raise AnsibleParserError("Could not generate parent group %s for group %s: %s" % (raw_parent_name, key, to_native(e)))
                                continue
                        # normalize the key into a flat list of raw group names:
                        # a string yields one group, a list one per element, a
                        # mapping one per key/value pair joined by the separator
                        new_raw_group_names = []
                        if isinstance(key, string_types):
                            new_raw_group_names.append(key)
                        elif isinstance(key, list):
                            for name in key:
                                new_raw_group_names.append(name)
                        elif isinstance(key, Mapping):
                            for (gname, gval) in key.items():
                                name = '%s%s%s' % (gname, sep, gval)
                                new_raw_group_names.append(name)
                        else:
                            raise AnsibleParserError("Invalid group name format, expected a string or a list of them or dictionary, got: %s" % type(key))
                        for bare_name in new_raw_group_names:
                            if prefix == '' and self.get_option('leading_separator') is False:
                                # suppress the leading separator when there is no
                                # prefix and the user disabled it; note this
                                # clears 'sep' for the remaining iterations too
                                sep = ''
                            gname = self._sanitize_group_name('%s%s%s' % (prefix, sep, bare_name))
                            result_gname = self.inventory.add_group(gname)
                            self.inventory.add_host(host, result_gname)
                            if raw_parent_name:
                                parent_name = self._sanitize_group_name(raw_parent_name)
                                self.inventory.add_group(parent_name)
                                self.inventory.add_child(parent_name, result_gname)
                    else:
                        # exclude case of empty list and dictionary, because these are valid constructions
                        # simply no groups need to be constructed, but are still falsy
                        if strict and key not in ([], {}):
                            raise AnsibleParserError("No key or key resulted empty for %s in host %s, invalid entry" % (keyed.get('key'), host))
                else:
                    raise AnsibleParserError("Invalid keyed group entry, it must be a dictionary: %s " % keyed)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,145 |
fact_cache contains deprecated call to be removed in 2.12
|
##### SUMMARY
fact_cache contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/vars/fact_cache.py:99:12: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/vars/fact_cache.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74145
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:08Z |
python
| 2021-04-13T15:08:20Z |
lib/ansible/vars/fact_cache.py
|
# Copyright: (c) 2014, Michael DeHaan <[email protected]>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.plugins.loader import cache_loader
from ansible.utils.display import Display
display = Display()
class FactCache(MutableMapping):
    """Dict-like front end over the configured fact cache plugin.

    Every mapping operation is delegated to the cache plugin selected by
    ``C.CACHE_PLUGIN``; this class only adapts the plugin's get/set/delete
    interface to the standard MutableMapping protocol.
    """

    def __init__(self, *args, **kwargs):
        # Resolve the configured cache plugin up front; without one the
        # fact cache cannot operate at all.
        self._plugin = cache_loader.get(C.CACHE_PLUGIN)
        if not self._plugin:
            raise AnsibleError('Unable to load the facts cache plugin (%s).' % (C.CACHE_PLUGIN))

        super(FactCache, self).__init__(*args, **kwargs)

    def __getitem__(self, key):
        # Delegate the membership check to the plugin so unknown hosts
        # raise KeyError instead of surfacing a plugin-specific error.
        if self._plugin.contains(key):
            return self._plugin.get(key)
        raise KeyError

    def __setitem__(self, key, value):
        self._plugin.set(key, value)

    def __delitem__(self, key):
        self._plugin.delete(key)

    def __contains__(self, key):
        return self._plugin.contains(key)

    def __iter__(self):
        return iter(self._plugin.keys())

    def __len__(self):
        return len(self._plugin.keys())

    def copy(self):
        """Return a primitive (plain dict) copy of the keys and values."""
        return dict(self)

    def keys(self):
        return self._plugin.keys()

    def flush(self):
        """Flush the fact cache of all keys."""
        self._plugin.flush()

    def first_order_merge(self, key, value):
        """Shallow-merge ``value`` into the facts already cached for ``key``."""
        merged = {key: value}
        try:
            existing = self._plugin.get(key)
            if existing:
                # merge the new facts over the cached ones, one level deep
                existing.update(value)
                merged[key] = existing
        except KeyError:
            # nothing cached yet for this key; store the new value as-is
            pass

        super(FactCache, self).update(merged)

    def update(self, *args):
        """Update the cache, accepting either a dict or the legacy 2-arg form.

        ``update({host: facts})`` behaves like ``dict.update``; the deprecated
        ``update(key, value)`` form is forwarded to ``first_order_merge``.
        Raises TypeError for any other argument count.
        """
        if len(args) == 2:
            # Deprecated two-argument form; keep the old merge behaviour.
            display.deprecated('Calling FactCache().update(key, value) is deprecated. Use'
                               ' FactCache().first_order_merge(key, value) if you want the old'
                               ' behaviour or use FactCache().update({key: value}) if you want'
                               ' dict-like behaviour.', version='2.12', collection_name='ansible.builtin')
            return self.first_order_merge(*args)

        if len(args) == 1:
            host_facts = args[0]
        else:
            raise TypeError('update expected at most 1 argument, got {0}'.format(len(args)))

        super(FactCache, self).update(host_facts)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,145 |
fact_cache contains deprecated call to be removed in 2.12
|
##### SUMMARY
fact_cache contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/vars/fact_cache.py:99:12: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/vars/fact_cache.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74145
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:08Z |
python
| 2021-04-13T15:08:20Z |
test/sanity/ignore.txt
|
docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst no-smart-quotes
examples/play.yml shebang
examples/scripts/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/my_test.py shebang # example module but not in a normal module location
examples/scripts/my_test_facts.py shebang # example module but not in a normal module location
examples/scripts/my_test_info.py shebang # example module but not in a normal module location
examples/scripts/upgrade_to_ps3.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/upgrade_to_ps3.ps1 pslint:PSUseApprovedVerbs
hacking/build-ansible.py shebang # only run by release engineers, Python 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.6!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.7!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-3.5!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.6!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.7!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-3.5!skip # release and docs process only, 3.6+ required
lib/ansible/cli/console.py pylint:blacklisted-name
lib/ansible/cli/galaxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/cli/scripts/ansible_cli_stub.py pylint:ansible-deprecated-version
lib/ansible/cli/scripts/ansible_cli_stub.py shebang
lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang
lib/ansible/config/base.yml no-unwanted-files
lib/ansible/executor/playbook_executor.py pylint:blacklisted-name
lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/task_queue_manager.py pylint:blacklisted-name
lib/ansible/galaxy/collection/__init__.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/collection/galaxy_api_proxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/dataclasses.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/providers.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/keyword_desc.yml no-unwanted-files
lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py pylint:blacklisted-name
lib/ansible/module_utils/compat/selinux.py import-2.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-2.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.5!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/distro/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py no-assert
lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
lib/ansible/module_utils/facts/__init__.py empty-init # breaks namespacing, deprecate and eventually remove
lib/ansible/module_utils/facts/network/linux.py pylint:blacklisted-name
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.ArgvParser.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSProvideCommentHelp # need to agree on best format for comment location
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSProvideCommentHelp
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.LinkUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/pycompat24.py no-get-exception
lib/ansible/module_utils/six/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/six/__init__.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py no-basestring
lib/ansible/module_utils/six/__init__.py no-dict-iteritems
lib/ansible/module_utils/six/__init__.py no-dict-iterkeys
lib/ansible/module_utils/six/__init__.py no-dict-itervalues
lib/ansible/module_utils/six/__init__.py pylint:self-assigning-variable
lib/ansible/module_utils/six/__init__.py replace-urlopen
lib/ansible/module_utils/urls.py pylint:blacklisted-name
lib/ansible/module_utils/urls.py replace-urlopen
lib/ansible/modules/apt.py validate-modules:parameter-invalid
lib/ansible/modules/apt_key.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/apt_repository.py validate-modules:parameter-invalid
lib/ansible/modules/assemble.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/async_status.py use-argspec-type-path
lib/ansible/modules/async_status.py validate-modules!skip
lib/ansible/modules/async_wrapper.py ansible-doc!skip # not an actual module
lib/ansible/modules/async_wrapper.py pylint:ansible-bad-function # ignore, required
lib/ansible/modules/async_wrapper.py use-argspec-type-path
lib/ansible/modules/blockinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/blockinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/command.py validate-modules:doc-default-does-not-match-spec # _uses_shell is undocumented
lib/ansible/modules/command.py validate-modules:doc-missing-type
lib/ansible/modules/command.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/command.py validate-modules:undocumented-parameter
lib/ansible/modules/copy.py pylint:blacklisted-name
lib/ansible/modules/copy.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/copy.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/copy.py validate-modules:undocumented-parameter
lib/ansible/modules/dnf.py validate-modules:doc-required-mismatch
lib/ansible/modules/dnf.py validate-modules:parameter-invalid
lib/ansible/modules/file.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/file.py validate-modules:undocumented-parameter
lib/ansible/modules/find.py use-argspec-type-path # fix needed
lib/ansible/modules/git.py pylint:blacklisted-name
lib/ansible/modules/git.py use-argspec-type-path
lib/ansible/modules/git.py validate-modules:doc-missing-type
lib/ansible/modules/git.py validate-modules:doc-required-mismatch
lib/ansible/modules/hostname.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/iptables.py pylint:blacklisted-name
lib/ansible/modules/lineinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/package_facts.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/pip.py pylint:blacklisted-name
lib/ansible/modules/pip.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/replace.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:use-run-command-not-popen
lib/ansible/modules/stat.py validate-modules:doc-default-does-not-match-spec # get_md5 is undocumented
lib/ansible/modules/stat.py validate-modules:parameter-invalid
lib/ansible/modules/stat.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/stat.py validate-modules:undocumented-parameter
lib/ansible/modules/systemd.py validate-modules:parameter-invalid
lib/ansible/modules/systemd.py validate-modules:return-syntax-error
lib/ansible/modules/sysvinit.py validate-modules:return-syntax-error
lib/ansible/modules/unarchive.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/uri.py pylint:blacklisted-name
lib/ansible/modules/uri.py validate-modules:doc-required-mismatch
lib/ansible/modules/user.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/user.py validate-modules:doc-default-incompatible-type
lib/ansible/modules/user.py validate-modules:use-run-command-not-popen
lib/ansible/modules/yum.py pylint:blacklisted-name
lib/ansible/modules/yum.py validate-modules:parameter-invalid
lib/ansible/modules/yum_repository.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/yum_repository.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/yum_repository.py validate-modules:undocumented-parameter
lib/ansible/parsing/vault/__init__.py pylint:blacklisted-name
lib/ansible/playbook/base.py pylint:blacklisted-name
lib/ansible/playbook/collectionsearch.py required-and-default-attributes # https://github.com/ansible/ansible/issues/61460
lib/ansible/playbook/conditional.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:blacklisted-name
lib/ansible/playbook/play_context.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/async_status.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/normal.py action-plugin-docs # default action plugin for modules without a dedicated action plugin
lib/ansible/plugins/cache/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/cache/base.py ansible-doc!skip # not a plugin, but a stub for backwards compatibility
lib/ansible/plugins/inventory/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/inventory/script.py pylint:ansible-deprecated-version
lib/ansible/plugins/lookup/sequence.py pylint:blacklisted-name
lib/ansible/plugins/strategy/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/strategy/__init__.py pylint:blacklisted-name
lib/ansible/plugins/strategy/linear.py pylint:blacklisted-name
lib/ansible/vars/fact_cache.py pylint:ansible-deprecated-version
lib/ansible/vars/hostvars.py pylint:blacklisted-name
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-function # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import-from # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/collections_plugin_namespace/collection_root/ansible_collections/my_ns/my_col/plugins/lookup/lookup_no_future_boilerplate.py future-import-boilerplate # testing Python 2.x implicit relative imports
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util2.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util3.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/my_module.py pylint:relative-beyond-top-level
test/integration/targets/gathering_facts/library/bogus_facts shebang
test/integration/targets/gathering_facts/library/facts_one shebang
test/integration/targets/gathering_facts/library/facts_two shebang
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps1 pslint!skip
test/integration/targets/incidental_win_reboot/templates/post_reboot.ps1 pslint!skip
test/integration/targets/json_cleanup/library/bad_json shebang
test/integration/targets/lookup_csvfile/files/crlf.csv line-endings
test/integration/targets/lookup_ini/lookup-8859-15.ini no-smart-quotes
test/integration/targets/module_precedence/lib_with_extension/a.ini shebang
test/integration/targets/module_precedence/lib_with_extension/ping.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/a.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/ping.ini shebang
test/integration/targets/module_utils/library/test.py future-import-boilerplate # allow testing of Python 2.x implicit relative imports
test/integration/targets/module_utils/module_utils/bar0/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/bar.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/yak/zebra/foo.py pylint:blacklisted-name
test/integration/targets/old_style_modules_posix/library/helloworld.sh shebang
test/integration/targets/template/files/encoding_1252_utf-8.expected no-smart-quotes
test/integration/targets/template/files/encoding_1252_windows-1252.expected no-smart-quotes
test/integration/targets/template/files/foo.dos.txt line-endings
test/integration/targets/template/templates/encoding_1252.j2 no-smart-quotes
test/integration/targets/unicode/unicode.yml no-smart-quotes
test/integration/targets/win_exec_wrapper/library/test_fail.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_exec_wrapper/tasks/main.yml no-smart-quotes # We are explicitly testing smart quote support for env vars
test/integration/targets/win_fetch/tasks/main.yml no-smart-quotes # We are explicitly testing smart quotes in the file name to fetch
test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_script/files/test_script.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_creates_file.ps1 pslint:PSAvoidUsingCmdletAliases
test/integration/targets/win_script/files/test_script_removes_file.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_script/files/test_script_with_args.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip
test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt test-constraints
test/lib/ansible_test/_data/requirements/sanity.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py use-compat-six
test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
test/support/integration/plugins/module_utils/aws/core.py pylint:property-with-parameters
test/support/integration/plugins/module_utils/cloud.py future-import-boilerplate
test/support/integration/plugins/module_utils/cloud.py metaclass-boilerplate
test/support/integration/plugins/module_utils/cloud.py pylint:isinstance-second-argument-not-valid-type
test/support/integration/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/integration/plugins/module_utils/database.py future-import-boilerplate
test/support/integration/plugins/module_utils/database.py metaclass-boilerplate
test/support/integration/plugins/module_utils/mysql.py future-import-boilerplate
test/support/integration/plugins/module_utils/mysql.py metaclass-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/integration/plugins/module_utils/postgres.py future-import-boilerplate
test/support/integration/plugins/module_utils/postgres.py metaclass-boilerplate
test/support/integration/plugins/modules/lvg.py pylint:blacklisted-name
test/support/integration/plugins/modules/timezone.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py pep8:E203
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py pep8:E501
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pep8:E231
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py metaclass-boilerplate
test/support/windows-integration/plugins/modules/async_status.ps1 pslint!skip
test/support/windows-integration/plugins/modules/setup.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_copy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_dsc.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_feature.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_find.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_lineinfile.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_regedit.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_security_policy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_shell.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_wait_for.ps1 pslint!skip
test/units/executor/test_play_iterator.py pylint:blacklisted-name
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-no-version
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-version
test/units/module_utils/basic/test_run_command.py pylint:blacklisted-name
test/units/module_utils/urls/fixtures/multipart.txt line-endings # Fixture for HTTP tests that use CRLF
test/units/module_utils/urls/test_Request.py replace-urlopen
test/units/module_utils/urls/test_fetch_url.py replace-urlopen
test/units/modules/test_apt.py pylint:blacklisted-name
test/units/parsing/vault/test_vault.py pylint:blacklisted-name
test/units/playbook/role/test_role.py pylint:blacklisted-name
test/units/plugins/test_plugins.py pylint:blacklisted-name
test/units/template/test_templar.py pylint:blacklisted-name
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py pylint:relative-beyond-top-level
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py future-import-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py metaclass-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/test_collection_loader.py pylint:undefined-variable # magic runtime local var splatting
test/utils/shippable/check_matrix.py replace-urlopen
test/utils/shippable/timing.py shebang
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,145 |
fact_cache contains deprecated call to be removed in 2.12
|
##### SUMMARY
fact_cache contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/vars/fact_cache.py:99:12: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/vars/fact_cache.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74145
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:08Z |
python
| 2021-04-13T15:08:20Z |
test/units/plugins/cache/test_cache.py
|
# (c) 2012-2015, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest, mock
from ansible.errors import AnsibleError
from ansible.plugins.cache import FactCache, CachePluginAdjudicator
from ansible.plugins.cache.base import BaseCacheModule
from ansible.plugins.cache.memory import CacheModule as MemoryCache
from ansible.plugins.loader import cache_loader
import pytest
class TestCachePluginAdjudicator(unittest.TestCase):
    """Exercise the dict-like CachePluginAdjudicator wrapper around a cache plugin."""

    def setUp(self):
        # memory plugin cache
        self.cache = CachePluginAdjudicator()
        self.cache['cache_key'] = {'key1': 'value1', 'key2': 'value2'}
        self.cache['cache_key_2'] = {'key': 'value'}

    def test___setitem__(self):
        self.cache['new_cache_key'] = {'new_key1': ['new_value1', 'new_value2']}
        assert self.cache['new_cache_key'] == {'new_key1': ['new_value1', 'new_value2']}

    def test_inner___setitem__(self):
        # Mutating a value retrieved from the cache must be visible on re-read.
        self.cache['new_cache_key'] = {'new_key1': ['new_value1', 'new_value2']}
        self.cache['new_cache_key']['new_key1'][0] = 'updated_value1'
        assert self.cache['new_cache_key'] == {'new_key1': ['updated_value1', 'new_value2']}

    def test___contains__(self):
        assert 'cache_key' in self.cache
        assert 'not_cache_key' not in self.cache

    def test_get(self):
        assert self.cache.get('cache_key') == {'key1': 'value1', 'key2': 'value2'}

    def test_get_with_default(self):
        assert self.cache.get('foo', 'bar') == 'bar'

    def test_get_without_default(self):
        assert self.cache.get('foo') is None

    def test___getitem__(self):
        # FIX: dropped the unused "as err" binding flagged by linters.
        with pytest.raises(KeyError):
            self.cache['foo']

    def test_pop_with_default(self):
        assert self.cache.pop('foo', 'bar') == 'bar'

    def test_pop_without_default(self):
        # FIX: dropped the unused "as err" binding flagged by linters.
        with pytest.raises(KeyError):
            self.cache.pop('foo')

    def test_pop(self):
        v = self.cache.pop('cache_key_2')
        assert v == {'key': 'value'}
        assert 'cache_key_2' not in self.cache

    def test_update(self):
        self.cache.update({'cache_key': {'key2': 'updatedvalue'}})
        assert self.cache['cache_key']['key2'] == 'updatedvalue'

    def test_flush(self):
        # Fake that the cache already has some data in it but the adjudicator
        # hasn't loaded it in.
        self.cache._plugin.set('monkey', 'animal')
        self.cache._plugin.set('wolf', 'animal')
        self.cache._plugin.set('another wolf', 'another animal')

        # The adjudicator doesn't know about the new entries
        assert len(self.cache) == 2
        # But the cache itself does
        assert len(self.cache._plugin._cache) == 3

        # If we call flush, both the adjudicator and the cache should flush
        self.cache.flush()
        assert len(self.cache) == 0
        assert len(self.cache._plugin._cache) == 0
class TestFactCache(unittest.TestCase):
    """Tests for the FactCache facade over the configured cache plugin."""

    def setUp(self):
        # Force the in-memory backend so no on-disk state is touched.
        with mock.patch('ansible.constants.CACHE_PLUGIN', 'memory'):
            self.cache = FactCache()

    def test_copy(self):
        self.cache['avocado'] = 'fruit'
        self.cache['daisy'] = 'flower'
        a_copy = self.cache.copy()
        self.assertEqual(type(a_copy), dict)
        self.assertEqual(a_copy, dict(avocado='fruit', daisy='flower'))

    def test_plugin_load_failure(self):
        # See https://github.com/ansible/ansible/issues/18751
        # Note no fact_connection config set, so this will fail
        # FIX: use assertRaisesRegex; assertRaisesRegexp is a deprecated alias.
        with mock.patch('ansible.constants.CACHE_PLUGIN', 'json'):
            self.assertRaisesRegex(AnsibleError,
                                   "Unable to load the facts cache plugin.*json.*",
                                   FactCache)

    def test_update(self):
        self.cache.update({'cache_key': {'key2': 'updatedvalue'}})
        assert self.cache['cache_key']['key2'] == 'updatedvalue'

    def test_update_legacy(self):
        # Legacy two-argument call style: update(key, value).
        self.cache.update('cache_key', {'key2': 'updatedvalue'})
        assert self.cache['cache_key']['key2'] == 'updatedvalue'

    def test_update_legacy_key_exists(self):
        self.cache['cache_key'] = {'key': 'value', 'key2': 'value2'}
        self.cache.update('cache_key', {'key': 'updatedvalue'})
        assert self.cache['cache_key']['key'] == 'updatedvalue'
        assert self.cache['cache_key']['key2'] == 'value2'
class TestAbstractClass(unittest.TestCase):
    """Verify the abstract-method contract of BaseCacheModule."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_subclass_error(self):
        # A subclass that overrides nothing cannot be instantiated.
        class _NoOverrides(BaseCacheModule):
            pass

        with self.assertRaises(TypeError):
            _NoOverrides()  # pylint: disable=abstract-class-instantiated

        # Overriding only one abstract method is still not enough.
        class _PartialOverrides(BaseCacheModule):
            def get(self, key):
                super(_PartialOverrides, self).get(key)

        with self.assertRaises(TypeError):
            _PartialOverrides()  # pylint: disable=abstract-class-instantiated

    def test_subclass_success(self):
        # Overriding every abstract method makes the class concrete.
        class _FullOverrides(BaseCacheModule):
            def get(self, key):
                super(_FullOverrides, self).get(key)

            def set(self, key, value):
                super(_FullOverrides, self).set(key, value)

            def keys(self):
                super(_FullOverrides, self).keys()

            def contains(self, key):
                super(_FullOverrides, self).contains(key)

            def delete(self, key):
                super(_FullOverrides, self).delete(key)

            def flush(self):
                super(_FullOverrides, self).flush()

            def copy(self):
                super(_FullOverrides, self).copy()

        self.assertIsInstance(_FullOverrides(), _FullOverrides)

    def test_memory_cachemodule(self):
        self.assertIsInstance(MemoryCache(), MemoryCache)

    def test_memory_cachemodule_with_loader(self):
        self.assertIsInstance(cache_loader.get('memory'), MemoryCache)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,142 |
inventory contains deprecated call to be removed in 2.12
|
##### SUMMARY
inventory contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/inventory/__init__.py:298:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/inventory/__init__.py:305:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/inventory/__init__.py:314:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/inventory/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74142
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:05Z |
python
| 2021-04-13T15:08:20Z |
changelogs/fragments/cache-deprecations.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,142 |
inventory contains deprecated call to be removed in 2.12
|
##### SUMMARY
inventory contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/inventory/__init__.py:298:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/inventory/__init__.py:305:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/inventory/__init__.py:314:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/inventory/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74142
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:05Z |
python
| 2021-04-13T15:08:20Z |
lib/ansible/plugins/cache/__init__.py
|
# (c) 2014, Michael DeHaan <[email protected]>
# (c) 2018, Ansible Project
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import os
import time
import errno
from abc import ABCMeta, abstractmethod
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.six import with_metaclass
from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.plugins import AnsiblePlugin
from ansible.plugins.loader import cache_loader
from ansible.utils.collection_loader import resource_from_fqcr
from ansible.utils.display import Display
from ansible.vars.fact_cache import FactCache as RealFactCache
display = Display()
class FactCache(RealFactCache):
    """Deprecated backwards-compatibility shim.

    The real implementation lives in ansible.vars.fact_cache.FactCache; it was
    moved because it is the consumer of cache plugins, not a cache plugin
    itself, so subclassing it never yielded a usable plugin.  Authors writing a
    cache plugin should subclass ansible.plugins.cache.BaseCacheModule (or one
    of its subclasses) instead.  Instantiating this shim emits a deprecation
    warning and then defers entirely to the real class.
    """

    def __init__(self, *args, **kwargs):
        moved_msg = ('ansible.plugins.cache.FactCache has been moved to'
                     ' ansible.vars.fact_cache.FactCache. If you are looking for the class'
                     ' to subclass for a cache plugin, you want'
                     ' ansible.plugins.cache.BaseCacheModule or one of its subclasses.')
        display.deprecated(moved_msg, version='2.12', collection_name='ansible.builtin')
        super(FactCache, self).__init__(*args, **kwargs)
class BaseCacheModule(AnsiblePlugin):
    """Abstract base class for cache plugins.

    Concrete plugins must implement get/set/keys/contains/delete/flush/copy.
    Plugins should be loaded via ansible.plugins.loader.cache_loader, which
    sets ``_load_name`` before ``__init__`` runs.
    """

    # Backwards compat only. Just import the global display instead
    _display = display

    def __init__(self, *args, **kwargs):
        # Third party code is not using cache_loader to load plugin - fall back to previous behavior
        if not hasattr(self, '_load_name'):
            display.deprecated('Rather than importing custom CacheModules directly, use ansible.plugins.loader.cache_loader',
                               version='2.14', collection_name='ansible.builtin')
            # FIX: removed the dead store of self.__module__.split('.')[-1];
            # it was immediately overwritten by the assignment below.
            self._load_name = resource_from_fqcr(self.__module__)
        super(BaseCacheModule, self).__init__()
        # Positional args become variable options; keyword args are direct options.
        self.set_options(var_options=args, direct=kwargs)

    @abstractmethod
    def get(self, key):
        pass

    @abstractmethod
    def set(self, key, value):
        pass

    @abstractmethod
    def keys(self):
        pass

    @abstractmethod
    def contains(self, key):
        pass

    @abstractmethod
    def delete(self, key):
        pass

    @abstractmethod
    def flush(self):
        pass

    @abstractmethod
    def copy(self):
        pass
class BaseFileCacheModule(BaseCacheModule):
    """
    A caching module backed by file based storage.

    Each cache key is stored as one file under the configured cache
    directory; subclasses supply the serialization via _load/_dump.
    Entries expire based on the file's mtime versus the configured timeout.
    """
    def __init__(self, *args, **kwargs):

        try:
            super(BaseFileCacheModule, self).__init__(*args, **kwargs)
            self._cache_dir = self._get_cache_connection(self.get_option('_uri'))
            self._timeout = float(self.get_option('_timeout'))
        except KeyError:
            # Plugin-specific options unavailable; fall back to global config.
            self._cache_dir = self._get_cache_connection(C.CACHE_PLUGIN_CONNECTION)
            self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
        self.plugin_name = resource_from_fqcr(self.__module__)
        # In-memory mirror of values already read/written this run.
        self._cache = {}
        self.validate_cache_connection()

    def _get_cache_connection(self, source):
        # Expand ~ and environment variables in the configured path.
        # Returns None when source is falsy or not path-like.
        if source:
            try:
                return os.path.expanduser(os.path.expandvars(source))
            except TypeError:
                pass

    def validate_cache_connection(self):
        """Ensure the cache directory is set, exists, and is rwx-accessible; raise AnsibleError otherwise."""
        if not self._cache_dir:
            raise AnsibleError("error, '%s' cache plugin requires the 'fact_caching_connection' config option "
                               "to be set (to a writeable directory path)" % self.plugin_name)

        if not os.path.exists(self._cache_dir):
            try:
                os.makedirs(self._cache_dir)
            except (OSError, IOError) as e:
                raise AnsibleError("error in '%s' cache plugin while trying to create cache dir %s : %s" % (self.plugin_name, self._cache_dir, to_bytes(e)))
        else:
            # Directory already exists: require read, write and traverse access.
            for x in (os.R_OK, os.W_OK, os.X_OK):
                if not os.access(self._cache_dir, x):
                    raise AnsibleError("error in '%s' cache, configured path (%s) does not have necessary permissions (rwx), disabling plugin" % (
                        self.plugin_name, self._cache_dir))

    def _get_cache_file_name(self, key):
        # Map a cache key to its backing file path, honoring the optional prefix.
        prefix = self.get_option('_prefix')
        if prefix:
            cachefile = "%s/%s%s" % (self._cache_dir, prefix, key)
        else:
            cachefile = "%s/%s" % (self._cache_dir, key)
        return cachefile

    def get(self, key):
        """ This checks the in memory cache first as the fact was not expired at 'gather time'
        and it would be problematic if the key did expire after some long running tasks and
        user gets 'undefined' error in the same play """

        if key not in self._cache:

            if self.has_expired(key) or key == "":
                raise KeyError

            cachefile = self._get_cache_file_name(key)
            try:
                value = self._load(cachefile)
                self._cache[key] = value
            except ValueError as e:
                # Corrupt/unparseable file: remove it and fail loudly.
                display.warning("error in '%s' cache plugin while trying to read %s : %s. "
                                "Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
                self.delete(key)
                raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data. "
                                   "It has been removed, so you can re-run your command now." % cachefile)
            except (OSError, IOError) as e:
                # Unreadable file is treated as a cache miss.
                display.warning("error in '%s' cache plugin while trying to read %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
                raise KeyError
            except Exception as e:
                raise AnsibleError("Error while decoding the cache file %s: %s" % (cachefile, to_bytes(e)))

        return self._cache.get(key)

    def set(self, key, value):
        """Store value both in the in-memory mirror and on disk (write errors only warn)."""

        self._cache[key] = value

        cachefile = self._get_cache_file_name(key)
        try:
            self._dump(value, cachefile)
        except (OSError, IOError) as e:
            display.warning("error in '%s' cache plugin while trying to write to %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))

    def has_expired(self, key):
        # A timeout of 0 means entries never expire.
        if self._timeout == 0:
            return False

        cachefile = self._get_cache_file_name(key)
        try:
            st = os.stat(cachefile)
        except (OSError, IOError) as e:
            if e.errno == errno.ENOENT:
                # Missing file: not "expired" (it is simply absent).
                return False
            else:
                display.warning("error in '%s' cache plugin while trying to stat %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
                return False

        if time.time() - st.st_mtime <= self._timeout:
            return False

        # Expired: drop it from the in-memory mirror as well.
        if key in self._cache:
            del self._cache[key]
        return True

    def keys(self):
        # List on-disk keys, skipping hidden files and expired entries.
        keys = []
        for k in os.listdir(self._cache_dir):
            if not (k.startswith('.') or self.has_expired(k)):
                keys.append(k)
        return keys

    def contains(self, key):
        cachefile = self._get_cache_file_name(key)

        if key in self._cache:
            return True

        if self.has_expired(key):
            return False
        try:
            os.stat(cachefile)
            return True
        except (OSError, IOError) as e:
            if e.errno == errno.ENOENT:
                return False
            else:
                display.warning("error in '%s' cache plugin while trying to stat %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))

    def delete(self, key):
        # Remove from both the in-memory mirror and disk; missing entries are ignored.
        try:
            del self._cache[key]
        except KeyError:
            pass
        try:
            os.remove(self._get_cache_file_name(key))
        except (OSError, IOError):
            pass  # TODO: only pass on non existing?

    def flush(self):
        self._cache = {}
        for key in self.keys():
            self.delete(key)

    def copy(self):
        # Snapshot of every non-expired entry as a plain dict.
        ret = dict()
        for key in self.keys():
            ret[key] = self.get(key)
        return ret

    @abstractmethod
    def _load(self, filepath):
        """
        Read data from a filepath and return it as a value

        :arg filepath: The filepath to read from.
        :returns: The value stored in the filepath

        This method reads from the file on disk and takes care of any parsing
        and transformation of the data before returning it.  The value
        returned should be what Ansible would expect if it were uncached data.

        .. note:: Filehandles have advantages but calling code doesn't know
            whether this file is text or binary, should be decoded, or accessed via
            a library function.  Therefore the API uses a filepath and opens
            the file inside of the method.
        """
        pass

    @abstractmethod
    def _dump(self, value, filepath):
        """
        Write data to a filepath

        :arg value: The value to store
        :arg filepath: The filepath to store it at
        """
        pass
class CachePluginAdjudicator(MutableMapping):
    """
    Intermediary between a cache dictionary and a CacheModule

    Presents a dict interface backed lazily by a cache plugin: keys are pulled
    from the plugin on first access and written back via set_cache().
    """
    def __init__(self, plugin_name='memory', **kwargs):
        # Working copy of the cache data.
        self._cache = {}
        # What was last read from / written to the plugin, to detect changes.
        self._retrieved = {}

        self._plugin = cache_loader.get(plugin_name, **kwargs)
        if not self._plugin:
            raise AnsibleError('Unable to load the cache plugin (%s).' % plugin_name)

        self._plugin_name = plugin_name

    def update_cache_if_changed(self):
        # Only push to the plugin when the working copy has diverged.
        if self._retrieved != self._cache:
            self.set_cache()

    def set_cache(self):
        """Write every top-level entry to the plugin and resync _retrieved."""
        for top_level_cache_key in self._cache.keys():
            self._plugin.set(top_level_cache_key, self._cache[top_level_cache_key])
        self._retrieved = copy.deepcopy(self._cache)

    def load_whole_cache(self):
        for key in self._plugin.keys():
            self._cache[key] = self._plugin.get(key)

    def __repr__(self):
        return to_text(self._cache)

    def __iter__(self):
        return iter(self.keys())

    def __len__(self):
        return len(self.keys())

    def _do_load_key(self, key):
        # A key should be fetched from the plugin only if it is unknown locally,
        # the backend is persistent (not 'memory'), and the plugin has it.
        load = False
        if all([
                key not in self._cache,
                key not in self._retrieved,
                self._plugin_name != 'memory',
                self._plugin.contains(key),
        ]):
            load = True
        return load

    def _load_key_if_needed(self, key):
        # FIX: shared helper; __getitem__ and get() previously duplicated this
        # logic (and get() bound an unused exception variable).
        if self._do_load_key(key):
            try:
                self._cache[key] = self._plugin.get(key)
            except KeyError:
                pass
            else:
                self._retrieved[key] = self._cache[key]

    def __getitem__(self, key):
        self._load_key_if_needed(key)
        return self._cache[key]

    def get(self, key, default=None):
        self._load_key_if_needed(key)
        return self._cache.get(key, default)

    def items(self):
        return self._cache.items()

    def values(self):
        return self._cache.values()

    def keys(self):
        return self._cache.keys()

    def pop(self, key, *args):
        if args:
            return self._cache.pop(key, args[0])
        return self._cache.pop(key)

    def __delitem__(self, key):
        del self._cache[key]

    def __setitem__(self, key, value):
        self._cache[key] = value

    def flush(self):
        # Flush both the plugin's store and the local working copy.
        self._plugin.flush()
        self._cache = {}

    def update(self, value):
        self._cache.update(value)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,142 |
inventory contains deprecated call to be removed in 2.12
|
##### SUMMARY
inventory contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/inventory/__init__.py:298:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/inventory/__init__.py:305:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/inventory/__init__.py:314:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/inventory/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74142
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:05Z |
python
| 2021-04-13T15:08:20Z |
lib/ansible/plugins/inventory/__init__.py
|
# (c) 2017, Red Hat, inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import hashlib
import os
import string
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.inventory.group import to_safe_group_name as original_safe
from ansible.parsing.utils.addresses import parse_address
from ansible.plugins import AnsiblePlugin
from ansible.plugins.cache import CachePluginAdjudicator as CacheObject
from ansible.module_utils._text import to_bytes, to_native
from ansible.module_utils.common._collections_compat import Mapping
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.six import string_types
from ansible.template import Templar
from ansible.utils.display import Display
from ansible.utils.vars import combine_vars, load_extra_vars
display = Display()
# Helper methods
def to_safe_group_name(name):
    """Sanitize *name* into a valid Ansible group name.

    Backwards-compatibility shim around the canonical sanitizer.
    """
    # Delegate to the shared implementation, forcing sanitization and
    # silencing the warning it would otherwise emit.
    return original_safe(name, force=True, silent=True)
def detect_range(line=None):
    """Report whether *line* contains a host-range pattern.

    A range pattern is introduced by a ``[`` character, as in
    ``db[1:6]-node``.  Returns True when such a pattern is present,
    otherwise False.
    """
    return '[' in line
def expand_hostname_range(line=None):
    '''
    Expand a host line containing one or more ``[beg:end(:step)]`` range
    patterns (e.g. ``db[01:10:2]node-[a:f]``) into the full list of
    hostnames it describes.

    Numeric and alphabetic ranges are supported; a leading zero on the
    begin value requests zero-padded numbers of that width.  Returns the
    (possibly empty) list of expanded hostnames.

    References: https://docs.ansible.com/ansible/latest/user_guide/intro_inventory.html#hosts-and-groups
    '''
    all_hosts = []
    if line:
        # A hostname such as db[1:6]-node is considered to consist of
        # three parts:
        #   head:   'db'
        #   nrange: [1:6]; range() is a built-in, so we can't use the name
        #   tail:   '-node'

        # Multiple ranges in a host (e.g. db[01:10:3]node-[01:10]) are
        # supported by splitting off only the first [...] set here and
        # recursing below until none are left.  The optional third field
        # is the step (default 1), so [01:10:2] -> 01 03 05 07 09.
        (head, nrange, tail) = line.replace('[', '|', 1).replace(']', '|', 1).split('|')
        bounds = nrange.split(":")
        if len(bounds) != 2 and len(bounds) != 3:
            raise AnsibleError("host range must be begin:end or begin:end:step")
        beg = bounds[0]
        end = bounds[1]
        if len(bounds) == 2:
            step = 1
        else:
            step = bounds[2]
        if not beg:
            # An omitted begin value defaults to 0.
            beg = "0"
        if not end:
            raise AnsibleError("host range must specify end value")
        if beg[0] == '0' and len(beg) > 1:
            # A leading zero on a multi-digit begin value requests
            # zero-padded output of that exact width.
            rlen = len(beg)  # range length formatting hint
            if rlen != len(end):
                raise AnsibleError("host range must specify equal-length begin and end formats")

            def fill(x):
                return str(x).zfill(rlen)  # zero-pad each member of the range sequence
        else:
            fill = str

        try:
            # Try an alphabetic range first ('a':'f'); a non-letter bound
            # raises ValueError and falls through to the numeric branch.
            i_beg = string.ascii_letters.index(beg)
            i_end = string.ascii_letters.index(end)
            if i_beg > i_end:
                raise AnsibleError("host range must have begin <= end")
            seq = list(string.ascii_letters[i_beg:i_end + 1:int(step)])
        except ValueError:  # not an alpha range
            seq = range(int(beg), int(end) + 1, int(step))

        for rseq in seq:
            hname = ''.join((head, fill(rseq), tail))

            if detect_range(hname):
                # Remaining [x:y] sets in the tail are expanded recursively.
                all_hosts.extend(expand_hostname_range(hname))
            else:
                all_hosts.append(hname)

    return all_hosts
def get_cache_plugin(plugin_name, **kwargs):
    """Load and return the named cache plugin, configured with ``kwargs``.

    :arg plugin_name: name of the cache plugin to load (e.g. 'jsonfile', 'memory')
    :kwarg kwargs: plugin options (such as ``_uri``/``_timeout``/``_prefix``)
        forwarded to the plugin constructor
    :raises AnsibleError: when the plugin requires a cache connection that is
        not configured, or when options were provided to a plugin that cannot
        reconcile them
    """
    try:
        cache = CacheObject(plugin_name, **kwargs)
    except AnsibleError as e:
        if 'fact_caching_connection' in to_native(e):
            # Translate the generic plugin error into inventory-specific guidance.
            raise AnsibleError("error, '%s' inventory cache plugin requires the one of the following to be set "
                               "to a writeable directory path:\nansible.cfg:\n[default]: fact_caching_connection,\n"
                               "[inventory]: cache_connection;\nEnvironment:\nANSIBLE_INVENTORY_CACHE_CONNECTION,\n"
                               "ANSIBLE_CACHE_PLUGIN_CONNECTION." % plugin_name)
        else:
            # Bare 'raise' re-raises the active exception and preserves the
            # original traceback ('raise e' would reset it on Python 2).
            raise

    if plugin_name != 'memory' and kwargs and not getattr(cache._plugin, '_options', None):
        raise AnsibleError('Unable to use cache plugin {0} for inventory. Cache options were provided but may not reconcile '
                           'correctly unless set via set_options. Refer to the porting guide if the plugin derives user settings '
                           'from ansible.constants.'.format(plugin_name))
    return cache
class BaseInventoryPlugin(AnsiblePlugin):
    """ Parses an Inventory Source"""

    TYPE = 'generator'

    # 3rd party plugins redefine this to
    # use custom group name sanitization
    # since constructed features enforce
    # it by default.
    _sanitize_group_name = staticmethod(to_safe_group_name)

    def __init__(self):

        super(BaseInventoryPlugin, self).__init__()

        self._options = {}
        self.inventory = None
        self.display = display
        self._vars = {}

    def parse(self, inventory, loader, path, cache=True):
        ''' Populates inventory from the given data. Raises an error on any parse failure
            :arg inventory: a copy of the previously accumulated inventory data,
                 to be updated with any new data this plugin provides.
                 The inventory can be empty if no other source/plugin ran successfully.
            :arg loader: a reference to the DataLoader, which can read in YAML and JSON files,
                 it also has Vault support to automatically decrypt files.
            :arg path: the string that represents the 'inventory source',
                 normally a path to a configuration file for this inventory,
                 but it can also be a raw string for this plugin to consume
            :arg cache: a boolean that indicates if the plugin should use the cache or not
                 you can ignore if this plugin does not implement caching.
        '''

        # Subclasses are expected to call this base implementation first and
        # then populate self.inventory themselves.
        self.loader = loader
        self.inventory = inventory
        self.templar = Templar(loader=loader)
        self._vars = load_extra_vars(loader)

    def verify_file(self, path):
        ''' Verify if file is usable by this plugin, base does minimal accessibility check
            :arg path: a string that was passed as an inventory source,
                 it normally is a path to a config file, but this is not a requirement,
                 it can also be parsed itself as the inventory data to process.
                 So only call this base class if you expect it to be a file.
        '''

        valid = False
        b_path = to_bytes(path, errors='surrogate_or_strict')
        if (os.path.exists(b_path) and os.access(b_path, os.R_OK)):
            valid = True
        else:
            self.display.vvv('Skipping due to inventory source not existing or not being readable by the current user')
        return valid

    def _populate_host_vars(self, hosts, variables, group=None, port=None):
        # Add each host to the inventory (and optional group) and set the
        # given variables on all of them.
        if not isinstance(variables, Mapping):
            raise AnsibleParserError("Invalid data from file, expected dictionary and got:\n\n%s" % to_native(variables))

        for host in hosts:
            self.inventory.add_host(host, group=group, port=port)
            for k in variables:
                self.inventory.set_variable(host, k, variables[k])

    def _read_config_data(self, path):
        ''' validate config and set options as appropriate
            :arg path: path to common yaml format config file for this plugin
        '''

        config = {}
        try:
            # avoid loader cache so meta: refresh_inventory can pick up config changes
            # if we read more than once, fs cache should be good enough
            config = self.loader.load_from_file(path, cache=False)
        except Exception as e:
            raise AnsibleParserError(to_native(e))

        # a plugin can be loaded via many different names with redirection- if so, we want to accept any of those names
        valid_names = getattr(self, '_redirected_names') or [self.NAME]

        if not config:
            # no data
            raise AnsibleParserError("%s is empty" % (to_native(path)))
        elif config.get('plugin') not in valid_names:
            # this is not my config file
            raise AnsibleParserError("Incorrect plugin name in file: %s" % config.get('plugin', 'none found'))
        elif not isinstance(config, Mapping):
            # configs are dictionaries
            raise AnsibleParserError('inventory source has invalid structure, it should be a dictionary, got: %s' % type(config))

        self.set_options(direct=config, var_options=self._vars)
        if 'cache' in self._options and self.get_option('cache'):
            # Translate user-facing option names into the private keyword
            # names the cache plugin constructor expects.
            cache_option_keys = [('_uri', 'cache_connection'), ('_timeout', 'cache_timeout'), ('_prefix', 'cache_prefix')]
            cache_options = dict((opt[0], self.get_option(opt[1])) for opt in cache_option_keys if self.get_option(opt[1]) is not None)
            self._cache = get_cache_plugin(self.get_option('cache_plugin'), **cache_options)

        return config

    def _consume_options(self, data):
        ''' update existing options from alternate configuration sources not normally used by Ansible.
            Many API libraries already have existing configuration sources, this allows plugin author to leverage them.
            :arg data: key/value pairs that correspond to configuration options for this plugin
        '''

        # NOTE: matching keys are removed from 'data' (pop), so the caller
        # sees only the leftovers.
        for k in self._options:
            if k in data:
                self._options[k] = data.pop(k)

    def _expand_hostpattern(self, hostpattern):
        '''
        Takes a single host pattern and returns a list of hostnames and an
        optional port number that applies to all of them.
        '''
        # Can the given hostpattern be parsed as a host with an optional port
        # specification?
        try:
            (pattern, port) = parse_address(hostpattern, allow_ranges=True)
        except Exception:
            # not a recognizable host pattern
            pattern = hostpattern
            port = None

        # Once we have separated the pattern, we expand it into list of one or
        # more hostnames, depending on whether it contains any [x:y] ranges.
        if detect_range(pattern):
            hostnames = expand_hostname_range(pattern)
        else:
            hostnames = [pattern]

        return (hostnames, port)
class BaseFileInventoryPlugin(BaseInventoryPlugin):
    """Base class for inventory plugins that read their data from a file."""

    TYPE = 'storage'

    def __init__(self):
        # Nothing file-specific to initialise; defer entirely to the base.
        super(BaseFileInventoryPlugin, self).__init__()
class DeprecatedCache(object):
    """Proxy over an inventory plugin's ``_cache`` dict that emits a
    deprecation warning on every access.

    Kept only for backwards compatibility with plugins still using
    ``self.cache``; new code should use ``self._cache`` directly.
    """

    def __init__(self, real_cacheable):
        # The Cacheable instance whose _cache dict is being proxied.
        self.real_cacheable = real_cacheable

    def get(self, key):
        display.deprecated('InventoryModule should utilize self._cache as a dict instead of self.cache. '
                           'When expecting a KeyError, use self._cache[key] instead of using self.cache.get(key). '
                           'self._cache is a dictionary and will return a default value instead of raising a KeyError '
                           'when the key does not exist', version='2.12', collection_name='ansible.builtin')
        # NOTE: unlike dict.get(), this raises KeyError for a missing key.
        return self.real_cacheable._cache[key]

    def set(self, key, value):
        display.deprecated('InventoryModule should utilize self._cache as a dict instead of self.cache. '
                           'To set the self._cache dictionary, use self._cache[key] = value instead of self.cache.set(key, value). '
                           'To force update the underlying cache plugin with the contents of self._cache before parse() is complete, '
                           'call self.set_cache_plugin and it will use the self._cache dictionary to update the cache plugin',
                           version='2.12', collection_name='ansible.builtin')
        self.real_cacheable._cache[key] = value
        # Immediately persist through to the real cache plugin as well.
        self.real_cacheable.set_cache_plugin()

    def __getattr__(self, name):
        display.deprecated('InventoryModule should utilize self._cache instead of self.cache',
                           version='2.12', collection_name='ansible.builtin')
        # Forward any other attribute lookup to the underlying cache dict.
        return self.real_cacheable._cache.__getattribute__(name)
class Cacheable(object):
    """Mixin that gives an inventory plugin access to a cache plugin."""

    _cache = CacheObject()

    @property
    def cache(self):
        # Deprecated accessor; plugins should use self._cache directly.
        return DeprecatedCache(self)

    def load_cache_plugin(self):
        """Instantiate the configured cache plugin into ``self._cache``."""
        # Map user-facing option names onto the private keyword names the
        # cache plugin constructor expects, skipping unset options.
        option_map = (('_uri', 'cache_connection'),
                      ('_timeout', 'cache_timeout'),
                      ('_prefix', 'cache_prefix'))
        cache_options = {}
        for plugin_key, option_name in option_map:
            value = self.get_option(option_name)
            if value is not None:
                cache_options[plugin_key] = value
        self._cache = get_cache_plugin(self.get_option('cache_plugin'), **cache_options)

    def get_cache_key(self, path):
        """Return the cache key for this plugin/inventory-source pair."""
        return "{0}_{1}".format(self.NAME, self._get_cache_prefix(path))

    def _get_cache_prefix(self, path):
        """Create a predictable, unique prefix for this plugin/inventory pair."""
        name_digest = hashlib.sha1(to_bytes(self.NAME, errors='surrogate_or_strict')).hexdigest()
        path_digest = hashlib.sha1(to_bytes(path, errors='surrogate_or_strict')).hexdigest()
        return 's_'.join([name_digest[:5], path_digest[:5]])

    def clear_cache(self):
        """Remove every entry from the backing cache plugin."""
        self._cache.flush()

    def update_cache_if_changed(self):
        """Ask the cache adjudicator to persist self._cache if it changed."""
        self._cache.update_cache_if_changed()

    def set_cache_plugin(self):
        """Force-write the contents of self._cache to the cache plugin."""
        self._cache.set_cache()
class Constructable(object):
    """Mixin providing 'constructed' features: composed variables, composed
    groups and keyed groups built from Jinja2 expressions over host vars."""

    def _compose(self, template, variables):
        ''' helper method for plugins to compose variables for Ansible based on jinja2 expression and inventory vars'''
        t = self.templar

        try:
            use_extra = self.get_option('use_extra_vars')
        except Exception:
            # Plugin does not declare the option; fall back to inventory vars only.
            use_extra = False

        if use_extra:
            t.available_variables = combine_vars(variables, self._vars)
        else:
            t.available_variables = variables

        # Wrap the bare expression in the environment's variable delimiters
        # ('{{ ... }}' by default) before templating; lookups are disabled.
        return t.template('%s%s%s' % (t.environment.variable_start_string, template, t.environment.variable_end_string), disable_lookups=True)

    def _set_composite_vars(self, compose, variables, host, strict=False):
        ''' loops over compose entries to create vars for hosts '''
        if compose and isinstance(compose, dict):
            for varname in compose:
                try:
                    composite = self._compose(compose[varname], variables)
                except Exception as e:
                    if strict:
                        raise AnsibleError("Could not set %s for host %s: %s" % (varname, host, to_native(e)))
                    # Non-strict mode: skip entries that fail to template.
                    continue
                self.inventory.set_variable(host, varname, composite)

    def _add_host_to_composed_groups(self, groups, variables, host, strict=False, fetch_hostvars=True):
        ''' helper to create complex groups for plugins based on jinja2 conditionals, hosts that meet the conditional are added to group'''
        # process each 'group entry'
        if groups and isinstance(groups, dict):
            if fetch_hostvars:
                variables = combine_vars(variables, self.inventory.get_host(host).get_vars())
            self.templar.available_variables = variables
            for group_name in groups:
                # Evaluate the user expression as a boolean conditional.
                conditional = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % groups[group_name]
                group_name = self._sanitize_group_name(group_name)
                try:
                    result = boolean(self.templar.template(conditional))
                except Exception as e:
                    if strict:
                        raise AnsibleParserError("Could not add host %s to group %s: %s" % (host, group_name, to_native(e)))
                    continue

                if result:
                    # ensure group exists, use sanitized name
                    group_name = self.inventory.add_group(group_name)
                    # add host to group
                    self.inventory.add_child(group_name, host)

    def _add_host_to_keyed_groups(self, keys, variables, host, strict=False, fetch_hostvars=True):
        ''' helper to create groups for plugins based on variable values and add the corresponding hosts to it'''
        if keys and isinstance(keys, list):
            for keyed in keys:
                if keyed and isinstance(keyed, dict):

                    if fetch_hostvars:
                        variables = combine_vars(variables, self.inventory.get_host(host).get_vars())
                    try:
                        key = self._compose(keyed.get('key'), variables)
                    except Exception as e:
                        if strict:
                            raise AnsibleParserError("Could not generate group for host %s from %s entry: %s" % (host, keyed.get('key'), to_native(e)))
                        continue

                    if key:
                        prefix = keyed.get('prefix', '')
                        sep = keyed.get('separator', '_')
                        raw_parent_name = keyed.get('parent_group', None)
                        if raw_parent_name:
                            # Parent group names may themselves be templated.
                            try:
                                raw_parent_name = self.templar.template(raw_parent_name)
                            except AnsibleError as e:
                                if strict:
                                    raise AnsibleParserError("Could not generate parent group %s for group %s: %s" % (raw_parent_name, key, to_native(e)))
                                continue

                        # A key may template to a string, a list of names, or
                        # a mapping of name->value pairs; normalise all three
                        # forms into a flat list of raw group names.
                        new_raw_group_names = []
                        if isinstance(key, string_types):
                            new_raw_group_names.append(key)
                        elif isinstance(key, list):
                            for name in key:
                                new_raw_group_names.append(name)
                        elif isinstance(key, Mapping):
                            for (gname, gval) in key.items():
                                name = '%s%s%s' % (gname, sep, gval)
                                new_raw_group_names.append(name)
                        else:
                            raise AnsibleParserError("Invalid group name format, expected a string or a list of them or dictionary, got: %s" % type(key))

                        for bare_name in new_raw_group_names:
                            if prefix == '' and self.get_option('leading_separator') is False:
                                # No prefix and leading separators disabled:
                                # drop the separator entirely.
                                sep = ''
                            gname = self._sanitize_group_name('%s%s%s' % (prefix, sep, bare_name))
                            result_gname = self.inventory.add_group(gname)
                            self.inventory.add_host(host, result_gname)

                            if raw_parent_name:
                                parent_name = self._sanitize_group_name(raw_parent_name)
                                self.inventory.add_group(parent_name)
                                self.inventory.add_child(parent_name, result_gname)

                    else:
                        # exclude case of empty list and dictionary, because these are valid constructions
                        # simply no groups need to be constructed, but are still falsy
                        if strict and key not in ([], {}):
                            raise AnsibleParserError("No key or key resulted empty for %s in host %s, invalid entry" % (keyed.get('key'), host))

                else:
                    raise AnsibleParserError("Invalid keyed group entry, it must be a dictionary: %s " % keyed)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,142 |
inventory contains deprecated call to be removed in 2.12
|
##### SUMMARY
inventory contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/inventory/__init__.py:298:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/inventory/__init__.py:305:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/inventory/__init__.py:314:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/inventory/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74142
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:05Z |
python
| 2021-04-13T15:08:20Z |
lib/ansible/vars/fact_cache.py
|
# Copyright: (c) 2014, Michael DeHaan <[email protected]>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.plugins.loader import cache_loader
from ansible.utils.display import Display
display = Display()
class FactCache(MutableMapping):
    """Dict-like front-end over the configured fact cache plugin.

    All mapping operations delegate to the plugin selected by
    ``C.CACHE_PLUGIN``, so reads/writes go through the plugin's own
    storage backend.
    """

    def __init__(self, *args, **kwargs):

        # Resolve the configured cache plugin once at construction time.
        self._plugin = cache_loader.get(C.CACHE_PLUGIN)
        if not self._plugin:
            raise AnsibleError('Unable to load the facts cache plugin (%s).' % (C.CACHE_PLUGIN))

        super(FactCache, self).__init__(*args, **kwargs)

    def __getitem__(self, key):
        # Explicit containment check so missing keys raise KeyError
        # regardless of how the plugin's get() behaves.
        if not self._plugin.contains(key):
            raise KeyError
        return self._plugin.get(key)

    def __setitem__(self, key, value):
        self._plugin.set(key, value)

    def __delitem__(self, key):
        self._plugin.delete(key)

    def __contains__(self, key):
        return self._plugin.contains(key)

    def __iter__(self):
        return iter(self._plugin.keys())

    def __len__(self):
        return len(self._plugin.keys())

    def copy(self):
        """ Return a primitive copy of the keys and values from the cache. """
        return dict(self)

    def keys(self):
        return self._plugin.keys()

    def flush(self):
        """ Flush the fact cache of all keys. """
        self._plugin.flush()

    def first_order_merge(self, key, value):
        # Shallow (first-order) merge: update the existing cached dict for
        # 'key' with 'value' rather than replacing it wholesale.
        host_facts = {key: value}
        try:
            host_cache = self._plugin.get(key)
            if host_cache:
                host_cache.update(value)
                host_facts[key] = host_cache
        except KeyError:
            # No existing entry; fall through and store 'value' as-is.
            pass

        super(FactCache, self).update(host_facts)

    def update(self, *args):
        """
        Backwards compat shim

        We thought we needed this to ensure we always called the plugin's set() method but
        MutableMapping.update() will call our __setitem__() just fine.  It's the calls to update
        that we need to be careful of.  This contains a bug::

            fact_cache[host.name].update(facts)

        It retrieves a *copy* of the facts for host.name and then updates the copy.  So the changes
        aren't persisted.

        Instead we need to do::

            fact_cache.update({host.name, facts})

        Which will use FactCache's update() method.

        We currently need this shim for backwards compat because the update() method that we had
        implemented took key and value as arguments instead of taking a dict.  We can remove the
        shim in 2.12 as MutableMapping.update() should do everything that we need.
        """
        if len(args) == 2:
            # Deprecated. Call the new function with this name
            display.deprecated('Calling FactCache().update(key, value) is deprecated.  Use'
                               ' FactCache().first_order_merge(key, value) if you want the old'
                               ' behaviour or use FactCache().update({key: value}) if you want'
                               ' dict-like behaviour.', version='2.12', collection_name='ansible.builtin')
            return self.first_order_merge(*args)

        elif len(args) == 1:
            host_facts = args[0]

        else:
            raise TypeError('update expected at most 1 argument, got {0}'.format(len(args)))

        super(FactCache, self).update(host_facts)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,142 |
inventory contains deprecated call to be removed in 2.12
|
##### SUMMARY
inventory contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/inventory/__init__.py:298:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/inventory/__init__.py:305:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/inventory/__init__.py:314:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/inventory/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74142
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:05Z |
python
| 2021-04-13T15:08:20Z |
test/sanity/ignore.txt
|
docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst no-smart-quotes
examples/play.yml shebang
examples/scripts/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/my_test.py shebang # example module but not in a normal module location
examples/scripts/my_test_facts.py shebang # example module but not in a normal module location
examples/scripts/my_test_info.py shebang # example module but not in a normal module location
examples/scripts/upgrade_to_ps3.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/upgrade_to_ps3.ps1 pslint:PSUseApprovedVerbs
hacking/build-ansible.py shebang # only run by release engineers, Python 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.6!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.7!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-3.5!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.6!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.7!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-3.5!skip # release and docs process only, 3.6+ required
lib/ansible/cli/console.py pylint:blacklisted-name
lib/ansible/cli/galaxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/cli/scripts/ansible_cli_stub.py pylint:ansible-deprecated-version
lib/ansible/cli/scripts/ansible_cli_stub.py shebang
lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang
lib/ansible/config/base.yml no-unwanted-files
lib/ansible/executor/playbook_executor.py pylint:blacklisted-name
lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/task_queue_manager.py pylint:blacklisted-name
lib/ansible/galaxy/collection/__init__.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/collection/galaxy_api_proxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/dataclasses.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/providers.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/keyword_desc.yml no-unwanted-files
lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py pylint:blacklisted-name
lib/ansible/module_utils/compat/selinux.py import-2.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-2.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.5!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/distro/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py no-assert
lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
lib/ansible/module_utils/facts/__init__.py empty-init # breaks namespacing, deprecate and eventually remove
lib/ansible/module_utils/facts/network/linux.py pylint:blacklisted-name
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.ArgvParser.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSProvideCommentHelp # need to agree on best format for comment location
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSProvideCommentHelp
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.LinkUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/pycompat24.py no-get-exception
lib/ansible/module_utils/six/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/six/__init__.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py no-basestring
lib/ansible/module_utils/six/__init__.py no-dict-iteritems
lib/ansible/module_utils/six/__init__.py no-dict-iterkeys
lib/ansible/module_utils/six/__init__.py no-dict-itervalues
lib/ansible/module_utils/six/__init__.py pylint:self-assigning-variable
lib/ansible/module_utils/six/__init__.py replace-urlopen
lib/ansible/module_utils/urls.py pylint:blacklisted-name
lib/ansible/module_utils/urls.py replace-urlopen
lib/ansible/modules/apt.py validate-modules:parameter-invalid
lib/ansible/modules/apt_key.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/apt_repository.py validate-modules:parameter-invalid
lib/ansible/modules/assemble.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/async_status.py use-argspec-type-path
lib/ansible/modules/async_status.py validate-modules!skip
lib/ansible/modules/async_wrapper.py ansible-doc!skip # not an actual module
lib/ansible/modules/async_wrapper.py pylint:ansible-bad-function # ignore, required
lib/ansible/modules/async_wrapper.py use-argspec-type-path
lib/ansible/modules/blockinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/blockinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/command.py validate-modules:doc-default-does-not-match-spec # _uses_shell is undocumented
lib/ansible/modules/command.py validate-modules:doc-missing-type
lib/ansible/modules/command.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/command.py validate-modules:undocumented-parameter
lib/ansible/modules/copy.py pylint:blacklisted-name
lib/ansible/modules/copy.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/copy.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/copy.py validate-modules:undocumented-parameter
lib/ansible/modules/dnf.py validate-modules:doc-required-mismatch
lib/ansible/modules/dnf.py validate-modules:parameter-invalid
lib/ansible/modules/file.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/file.py validate-modules:undocumented-parameter
lib/ansible/modules/find.py use-argspec-type-path # fix needed
lib/ansible/modules/git.py pylint:blacklisted-name
lib/ansible/modules/git.py use-argspec-type-path
lib/ansible/modules/git.py validate-modules:doc-missing-type
lib/ansible/modules/git.py validate-modules:doc-required-mismatch
lib/ansible/modules/hostname.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/iptables.py pylint:blacklisted-name
lib/ansible/modules/lineinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/package_facts.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/pip.py pylint:blacklisted-name
lib/ansible/modules/pip.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/replace.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:use-run-command-not-popen
lib/ansible/modules/stat.py validate-modules:doc-default-does-not-match-spec # get_md5 is undocumented
lib/ansible/modules/stat.py validate-modules:parameter-invalid
lib/ansible/modules/stat.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/stat.py validate-modules:undocumented-parameter
lib/ansible/modules/systemd.py validate-modules:parameter-invalid
lib/ansible/modules/systemd.py validate-modules:return-syntax-error
lib/ansible/modules/sysvinit.py validate-modules:return-syntax-error
lib/ansible/modules/unarchive.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/uri.py pylint:blacklisted-name
lib/ansible/modules/uri.py validate-modules:doc-required-mismatch
lib/ansible/modules/user.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/user.py validate-modules:doc-default-incompatible-type
lib/ansible/modules/user.py validate-modules:use-run-command-not-popen
lib/ansible/modules/yum.py pylint:blacklisted-name
lib/ansible/modules/yum.py validate-modules:parameter-invalid
lib/ansible/modules/yum_repository.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/yum_repository.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/yum_repository.py validate-modules:undocumented-parameter
lib/ansible/parsing/vault/__init__.py pylint:blacklisted-name
lib/ansible/playbook/base.py pylint:blacklisted-name
lib/ansible/playbook/collectionsearch.py required-and-default-attributes # https://github.com/ansible/ansible/issues/61460
lib/ansible/playbook/conditional.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:blacklisted-name
lib/ansible/playbook/play_context.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/async_status.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/normal.py action-plugin-docs # default action plugin for modules without a dedicated action plugin
lib/ansible/plugins/cache/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/cache/base.py ansible-doc!skip # not a plugin, but a stub for backwards compatibility
lib/ansible/plugins/inventory/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/inventory/script.py pylint:ansible-deprecated-version
lib/ansible/plugins/lookup/sequence.py pylint:blacklisted-name
lib/ansible/plugins/strategy/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/strategy/__init__.py pylint:blacklisted-name
lib/ansible/plugins/strategy/linear.py pylint:blacklisted-name
lib/ansible/vars/fact_cache.py pylint:ansible-deprecated-version
lib/ansible/vars/hostvars.py pylint:blacklisted-name
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-function # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import-from # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/collections_plugin_namespace/collection_root/ansible_collections/my_ns/my_col/plugins/lookup/lookup_no_future_boilerplate.py future-import-boilerplate # testing Python 2.x implicit relative imports
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util2.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util3.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/my_module.py pylint:relative-beyond-top-level
test/integration/targets/gathering_facts/library/bogus_facts shebang
test/integration/targets/gathering_facts/library/facts_one shebang
test/integration/targets/gathering_facts/library/facts_two shebang
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps1 pslint!skip
test/integration/targets/incidental_win_reboot/templates/post_reboot.ps1 pslint!skip
test/integration/targets/json_cleanup/library/bad_json shebang
test/integration/targets/lookup_csvfile/files/crlf.csv line-endings
test/integration/targets/lookup_ini/lookup-8859-15.ini no-smart-quotes
test/integration/targets/module_precedence/lib_with_extension/a.ini shebang
test/integration/targets/module_precedence/lib_with_extension/ping.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/a.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/ping.ini shebang
test/integration/targets/module_utils/library/test.py future-import-boilerplate # allow testing of Python 2.x implicit relative imports
test/integration/targets/module_utils/module_utils/bar0/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/bar.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/yak/zebra/foo.py pylint:blacklisted-name
test/integration/targets/old_style_modules_posix/library/helloworld.sh shebang
test/integration/targets/template/files/encoding_1252_utf-8.expected no-smart-quotes
test/integration/targets/template/files/encoding_1252_windows-1252.expected no-smart-quotes
test/integration/targets/template/files/foo.dos.txt line-endings
test/integration/targets/template/templates/encoding_1252.j2 no-smart-quotes
test/integration/targets/unicode/unicode.yml no-smart-quotes
test/integration/targets/win_exec_wrapper/library/test_fail.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_exec_wrapper/tasks/main.yml no-smart-quotes # We are explicitly testing smart quote support for env vars
test/integration/targets/win_fetch/tasks/main.yml no-smart-quotes # We are explicitly testing smart quotes in the file name to fetch
test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_script/files/test_script.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_creates_file.ps1 pslint:PSAvoidUsingCmdletAliases
test/integration/targets/win_script/files/test_script_removes_file.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_script/files/test_script_with_args.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip
test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt test-constraints
test/lib/ansible_test/_data/requirements/sanity.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py use-compat-six
test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
test/support/integration/plugins/module_utils/aws/core.py pylint:property-with-parameters
test/support/integration/plugins/module_utils/cloud.py future-import-boilerplate
test/support/integration/plugins/module_utils/cloud.py metaclass-boilerplate
test/support/integration/plugins/module_utils/cloud.py pylint:isinstance-second-argument-not-valid-type
test/support/integration/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/integration/plugins/module_utils/database.py future-import-boilerplate
test/support/integration/plugins/module_utils/database.py metaclass-boilerplate
test/support/integration/plugins/module_utils/mysql.py future-import-boilerplate
test/support/integration/plugins/module_utils/mysql.py metaclass-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/integration/plugins/module_utils/postgres.py future-import-boilerplate
test/support/integration/plugins/module_utils/postgres.py metaclass-boilerplate
test/support/integration/plugins/modules/lvg.py pylint:blacklisted-name
test/support/integration/plugins/modules/timezone.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py pep8:E203
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py pep8:E501
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pep8:E231
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py metaclass-boilerplate
test/support/windows-integration/plugins/modules/async_status.ps1 pslint!skip
test/support/windows-integration/plugins/modules/setup.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_copy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_dsc.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_feature.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_find.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_lineinfile.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_regedit.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_security_policy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_shell.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_wait_for.ps1 pslint!skip
test/units/executor/test_play_iterator.py pylint:blacklisted-name
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-no-version
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-version
test/units/module_utils/basic/test_run_command.py pylint:blacklisted-name
test/units/module_utils/urls/fixtures/multipart.txt line-endings # Fixture for HTTP tests that use CRLF
test/units/module_utils/urls/test_Request.py replace-urlopen
test/units/module_utils/urls/test_fetch_url.py replace-urlopen
test/units/modules/test_apt.py pylint:blacklisted-name
test/units/parsing/vault/test_vault.py pylint:blacklisted-name
test/units/playbook/role/test_role.py pylint:blacklisted-name
test/units/plugins/test_plugins.py pylint:blacklisted-name
test/units/template/test_templar.py pylint:blacklisted-name
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py pylint:relative-beyond-top-level
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py future-import-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py metaclass-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/test_collection_loader.py pylint:undefined-variable # magic runtime local var splatting
test/utils/shippable/check_matrix.py replace-urlopen
test/utils/shippable/timing.py shebang
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,142 |
inventory contains deprecated call to be removed in 2.12
|
##### SUMMARY
inventory contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/inventory/__init__.py:298:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/inventory/__init__.py:305:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
lib/ansible/plugins/inventory/__init__.py:314:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/inventory/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74142
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:05Z |
python
| 2021-04-13T15:08:20Z |
test/units/plugins/cache/test_cache.py
|
# (c) 2012-2015, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest, mock
from ansible.errors import AnsibleError
from ansible.plugins.cache import FactCache, CachePluginAdjudicator
from ansible.plugins.cache.base import BaseCacheModule
from ansible.plugins.cache.memory import CacheModule as MemoryCache
from ansible.plugins.loader import cache_loader
import pytest
class TestCachePluginAdjudicator(unittest.TestCase):
    """Exercise the dict-like interface of CachePluginAdjudicator (backed by the default memory cache plugin)."""

    def setUp(self):
        # memory plugin cache
        self.cache = CachePluginAdjudicator()
        self.cache['cache_key'] = {'key1': 'value1', 'key2': 'value2'}
        self.cache['cache_key_2'] = {'key': 'value'}

    def test___setitem__(self):
        self.cache['new_cache_key'] = {'new_key1': ['new_value1', 'new_value2']}
        assert self.cache['new_cache_key'] == {'new_key1': ['new_value1', 'new_value2']}

    def test_inner___setitem__(self):
        # Mutating a value nested inside a cached entry must be visible on re-read.
        self.cache['new_cache_key'] = {'new_key1': ['new_value1', 'new_value2']}
        self.cache['new_cache_key']['new_key1'][0] = 'updated_value1'
        assert self.cache['new_cache_key'] == {'new_key1': ['updated_value1', 'new_value2']}

    def test___contains__(self):
        assert 'cache_key' in self.cache
        assert 'not_cache_key' not in self.cache

    def test_get(self):
        assert self.cache.get('cache_key') == {'key1': 'value1', 'key2': 'value2'}

    def test_get_with_default(self):
        assert self.cache.get('foo', 'bar') == 'bar'

    def test_get_without_default(self):
        assert self.cache.get('foo') is None

    def test___getitem__(self):
        # Missing keys raise KeyError, matching dict semantics.
        # (No "as err" binding: the exception object is not inspected.)
        with pytest.raises(KeyError):
            self.cache['foo']

    def test_pop_with_default(self):
        assert self.cache.pop('foo', 'bar') == 'bar'

    def test_pop_without_default(self):
        # pop() of a missing key without a default raises; the call never
        # returns, so there is nothing to assert on.
        with pytest.raises(KeyError):
            self.cache.pop('foo')

    def test_pop(self):
        v = self.cache.pop('cache_key_2')
        assert v == {'key': 'value'}
        assert 'cache_key_2' not in self.cache

    def test_update(self):
        self.cache.update({'cache_key': {'key2': 'updatedvalue'}})
        assert self.cache['cache_key']['key2'] == 'updatedvalue'

    def test_flush(self):
        # Fake that the cache already has some data in it but the adjudicator
        # hasn't loaded it in.
        self.cache._plugin.set('monkey', 'animal')
        self.cache._plugin.set('wolf', 'animal')
        self.cache._plugin.set('another wolf', 'another animal')

        # The adjudicator doesn't know about the new entries
        assert len(self.cache) == 2
        # But the cache itself does
        assert len(self.cache._plugin._cache) == 3

        # If we call flush, both the adjudicator and the cache should flush
        self.cache.flush()
        assert len(self.cache) == 0
        assert len(self.cache._plugin._cache) == 0
class TestFactCache(unittest.TestCase):
    """Exercise FactCache construction, copying, and both update() signatures."""

    def setUp(self):
        # Force the in-memory backend so no cache files are touched.
        with mock.patch('ansible.constants.CACHE_PLUGIN', 'memory'):
            self.cache = FactCache()

    def test_copy(self):
        self.cache['avocado'] = 'fruit'
        self.cache['daisy'] = 'flower'
        a_copy = self.cache.copy()
        self.assertEqual(type(a_copy), dict)
        self.assertEqual(a_copy, dict(avocado='fruit', daisy='flower'))

    def test_plugin_load_failure(self):
        # See https://github.com/ansible/ansible/issues/18751
        # Note no fact_connection config set, so this will fail
        with mock.patch('ansible.constants.CACHE_PLUGIN', 'json'):
            # assertRaisesRegex (not the deprecated assertRaisesRegexp alias,
            # which was removed in Python 3.12)
            self.assertRaisesRegex(AnsibleError,
                                   "Unable to load the facts cache plugin.*json.*",
                                   FactCache)

    def test_update(self):
        # New-style update(): a single mapping argument, like dict.update().
        self.cache.update({'cache_key': {'key2': 'updatedvalue'}})
        assert self.cache['cache_key']['key2'] == 'updatedvalue'

    def test_update_legacy(self):
        # Legacy two-argument update(key, value) form.
        self.cache.update('cache_key', {'key2': 'updatedvalue'})
        assert self.cache['cache_key']['key2'] == 'updatedvalue'

    def test_update_legacy_key_exists(self):
        # Legacy update merges into the existing entry rather than replacing it.
        self.cache['cache_key'] = {'key': 'value', 'key2': 'value2'}
        self.cache.update('cache_key', {'key': 'updatedvalue'})
        assert self.cache['cache_key']['key'] == 'updatedvalue'
        assert self.cache['cache_key']['key2'] == 'value2'
class TestAbstractClass(unittest.TestCase):
    """Verify the abstract-method contract of BaseCacheModule."""

    # NOTE: the previous no-op setUp/tearDown overrides were removed; they
    # only contained `pass` and added nothing over the unittest defaults.

    def test_subclass_error(self):
        # A subclass implementing none of the abstract methods cannot be
        # instantiated.
        class CacheModule1(BaseCacheModule):
            pass
        with self.assertRaises(TypeError):
            CacheModule1()  # pylint: disable=abstract-class-instantiated

        # Implementing only some of the abstract methods is still not enough.
        class CacheModule2(BaseCacheModule):
            def get(self, key):
                super(CacheModule2, self).get(key)
        with self.assertRaises(TypeError):
            CacheModule2()  # pylint: disable=abstract-class-instantiated

    def test_subclass_success(self):
        # Implementing every abstract method makes the subclass concrete.
        class CacheModule3(BaseCacheModule):
            def get(self, key):
                super(CacheModule3, self).get(key)

            def set(self, key, value):
                super(CacheModule3, self).set(key, value)

            def keys(self):
                super(CacheModule3, self).keys()

            def contains(self, key):
                super(CacheModule3, self).contains(key)

            def delete(self, key):
                super(CacheModule3, self).delete(key)

            def flush(self):
                super(CacheModule3, self).flush()

            def copy(self):
                super(CacheModule3, self).copy()

        self.assertIsInstance(CacheModule3(), CacheModule3)

    def test_memory_cachemodule(self):
        self.assertIsInstance(MemoryCache(), MemoryCache)

    def test_memory_cachemodule_with_loader(self):
        # The memory plugin must also be reachable through the plugin loader.
        self.assertIsInstance(cache_loader.get('memory'), MemoryCache)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,140 |
cache contains deprecated call to be removed in 2.12
|
##### SUMMARY
cache contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/cache/__init__.py:49:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/cache/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74140
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:03Z |
python
| 2021-04-13T15:08:20Z |
changelogs/fragments/cache-deprecations.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,140 |
cache contains deprecated call to be removed in 2.12
|
##### SUMMARY
cache contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/cache/__init__.py:49:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/cache/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74140
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:03Z |
python
| 2021-04-13T15:08:20Z |
lib/ansible/plugins/cache/__init__.py
|
# (c) 2014, Michael DeHaan <[email protected]>
# (c) 2018, Ansible Project
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import os
import time
import errno
from abc import ABCMeta, abstractmethod
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.six import with_metaclass
from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.plugins import AnsiblePlugin
from ansible.plugins.loader import cache_loader
from ansible.utils.collection_loader import resource_from_fqcr
from ansible.utils.display import Display
from ansible.vars.fact_cache import FactCache as RealFactCache
display = Display()
class FactCache(RealFactCache):
    """
    Deprecated backwards-compatibility shim; will be removed after the
    deprecation period. The real implementation lives in
    ansible.vars.fact_cache.FactCache. It was moved out of this module because
    it was never part of the cache plugin API: it is the code that *uses*
    cache plugins, not a cache plugin itself. Subclassing it wouldn't yield a
    usable Cache Plugin and there was no facility to use it as anything else.
    """
    def __init__(self, *args, **kwargs):
        # Warn anyone still importing from this location, then delegate
        # everything to the real class.
        display.deprecated('ansible.plugins.cache.FactCache has been moved to'
                           ' ansible.vars.fact_cache.FactCache.  If you are looking for the class'
                           ' to subclass for a cache plugin, you want'
                           ' ansible.plugins.cache.BaseCacheModule or one of its subclasses.',
                           version='2.12', collection_name='ansible.builtin')
        super(FactCache, self).__init__(*args, **kwargs)
class BaseCacheModule(AnsiblePlugin):
    """Abstract base class every cache plugin must implement.

    Subclasses provide key/value storage semantics via the abstract methods
    below; option handling comes from :class:`AnsiblePlugin`.
    """

    # Backwards compat only. Just import the global display instead
    _display = display

    def __init__(self, *args, **kwargs):
        # Third party code is not using cache_loader to load plugin - fall back to previous behavior
        if not hasattr(self, '_load_name'):
            display.deprecated('Rather than importing custom CacheModules directly, use ansible.plugins.loader.cache_loader',
                               version='2.14', collection_name='ansible.builtin')
            # Derive a load name from the module path; the second assignment
            # strips any collection FQCR prefix from it.
            self._load_name = self.__module__.split('.')[-1]
            self._load_name = resource_from_fqcr(self.__module__)
        super(BaseCacheModule, self).__init__()
        # positional args are treated as variable-sourced options, keyword
        # args as direct option overrides
        self.set_options(var_options=args, direct=kwargs)

    @abstractmethod
    def get(self, key):
        """Return the value stored under ``key``."""
        pass

    @abstractmethod
    def set(self, key, value):
        """Store ``value`` under ``key``."""
        pass

    @abstractmethod
    def keys(self):
        """Return the list of stored keys."""
        pass

    @abstractmethod
    def contains(self, key):
        """Return True if ``key`` is present in the cache."""
        pass

    @abstractmethod
    def delete(self, key):
        """Remove ``key`` from the cache."""
        pass

    @abstractmethod
    def flush(self):
        """Remove all entries from the cache."""
        pass

    @abstractmethod
    def copy(self):
        """Return a (shallow) copy of all cached data."""
        pass
class BaseFileCacheModule(BaseCacheModule):
    """
    A caching module backed by file based storage.

    One file per key is kept under ``_cache_dir``; entries expire based on
    file mtime and ``_timeout``. An in-memory dict (``self._cache``) fronts
    the files so values read once in a play stay available even if they
    expire on disk mid-run.
    """
    def __init__(self, *args, **kwargs):

        try:
            super(BaseFileCacheModule, self).__init__(*args, **kwargs)
            self._cache_dir = self._get_cache_connection(self.get_option('_uri'))
            self._timeout = float(self.get_option('_timeout'))
        except KeyError:
            # plugin was not configured through the option system - fall back
            # to the global constants
            self._cache_dir = self._get_cache_connection(C.CACHE_PLUGIN_CONNECTION)
            self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
        self.plugin_name = resource_from_fqcr(self.__module__)
        self._cache = {}
        self.validate_cache_connection()

    def _get_cache_connection(self, source):
        # Expand ~ and environment variables in the configured path.
        # Returns None when source is falsy or not path-like.
        if source:
            try:
                return os.path.expanduser(os.path.expandvars(source))
            except TypeError:
                pass

    def validate_cache_connection(self):
        # Ensure the cache directory exists and is readable/writable/searchable,
        # creating it if needed; any failure disables the plugin with an error.
        if not self._cache_dir:
            raise AnsibleError("error, '%s' cache plugin requires the 'fact_caching_connection' config option "
                               "to be set (to a writeable directory path)" % self.plugin_name)

        if not os.path.exists(self._cache_dir):
            try:
                os.makedirs(self._cache_dir)
            except (OSError, IOError) as e:
                raise AnsibleError("error in '%s' cache plugin while trying to create cache dir %s : %s" % (self.plugin_name, self._cache_dir, to_bytes(e)))
        else:
            for x in (os.R_OK, os.W_OK, os.X_OK):
                if not os.access(self._cache_dir, x):
                    raise AnsibleError("error in '%s' cache, configured path (%s) does not have necessary permissions (rwx), disabling plugin" % (
                        self.plugin_name, self._cache_dir))

    def _get_cache_file_name(self, key):
        # Build the on-disk path for a key, honoring the optional filename prefix.
        prefix = self.get_option('_prefix')
        if prefix:
            cachefile = "%s/%s%s" % (self._cache_dir, prefix, key)
        else:
            cachefile = "%s/%s" % (self._cache_dir, key)
        return cachefile

    def get(self, key):
        """ This checks the in memory cache first as the fact was not expired at 'gather time'
        and it would be problematic if the key did expire after some long running tasks and
        user gets 'undefined' error in the same play """

        if key not in self._cache:

            if self.has_expired(key) or key == "":
                raise KeyError

            cachefile = self._get_cache_file_name(key)
            try:
                value = self._load(cachefile)
                self._cache[key] = value
            except ValueError as e:
                # undecodable content: treat as corruption, remove the file and fail loudly
                display.warning("error in '%s' cache plugin while trying to read %s : %s. "
                                "Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
                self.delete(key)
                raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data. "
                                   "It has been removed, so you can re-run your command now." % cachefile)
            except (OSError, IOError) as e:
                # unreadable file is reported to the caller as a plain cache miss
                display.warning("error in '%s' cache plugin while trying to read %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
                raise KeyError
            except Exception as e:
                raise AnsibleError("Error while decoding the cache file %s: %s" % (cachefile, to_bytes(e)))

        return self._cache.get(key)

    def set(self, key, value):
        # Update the in-memory mirror first; a failed disk write only warns.
        self._cache[key] = value

        cachefile = self._get_cache_file_name(key)
        try:
            self._dump(value, cachefile)
        except (OSError, IOError) as e:
            display.warning("error in '%s' cache plugin while trying to write to %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))

    def has_expired(self, key):
        # A timeout of 0 means entries never expire. Expiry is judged from
        # the cache file's mtime; a missing file is "not expired" (get() then
        # raises on the read instead). Expired keys are evicted from memory.
        if self._timeout == 0:
            return False

        cachefile = self._get_cache_file_name(key)
        try:
            st = os.stat(cachefile)
        except (OSError, IOError) as e:
            if e.errno == errno.ENOENT:
                return False
            else:
                display.warning("error in '%s' cache plugin while trying to stat %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
                return False

        if time.time() - st.st_mtime <= self._timeout:
            return False

        if key in self._cache:
            del self._cache[key]
        return True

    def keys(self):
        # Enumerate cache files, skipping hidden files and expired entries.
        keys = []
        for k in os.listdir(self._cache_dir):
            if not (k.startswith('.') or self.has_expired(k)):
                keys.append(k)
        return keys

    def contains(self, key):
        # In-memory hit wins; otherwise check expiry, then file existence.
        cachefile = self._get_cache_file_name(key)

        if key in self._cache:
            return True

        if self.has_expired(key):
            return False
        try:
            os.stat(cachefile)
            return True
        except (OSError, IOError) as e:
            if e.errno == errno.ENOENT:
                return False
            else:
                display.warning("error in '%s' cache plugin while trying to stat %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))

    def delete(self, key):
        # Drop the key from both the memory mirror and disk; both are best-effort.
        try:
            del self._cache[key]
        except KeyError:
            pass
        try:
            os.remove(self._get_cache_file_name(key))
        except (OSError, IOError):
            pass  # TODO: only pass on non existing?

    def flush(self):
        self._cache = {}
        for key in self.keys():
            self.delete(key)

    def copy(self):
        # NOTE: reads every key through get(), so this touches disk for
        # anything not already in the memory mirror.
        ret = dict()
        for key in self.keys():
            ret[key] = self.get(key)
        return ret

    @abstractmethod
    def _load(self, filepath):
        """
        Read data from a filepath and return it as a value

        :arg filepath: The filepath to read from.
        :returns: The value stored in the filepath

        This method reads from the file on disk and takes care of any parsing
        and transformation of the data before returning it.  The value
        returned should be what Ansible would expect if it were uncached data.

        .. note:: Filehandles have advantages but calling code doesn't know
            whether this file is text or binary, should be decoded, or accessed via
            a library function.  Therefore the API uses a filepath and opens
            the file inside of the method.
        """
        pass

    @abstractmethod
    def _dump(self, value, filepath):
        """
        Write data to a filepath

        :arg value: The value to store
        :arg filepath: The filepath to store it at
        """
        pass
class CachePluginAdjudicator(MutableMapping):
    """
    Intermediary between a cache dictionary and a CacheModule

    Reads and writes go to an in-memory dict (``self._cache``); entries are
    lazily loaded from the backing cache plugin on first access and pushed
    back out via :meth:`set_cache`. ``self._retrieved`` snapshots what the
    plugin last agreed with, so :meth:`update_cache_if_changed` can skip
    redundant writes.
    """
    def __init__(self, plugin_name='memory', **kwargs):
        self._cache = {}
        self._retrieved = {}

        self._plugin = cache_loader.get(plugin_name, **kwargs)
        if not self._plugin:
            raise AnsibleError('Unable to load the cache plugin (%s).' % plugin_name)

        self._plugin_name = plugin_name

    def update_cache_if_changed(self):
        # Only push to the backing plugin when in-memory state diverged from
        # what was last loaded/stored.
        if self._retrieved != self._cache:
            self.set_cache()

    def set_cache(self):
        """Write every top-level key to the backing plugin and resnapshot."""
        for top_level_cache_key in self._cache.keys():
            self._plugin.set(top_level_cache_key, self._cache[top_level_cache_key])
        # deepcopy so later in-place mutation of cached values is detectable
        self._retrieved = copy.deepcopy(self._cache)

    def load_whole_cache(self):
        """Populate the in-memory dict with everything the plugin holds."""
        for key in self._plugin.keys():
            self._cache[key] = self._plugin.get(key)

    def __repr__(self):
        return to_text(self._cache)

    def __iter__(self):
        return iter(self.keys())

    def __len__(self):
        return len(self.keys())

    def _do_load_key(self, key):
        # Load from the plugin only if the key is unknown in memory, was not
        # already retrieved, the plugin is persistent, and the plugin has it.
        load = False
        if all([
                key not in self._cache,
                key not in self._retrieved,
                self._plugin_name != 'memory',
                self._plugin.contains(key),
        ]):
            load = True
        return load

    def _try_load_key(self, key):
        """Lazily populate ``self._cache[key]`` from the backing plugin.

        A KeyError from the plugin (e.g. the entry expired between the
        contains() check and the get()) is swallowed: the caller then sees
        a plain in-memory miss.
        """
        if self._do_load_key(key):
            try:
                self._cache[key] = self._plugin.get(key)
            except KeyError:
                pass
            else:
                self._retrieved[key] = self._cache[key]

    def __getitem__(self, key):
        self._try_load_key(key)
        return self._cache[key]

    def get(self, key, default=None):
        self._try_load_key(key)
        return self._cache.get(key, default)

    def items(self):
        return self._cache.items()

    def values(self):
        return self._cache.values()

    def keys(self):
        return self._cache.keys()

    def pop(self, key, *args):
        if args:
            return self._cache.pop(key, args[0])
        return self._cache.pop(key)

    def __delitem__(self, key):
        del self._cache[key]

    def __setitem__(self, key, value):
        self._cache[key] = value

    def flush(self):
        # NOTE(review): _retrieved is intentionally left as-is here to match
        # existing behavior; a subsequent update_cache_if_changed() may
        # therefore re-write entries that were set before the flush.
        self._plugin.flush()
        self._cache = {}

    def update(self, value):
        self._cache.update(value)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,140 |
cache contains deprecated call to be removed in 2.12
|
##### SUMMARY
cache contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/cache/__init__.py:49:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/cache/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74140
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:03Z |
python
| 2021-04-13T15:08:20Z |
lib/ansible/plugins/inventory/__init__.py
|
# (c) 2017, Red Hat, inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import hashlib
import os
import string
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.inventory.group import to_safe_group_name as original_safe
from ansible.parsing.utils.addresses import parse_address
from ansible.plugins import AnsiblePlugin
from ansible.plugins.cache import CachePluginAdjudicator as CacheObject
from ansible.module_utils._text import to_bytes, to_native
from ansible.module_utils.common._collections_compat import Mapping
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.six import string_types
from ansible.template import Templar
from ansible.utils.display import Display
from ansible.utils.vars import combine_vars, load_extra_vars
display = Display()
# Helper methods
def to_safe_group_name(name):
    """Sanitize ``name`` into a valid Ansible group name.

    Backwards-compatible wrapper: delegates to the canonical implementation
    with forced, silent sanitization.
    """
    # placeholder for backwards compat
    return original_safe(name, force=True, silent=True)
def detect_range(line=None):
    '''
    Report whether a host line contains a ``[x:y]`` range pattern.

    Returns True if the given line contains a pattern, else False.
    '''
    return '[' in line


def expand_hostname_range(line=None):
    '''
    Expand a host pattern containing one or more ``[begin:end]`` or
    ``[begin:end:step]`` ranges into the full list of hostnames.

    The '[' and ']' characters are used to maintain the pseudo-code
    appearance. They are replaced here with '|' to ease string splitting.

    Numeric ranges may be zero-padded (``db[01:10]``) and alphabetic ranges
    (``h[a:c]``) are supported; multiple ranges in one pattern are expanded
    recursively (``db[01:10:3]node-[01:10]``).

    References: https://docs.ansible.com/ansible/latest/user_guide/intro_inventory.html#hosts-and-groups
    '''
    all_hosts = []
    if not line:
        return all_hosts

    # Split off the first [...] group only; any further ranges stay in the
    # tail and are handled by the recursive call below.
    (head, nrange, tail) = line.replace('[', '|', 1).replace(']', '|', 1).split('|')
    bounds = nrange.split(":")
    if len(bounds) not in (2, 3):
        raise AnsibleError("host range must be begin:end or begin:end:step")

    beg = bounds[0] or "0"
    end = bounds[1]
    step = bounds[2] if len(bounds) == 3 else 1
    if not end:
        raise AnsibleError("host range must specify end value")

    if beg[0] == '0' and len(beg) > 1:
        # zero-padded numeric range: preserve the fixed width
        rlen = len(beg)
        if rlen != len(end):
            raise AnsibleError("host range must specify equal-length begin and end formats")

        def fill(x):
            return str(x).zfill(rlen)
    else:
        fill = str

    try:
        i_beg = string.ascii_letters.index(beg)
        i_end = string.ascii_letters.index(end)
        if i_beg > i_end:
            raise AnsibleError("host range must have begin <= end")
        seq = list(string.ascii_letters[i_beg:i_end + 1:int(step)])
    except ValueError:  # not an alpha range, treat as numeric
        seq = range(int(beg), int(end) + 1, int(step))

    for item in seq:
        candidate = head + fill(item) + tail
        if detect_range(candidate):
            all_hosts.extend(expand_hostname_range(candidate))
        else:
            all_hosts.append(candidate)

    return all_hosts
def get_cache_plugin(plugin_name, **kwargs):
    """Instantiate the named cache plugin for inventory use.

    :arg plugin_name: name of the cache plugin to load
    :arg kwargs: cache options forwarded to the plugin
    :returns: a CachePluginAdjudicator wrapping the loaded plugin
    :raises AnsibleError: when the plugin cannot be loaded, is missing a
        cache connection, or cannot reconcile the given options
    """
    try:
        cache = CacheObject(plugin_name, **kwargs)
    except AnsibleError as e:
        if 'fact_caching_connection' in to_native(e):
            # Fixed message: the ansible.cfg section is [defaults], and
            # "requires the one of" was ungrammatical.
            raise AnsibleError("error, '%s' inventory cache plugin requires one of the following to be set "
                               "to a writeable directory path:\nansible.cfg:\n[defaults]: fact_caching_connection,\n"
                               "[inventory]: cache_connection;\nEnvironment:\nANSIBLE_INVENTORY_CACHE_CONNECTION,\n"
                               "ANSIBLE_CACHE_PLUGIN_CONNECTION." % plugin_name)
        else:
            raise e

    # Plugins that don't use the option system (pre-2.10 style config from
    # ansible.constants) can't apply the passed-in cache options correctly.
    if plugin_name != 'memory' and kwargs and not getattr(cache._plugin, '_options', None):
        raise AnsibleError('Unable to use cache plugin {0} for inventory. Cache options were provided but may not reconcile '
                           'correctly unless set via set_options. Refer to the porting guide if the plugin derives user settings '
                           'from ansible.constants.'.format(plugin_name))
    return cache
class BaseInventoryPlugin(AnsiblePlugin):
    """ Parses an Inventory Source"""

    TYPE = 'generator'

    # 3rd party plugins redefine this to
    # use custom group name sanitization
    # since constructed features enforce
    # it by default.
    _sanitize_group_name = staticmethod(to_safe_group_name)

    def __init__(self):

        super(BaseInventoryPlugin, self).__init__()

        self._options = {}
        self.inventory = None
        self.display = display
        self._vars = {}

    def parse(self, inventory, loader, path, cache=True):
        ''' Populates inventory from the given data. Raises an error on any parse failure
            :arg inventory: a copy of the previously accumulated inventory data,
                 to be updated with any new data this plugin provides.
                 The inventory can be empty if no other source/plugin ran successfully.
            :arg loader: a reference to the DataLoader, which can read in YAML and JSON files,
                 it also has Vault support to automatically decrypt files.
            :arg path: the string that represents the 'inventory source',
                 normally a path to a configuration file for this inventory,
                 but it can also be a raw string for this plugin to consume
            :arg cache: a boolean that indicates if the plugin should use the cache or not
                 you can ignore if this plugin does not implement caching.
        '''

        self.loader = loader
        self.inventory = inventory
        self.templar = Templar(loader=loader)
        self._vars = load_extra_vars(loader)

    def verify_file(self, path):
        ''' Verify if file is usable by this plugin, base does minimal accessibility check
            :arg path: a string that was passed as an inventory source,
                 it normally is a path to a config file, but this is not a requirement,
                 it can also be parsed itself as the inventory data to process.
                 So only call this base class if you expect it to be a file.
        '''

        valid = False
        b_path = to_bytes(path, errors='surrogate_or_strict')
        if (os.path.exists(b_path) and os.access(b_path, os.R_OK)):
            valid = True
        else:
            self.display.vvv('Skipping due to inventory source not existing or not being readable by the current user')
        return valid

    def _populate_host_vars(self, hosts, variables, group=None, port=None):
        # Add each host (optionally to a group) and set its variables.
        if not isinstance(variables, Mapping):
            raise AnsibleParserError("Invalid data from file, expected dictionary and got:\n\n%s" % to_native(variables))

        for host in hosts:
            self.inventory.add_host(host, group=group, port=port)
            for k in variables:
                self.inventory.set_variable(host, k, variables[k])

    def _read_config_data(self, path):
        ''' validate config and set options as appropriate
            :arg path: path to common yaml format config file for this plugin
        '''

        config = {}
        try:
            # avoid loader cache so meta: refresh_inventory can pick up config changes
            # if we read more than once, fs cache should be good enough
            config = self.loader.load_from_file(path, cache=False)
        except Exception as e:
            raise AnsibleParserError(to_native(e))

        # a plugin can be loaded via many different names with redirection- if so, we want to accept any of those names
        valid_names = getattr(self, '_redirected_names') or [self.NAME]

        if not config:
            # no data
            raise AnsibleParserError("%s is empty" % (to_native(path)))
        elif config.get('plugin') not in valid_names:
            # this is not my config file
            raise AnsibleParserError("Incorrect plugin name in file: %s" % config.get('plugin', 'none found'))
        elif not isinstance(config, Mapping):
            # configs are dictionaries
            raise AnsibleParserError('inventory source has invalid structure, it should be a dictionary, got: %s' % type(config))

        self.set_options(direct=config, var_options=self._vars)
        # if the plugin supports caching and it is enabled, set up the cache plugin
        if 'cache' in self._options and self.get_option('cache'):
            cache_option_keys = [('_uri', 'cache_connection'), ('_timeout', 'cache_timeout'), ('_prefix', 'cache_prefix')]
            cache_options = dict((opt[0], self.get_option(opt[1])) for opt in cache_option_keys if self.get_option(opt[1]) is not None)
            self._cache = get_cache_plugin(self.get_option('cache_plugin'), **cache_options)

        return config

    def _consume_options(self, data):
        ''' update existing options from alternate configuration sources not normally used by Ansible.
            Many API libraries already have existing configuration sources, this allows plugin author to leverage them.
            :arg data: key/value pairs that correspond to configuration options for this plugin
        '''

        for k in self._options:
            if k in data:
                self._options[k] = data.pop(k)

    def _expand_hostpattern(self, hostpattern):
        '''
        Takes a single host pattern and returns a list of hostnames and an
        optional port number that applies to all of them.
        '''
        # Can the given hostpattern be parsed as a host with an optional port
        # specification?
        try:
            (pattern, port) = parse_address(hostpattern, allow_ranges=True)
        except Exception:
            # not a recognizable host pattern
            pattern = hostpattern
            port = None

        # Once we have separated the pattern, we expand it into list of one or
        # more hostnames, depending on whether it contains any [x:y] ranges.
        if detect_range(pattern):
            hostnames = expand_hostname_range(pattern)
        else:
            hostnames = [pattern]

        return (hostnames, port)
class BaseFileInventoryPlugin(BaseInventoryPlugin):
    """ Parses a File based Inventory Source"""

    # file-backed sources are 'storage' rather than 'generator' type
    TYPE = 'storage'

    def __init__(self):

        super(BaseFileInventoryPlugin, self).__init__()
class DeprecatedCache(object):
    """Deprecation proxy returned by ``Cacheable.cache``.

    Forwards all access to the owning plugin's ``_cache`` dict while warning
    callers to use ``self._cache`` directly.
    """
    def __init__(self, real_cacheable):
        self.real_cacheable = real_cacheable

    def get(self, key):
        display.deprecated('InventoryModule should utilize self._cache as a dict instead of self.cache. '
                           'When expecting a KeyError, use self._cache[key] instead of using self.cache.get(key). '
                           'self._cache is a dictionary and will return a default value instead of raising a KeyError '
                           'when the key does not exist', version='2.12', collection_name='ansible.builtin')
        return self.real_cacheable._cache[key]

    def set(self, key, value):
        display.deprecated('InventoryModule should utilize self._cache as a dict instead of self.cache. '
                           'To set the self._cache dictionary, use self._cache[key] = value instead of self.cache.set(key, value). '
                           'To force update the underlying cache plugin with the contents of self._cache before parse() is complete, '
                           'call self.set_cache_plugin and it will use the self._cache dictionary to update the cache plugin',
                           version='2.12', collection_name='ansible.builtin')
        self.real_cacheable._cache[key] = value
        # immediately persist to the backing cache plugin, matching the old API
        self.real_cacheable.set_cache_plugin()

    def __getattr__(self, name):
        # any other attribute access falls through to the underlying dict
        display.deprecated('InventoryModule should utilize self._cache instead of self.cache',
                           version='2.12', collection_name='ansible.builtin')
        return self.real_cacheable._cache.__getattribute__(name)
class Cacheable(object):
    """Mixin giving inventory plugins access to a configured cache plugin."""

    # class-level default; replaced per-instance by load_cache_plugin()
    _cache = CacheObject()

    @property
    def cache(self):
        # deprecated accessor; plugins should use self._cache directly
        return DeprecatedCache(self)

    def load_cache_plugin(self):
        # Build plugin cache options from this plugin's own config options
        # (only those that were actually set) and instantiate the cache.
        plugin_name = self.get_option('cache_plugin')
        cache_option_keys = [('_uri', 'cache_connection'), ('_timeout', 'cache_timeout'), ('_prefix', 'cache_prefix')]
        cache_options = dict((opt[0], self.get_option(opt[1])) for opt in cache_option_keys if self.get_option(opt[1]) is not None)
        self._cache = get_cache_plugin(plugin_name, **cache_options)

    def get_cache_key(self, path):
        return "{0}_{1}".format(self.NAME, self._get_cache_prefix(path))

    def _get_cache_prefix(self, path):
        ''' create predictable unique prefix for plugin/inventory '''
        # First 5 hex chars of sha1(plugin name) and sha1(source path),
        # joined with 's_' (yielding e.g. 'abcdes_fghij').
        m = hashlib.sha1()
        m.update(to_bytes(self.NAME, errors='surrogate_or_strict'))
        d1 = m.hexdigest()

        n = hashlib.sha1()
        n.update(to_bytes(path, errors='surrogate_or_strict'))
        d2 = n.hexdigest()

        return 's_'.join([d1[:5], d2[:5]])

    def clear_cache(self):
        self._cache.flush()

    def update_cache_if_changed(self):
        self._cache.update_cache_if_changed()

    def set_cache_plugin(self):
        self._cache.set_cache()
class Constructable(object):
    """Mixin implementing the 'constructed' inventory features: composed
    variables, conditional groups and keyed groups."""

    def _compose(self, template, variables):
        ''' helper method for plugins to compose variables for Ansible based on jinja2 expression and inventory vars'''
        t = self.templar

        try:
            use_extra = self.get_option('use_extra_vars')
        except Exception:
            # plugin does not expose the option; default to inventory vars only
            use_extra = False

        if use_extra:
            t.available_variables = combine_vars(variables, self._vars)
        else:
            t.available_variables = variables

        # wrap the bare expression in the templar's variable delimiters;
        # lookups are disabled to keep composition side-effect free
        return t.template('%s%s%s' % (t.environment.variable_start_string, template, t.environment.variable_end_string), disable_lookups=True)

    def _set_composite_vars(self, compose, variables, host, strict=False):
        ''' loops over compose entries to create vars for hosts '''
        if compose and isinstance(compose, dict):
            for varname in compose:
                try:
                    composite = self._compose(compose[varname], variables)
                except Exception as e:
                    if strict:
                        raise AnsibleError("Could not set %s for host %s: %s" % (varname, host, to_native(e)))
                    # non-strict: skip entries that fail to template
                    continue
                self.inventory.set_variable(host, varname, composite)

    def _add_host_to_composed_groups(self, groups, variables, host, strict=False, fetch_hostvars=True):
        ''' helper to create complex groups for plugins based on jinja2 conditionals, hosts that meet the conditional are added to group'''
        # process each 'group entry'
        if groups and isinstance(groups, dict):
            if fetch_hostvars:
                variables = combine_vars(variables, self.inventory.get_host(host).get_vars())
            self.templar.available_variables = variables
            for group_name in groups:
                # evaluate the user expression as a jinja2 conditional
                conditional = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % groups[group_name]
                group_name = self._sanitize_group_name(group_name)
                try:
                    result = boolean(self.templar.template(conditional))
                except Exception as e:
                    if strict:
                        raise AnsibleParserError("Could not add host %s to group %s: %s" % (host, group_name, to_native(e)))
                    continue

                if result:
                    # ensure group exists, use sanitized name
                    group_name = self.inventory.add_group(group_name)
                    # add host to group
                    self.inventory.add_child(group_name, host)

    def _add_host_to_keyed_groups(self, keys, variables, host, strict=False, fetch_hostvars=True):
        ''' helper to create groups for plugins based on variable values and add the corresponding hosts to it'''
        if keys and isinstance(keys, list):
            for keyed in keys:
                if keyed and isinstance(keyed, dict):

                    if fetch_hostvars:
                        variables = combine_vars(variables, self.inventory.get_host(host).get_vars())
                    try:
                        key = self._compose(keyed.get('key'), variables)
                    except Exception as e:
                        if strict:
                            raise AnsibleParserError("Could not generate group for host %s from %s entry: %s" % (host, keyed.get('key'), to_native(e)))
                        continue

                    if key:
                        prefix = keyed.get('prefix', '')
                        sep = keyed.get('separator', '_')
                        raw_parent_name = keyed.get('parent_group', None)
                        if raw_parent_name:
                            try:
                                raw_parent_name = self.templar.template(raw_parent_name)
                            except AnsibleError as e:
                                if strict:
                                    raise AnsibleParserError("Could not generate parent group %s for group %s: %s" % (raw_parent_name, key, to_native(e)))
                                continue

                        # the composed key may be a single name, a list of
                        # names, or a mapping of name->value pairs
                        new_raw_group_names = []
                        if isinstance(key, string_types):
                            new_raw_group_names.append(key)
                        elif isinstance(key, list):
                            for name in key:
                                new_raw_group_names.append(name)
                        elif isinstance(key, Mapping):
                            for (gname, gval) in key.items():
                                name = '%s%s%s' % (gname, sep, gval)
                                new_raw_group_names.append(name)
                        else:
                            raise AnsibleParserError("Invalid group name format, expected a string or a list of them or dictionary, got: %s" % type(key))

                        for bare_name in new_raw_group_names:
                            if prefix == '' and self.get_option('leading_separator') is False:
                                sep = ''
                            gname = self._sanitize_group_name('%s%s%s' % (prefix, sep, bare_name))
                            result_gname = self.inventory.add_group(gname)
                            self.inventory.add_host(host, result_gname)

                            if raw_parent_name:
                                parent_name = self._sanitize_group_name(raw_parent_name)
                                self.inventory.add_group(parent_name)
                                self.inventory.add_child(parent_name, result_gname)

                    else:
                        # exclude case of empty list and dictionary, because these are valid constructions
                        # simply no groups need to be constructed, but are still falsy
                        if strict and key not in ([], {}):
                            raise AnsibleParserError("No key or key resulted empty for %s in host %s, invalid entry" % (keyed.get('key'), host))
                else:
                    raise AnsibleParserError("Invalid keyed group entry, it must be a dictionary: %s " % keyed)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,140 |
cache contains deprecated call to be removed in 2.12
|
##### SUMMARY
cache contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/cache/__init__.py:49:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/cache/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74140
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:03Z |
python
| 2021-04-13T15:08:20Z |
lib/ansible/vars/fact_cache.py
|
# Copyright: (c) 2014, Michael DeHaan <[email protected]>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.plugins.loader import cache_loader
from ansible.utils.display import Display
display = Display()
class FactCache(MutableMapping):
    """Dict-like front end over the configured facts cache plugin.

    Every mapping operation is delegated straight to the plugin, so the
    plugin is the single source of truth; nothing is mirrored locally.
    """

    def __init__(self, *args, **kwargs):
        self._plugin = cache_loader.get(C.CACHE_PLUGIN)
        if not self._plugin:
            raise AnsibleError('Unable to load the facts cache plugin (%s).' % C.CACHE_PLUGIN)

        super(FactCache, self).__init__(*args, **kwargs)

    def __getitem__(self, key):
        if self._plugin.contains(key):
            return self._plugin.get(key)
        raise KeyError

    def __setitem__(self, key, value):
        self._plugin.set(key, value)

    def __delitem__(self, key):
        self._plugin.delete(key)

    def __contains__(self, key):
        return self._plugin.contains(key)

    def __iter__(self):
        return iter(self._plugin.keys())

    def __len__(self):
        return len(self._plugin.keys())

    def copy(self):
        """ Return a primitive copy of the keys and values from the cache. """
        return {key: self[key] for key in self}

    def keys(self):
        return self._plugin.keys()

    def flush(self):
        """ Flush the fact cache of all keys. """
        self._plugin.flush()

    def first_order_merge(self, key, value):
        """Shallow-merge ``value`` into the facts already cached for ``key``."""
        merged = {key: value}
        try:
            existing = self._plugin.get(key)
        except KeyError:
            existing = None
        if existing:
            existing.update(value)
            merged[key] = existing

        super(FactCache, self).update(merged)

    def update(self, *args):
        """
        Backwards compat shim

        We thought we needed this to ensure we always called the plugin's set() method but
        MutableMapping.update() will call our __setitem__() just fine.  It's the calls to update
        that we need to be careful of.  This contains a bug::

            fact_cache[host.name].update(facts)

        It retrieves a *copy* of the facts for host.name and then updates the copy.  So the changes
        aren't persisted.

        Instead we need to do::

            fact_cache.update({host.name, facts})

        Which will use FactCache's update() method.

        We currently need this shim for backwards compat because the update() method that we had
        implemented took key and value as arguments instead of taking a dict.  We can remove the
        shim in 2.12 as MutableMapping.update() should do everything that we need.
        """
        argcount = len(args)
        if argcount == 2:
            # Deprecated. Call the new function with this name
            display.deprecated('Calling FactCache().update(key, value) is deprecated.  Use'
                               ' FactCache().first_order_merge(key, value) if you want the old'
                               ' behaviour or use FactCache().update({key: value}) if you want'
                               ' dict-like behaviour.', version='2.12', collection_name='ansible.builtin')
            return self.first_order_merge(*args)

        if argcount != 1:
            raise TypeError('update expected at most 1 argument, got {0}'.format(argcount))

        super(FactCache, self).update(args[0])
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,140 |
cache contains deprecated call to be removed in 2.12
|
##### SUMMARY
cache contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/cache/__init__.py:49:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/cache/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74140
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:03Z |
python
| 2021-04-13T15:08:20Z |
test/sanity/ignore.txt
|
docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst no-smart-quotes
examples/play.yml shebang
examples/scripts/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/my_test.py shebang # example module but not in a normal module location
examples/scripts/my_test_facts.py shebang # example module but not in a normal module location
examples/scripts/my_test_info.py shebang # example module but not in a normal module location
examples/scripts/upgrade_to_ps3.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/upgrade_to_ps3.ps1 pslint:PSUseApprovedVerbs
hacking/build-ansible.py shebang # only run by release engineers, Python 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.6!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.7!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-3.5!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.6!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.7!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-3.5!skip # release and docs process only, 3.6+ required
lib/ansible/cli/console.py pylint:blacklisted-name
lib/ansible/cli/galaxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/cli/scripts/ansible_cli_stub.py pylint:ansible-deprecated-version
lib/ansible/cli/scripts/ansible_cli_stub.py shebang
lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang
lib/ansible/config/base.yml no-unwanted-files
lib/ansible/executor/playbook_executor.py pylint:blacklisted-name
lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/task_queue_manager.py pylint:blacklisted-name
lib/ansible/galaxy/collection/__init__.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/collection/galaxy_api_proxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/dataclasses.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/providers.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/keyword_desc.yml no-unwanted-files
lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py pylint:blacklisted-name
lib/ansible/module_utils/compat/selinux.py import-2.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-2.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.5!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/distro/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py no-assert
lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
lib/ansible/module_utils/facts/__init__.py empty-init # breaks namespacing, deprecate and eventually remove
lib/ansible/module_utils/facts/network/linux.py pylint:blacklisted-name
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.ArgvParser.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSProvideCommentHelp # need to agree on best format for comment location
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSProvideCommentHelp
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.LinkUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/pycompat24.py no-get-exception
lib/ansible/module_utils/six/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/six/__init__.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py no-basestring
lib/ansible/module_utils/six/__init__.py no-dict-iteritems
lib/ansible/module_utils/six/__init__.py no-dict-iterkeys
lib/ansible/module_utils/six/__init__.py no-dict-itervalues
lib/ansible/module_utils/six/__init__.py pylint:self-assigning-variable
lib/ansible/module_utils/six/__init__.py replace-urlopen
lib/ansible/module_utils/urls.py pylint:blacklisted-name
lib/ansible/module_utils/urls.py replace-urlopen
lib/ansible/modules/apt.py validate-modules:parameter-invalid
lib/ansible/modules/apt_key.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/apt_repository.py validate-modules:parameter-invalid
lib/ansible/modules/assemble.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/async_status.py use-argspec-type-path
lib/ansible/modules/async_status.py validate-modules!skip
lib/ansible/modules/async_wrapper.py ansible-doc!skip # not an actual module
lib/ansible/modules/async_wrapper.py pylint:ansible-bad-function # ignore, required
lib/ansible/modules/async_wrapper.py use-argspec-type-path
lib/ansible/modules/blockinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/blockinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/command.py validate-modules:doc-default-does-not-match-spec # _uses_shell is undocumented
lib/ansible/modules/command.py validate-modules:doc-missing-type
lib/ansible/modules/command.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/command.py validate-modules:undocumented-parameter
lib/ansible/modules/copy.py pylint:blacklisted-name
lib/ansible/modules/copy.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/copy.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/copy.py validate-modules:undocumented-parameter
lib/ansible/modules/dnf.py validate-modules:doc-required-mismatch
lib/ansible/modules/dnf.py validate-modules:parameter-invalid
lib/ansible/modules/file.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/file.py validate-modules:undocumented-parameter
lib/ansible/modules/find.py use-argspec-type-path # fix needed
lib/ansible/modules/git.py pylint:blacklisted-name
lib/ansible/modules/git.py use-argspec-type-path
lib/ansible/modules/git.py validate-modules:doc-missing-type
lib/ansible/modules/git.py validate-modules:doc-required-mismatch
lib/ansible/modules/hostname.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/iptables.py pylint:blacklisted-name
lib/ansible/modules/lineinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/package_facts.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/pip.py pylint:blacklisted-name
lib/ansible/modules/pip.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/replace.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:use-run-command-not-popen
lib/ansible/modules/stat.py validate-modules:doc-default-does-not-match-spec # get_md5 is undocumented
lib/ansible/modules/stat.py validate-modules:parameter-invalid
lib/ansible/modules/stat.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/stat.py validate-modules:undocumented-parameter
lib/ansible/modules/systemd.py validate-modules:parameter-invalid
lib/ansible/modules/systemd.py validate-modules:return-syntax-error
lib/ansible/modules/sysvinit.py validate-modules:return-syntax-error
lib/ansible/modules/unarchive.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/uri.py pylint:blacklisted-name
lib/ansible/modules/uri.py validate-modules:doc-required-mismatch
lib/ansible/modules/user.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/user.py validate-modules:doc-default-incompatible-type
lib/ansible/modules/user.py validate-modules:use-run-command-not-popen
lib/ansible/modules/yum.py pylint:blacklisted-name
lib/ansible/modules/yum.py validate-modules:parameter-invalid
lib/ansible/modules/yum_repository.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/yum_repository.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/yum_repository.py validate-modules:undocumented-parameter
lib/ansible/parsing/vault/__init__.py pylint:blacklisted-name
lib/ansible/playbook/base.py pylint:blacklisted-name
lib/ansible/playbook/collectionsearch.py required-and-default-attributes # https://github.com/ansible/ansible/issues/61460
lib/ansible/playbook/conditional.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:blacklisted-name
lib/ansible/playbook/play_context.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/async_status.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/normal.py action-plugin-docs # default action plugin for modules without a dedicated action plugin
lib/ansible/plugins/cache/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/cache/base.py ansible-doc!skip # not a plugin, but a stub for backwards compatibility
lib/ansible/plugins/inventory/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/inventory/script.py pylint:ansible-deprecated-version
lib/ansible/plugins/lookup/sequence.py pylint:blacklisted-name
lib/ansible/plugins/strategy/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/strategy/__init__.py pylint:blacklisted-name
lib/ansible/plugins/strategy/linear.py pylint:blacklisted-name
lib/ansible/vars/fact_cache.py pylint:ansible-deprecated-version
lib/ansible/vars/hostvars.py pylint:blacklisted-name
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-function # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import-from # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/collections_plugin_namespace/collection_root/ansible_collections/my_ns/my_col/plugins/lookup/lookup_no_future_boilerplate.py future-import-boilerplate # testing Python 2.x implicit relative imports
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util2.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util3.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/my_module.py pylint:relative-beyond-top-level
test/integration/targets/gathering_facts/library/bogus_facts shebang
test/integration/targets/gathering_facts/library/facts_one shebang
test/integration/targets/gathering_facts/library/facts_two shebang
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps1 pslint!skip
test/integration/targets/incidental_win_reboot/templates/post_reboot.ps1 pslint!skip
test/integration/targets/json_cleanup/library/bad_json shebang
test/integration/targets/lookup_csvfile/files/crlf.csv line-endings
test/integration/targets/lookup_ini/lookup-8859-15.ini no-smart-quotes
test/integration/targets/module_precedence/lib_with_extension/a.ini shebang
test/integration/targets/module_precedence/lib_with_extension/ping.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/a.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/ping.ini shebang
test/integration/targets/module_utils/library/test.py future-import-boilerplate # allow testing of Python 2.x implicit relative imports
test/integration/targets/module_utils/module_utils/bar0/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/bar.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/yak/zebra/foo.py pylint:blacklisted-name
test/integration/targets/old_style_modules_posix/library/helloworld.sh shebang
test/integration/targets/template/files/encoding_1252_utf-8.expected no-smart-quotes
test/integration/targets/template/files/encoding_1252_windows-1252.expected no-smart-quotes
test/integration/targets/template/files/foo.dos.txt line-endings
test/integration/targets/template/templates/encoding_1252.j2 no-smart-quotes
test/integration/targets/unicode/unicode.yml no-smart-quotes
test/integration/targets/win_exec_wrapper/library/test_fail.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_exec_wrapper/tasks/main.yml no-smart-quotes # We are explicitly testing smart quote support for env vars
test/integration/targets/win_fetch/tasks/main.yml no-smart-quotes # We are explictly testing smart quotes in the file name to fetch
test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_script/files/test_script.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_creates_file.ps1 pslint:PSAvoidUsingCmdletAliases
test/integration/targets/win_script/files/test_script_removes_file.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_script/files/test_script_with_args.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip
test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt test-constraints
test/lib/ansible_test/_data/requirements/sanity.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py use-compat-six
test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
test/support/integration/plugins/module_utils/aws/core.py pylint:property-with-parameters
test/support/integration/plugins/module_utils/cloud.py future-import-boilerplate
test/support/integration/plugins/module_utils/cloud.py metaclass-boilerplate
test/support/integration/plugins/module_utils/cloud.py pylint:isinstance-second-argument-not-valid-type
test/support/integration/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/integration/plugins/module_utils/database.py future-import-boilerplate
test/support/integration/plugins/module_utils/database.py metaclass-boilerplate
test/support/integration/plugins/module_utils/mysql.py future-import-boilerplate
test/support/integration/plugins/module_utils/mysql.py metaclass-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/integration/plugins/module_utils/postgres.py future-import-boilerplate
test/support/integration/plugins/module_utils/postgres.py metaclass-boilerplate
test/support/integration/plugins/modules/lvg.py pylint:blacklisted-name
test/support/integration/plugins/modules/timezone.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py pep8:E203
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py pep8:E501
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pep8:E231
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py metaclass-boilerplate
test/support/windows-integration/plugins/modules/async_status.ps1 pslint!skip
test/support/windows-integration/plugins/modules/setup.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_copy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_dsc.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_feature.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_find.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_lineinfile.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_regedit.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_security_policy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_shell.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_wait_for.ps1 pslint!skip
test/units/executor/test_play_iterator.py pylint:blacklisted-name
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-no-version
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-version
test/units/module_utils/basic/test_run_command.py pylint:blacklisted-name
test/units/module_utils/urls/fixtures/multipart.txt line-endings # Fixture for HTTP tests that use CRLF
test/units/module_utils/urls/test_Request.py replace-urlopen
test/units/module_utils/urls/test_fetch_url.py replace-urlopen
test/units/modules/test_apt.py pylint:blacklisted-name
test/units/parsing/vault/test_vault.py pylint:blacklisted-name
test/units/playbook/role/test_role.py pylint:blacklisted-name
test/units/plugins/test_plugins.py pylint:blacklisted-name
test/units/template/test_templar.py pylint:blacklisted-name
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py pylint:relative-beyond-top-level
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py future-import-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py metaclass-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/test_collection_loader.py pylint:undefined-variable # magic runtime local var splatting
test/utils/shippable/check_matrix.py replace-urlopen
test/utils/shippable/timing.py shebang
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,140 |
cache contains deprecated call to be removed in 2.12
|
##### SUMMARY
cache contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/plugins/cache/__init__.py:49:8: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/plugins/cache/__init__.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74140
|
https://github.com/ansible/ansible/pull/74198
|
39bd8b99ec8c6624207bf3556ac7f9626dad9173
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
| 2021-04-05T20:34:03Z |
python
| 2021-04-13T15:08:20Z |
test/units/plugins/cache/test_cache.py
|
# (c) 2012-2015, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest, mock
from ansible.errors import AnsibleError
from ansible.plugins.cache import FactCache, CachePluginAdjudicator
from ansible.plugins.cache.base import BaseCacheModule
from ansible.plugins.cache.memory import CacheModule as MemoryCache
from ansible.plugins.loader import cache_loader
import pytest
class TestCachePluginAdjudicator(unittest.TestCase):
    """Exercise the dict-like interface of CachePluginAdjudicator (memory-backed)."""

    def setUp(self):
        # memory plugin cache
        self.cache = CachePluginAdjudicator()
        self.cache['cache_key'] = {'key1': 'value1', 'key2': 'value2'}
        self.cache['cache_key_2'] = {'key': 'value'}

    def test___setitem__(self):
        self.cache['new_cache_key'] = {'new_key1': ['new_value1', 'new_value2']}
        assert self.cache['new_cache_key'] == {'new_key1': ['new_value1', 'new_value2']}

    def test_inner___setitem__(self):
        # mutating a nested value must be visible on subsequent reads
        self.cache['new_cache_key'] = {'new_key1': ['new_value1', 'new_value2']}
        self.cache['new_cache_key']['new_key1'][0] = 'updated_value1'
        assert self.cache['new_cache_key'] == {'new_key1': ['updated_value1', 'new_value2']}

    def test___contains__(self):
        assert 'cache_key' in self.cache
        assert 'not_cache_key' not in self.cache

    def test_get(self):
        assert self.cache.get('cache_key') == {'key1': 'value1', 'key2': 'value2'}

    def test_get_with_default(self):
        assert self.cache.get('foo', 'bar') == 'bar'

    def test_get_without_default(self):
        assert self.cache.get('foo') is None

    def test___getitem__(self):
        # missing key must raise KeyError, matching dict semantics
        with pytest.raises(KeyError):
            self.cache['foo']

    def test_pop_with_default(self):
        assert self.cache.pop('foo', 'bar') == 'bar'

    def test_pop_without_default(self):
        # popping a missing key without a default must raise KeyError
        with pytest.raises(KeyError):
            self.cache.pop('foo')

    def test_pop(self):
        v = self.cache.pop('cache_key_2')
        assert v == {'key': 'value'}
        assert 'cache_key_2' not in self.cache

    def test_update(self):
        self.cache.update({'cache_key': {'key2': 'updatedvalue'}})
        assert self.cache['cache_key']['key2'] == 'updatedvalue'

    def test_flush(self):
        # Fake that the cache already has some data in it but the adjudicator
        # hasn't loaded it in.
        self.cache._plugin.set('monkey', 'animal')
        self.cache._plugin.set('wolf', 'animal')
        self.cache._plugin.set('another wolf', 'another animal')

        # The adjudicator doesn't know about the new entries
        assert len(self.cache) == 2
        # But the cache itself does
        assert len(self.cache._plugin._cache) == 3

        # If we call flush, both the adjudicator and the cache should flush
        self.cache.flush()
        assert len(self.cache) == 0
        assert len(self.cache._plugin._cache) == 0
class TestFactCache(unittest.TestCase):
    """Tests for FactCache, the dict-like wrapper around the configured fact cache plugin."""

    def setUp(self):
        with mock.patch('ansible.constants.CACHE_PLUGIN', 'memory'):
            self.cache = FactCache()

    def test_copy(self):
        self.cache['avocado'] = 'fruit'
        self.cache['daisy'] = 'flower'
        a_copy = self.cache.copy()
        self.assertEqual(type(a_copy), dict)
        self.assertEqual(a_copy, dict(avocado='fruit', daisy='flower'))

    def test_plugin_load_failure(self):
        # See https://github.com/ansible/ansible/issues/18751
        # Note no fact_connection config set, so this will fail
        with mock.patch('ansible.constants.CACHE_PLUGIN', 'json'):
            # assertRaisesRegexp is a deprecated alias that has been removed
            # in newer Python 3 releases; use assertRaisesRegex instead.
            self.assertRaisesRegex(AnsibleError,
                                   "Unable to load the facts cache plugin.*json.*",
                                   FactCache)

    def test_update(self):
        self.cache.update({'cache_key': {'key2': 'updatedvalue'}})
        assert self.cache['cache_key']['key2'] == 'updatedvalue'

    def test_update_legacy(self):
        # legacy two-argument call style: update(key, value)
        self.cache.update('cache_key', {'key2': 'updatedvalue'})
        assert self.cache['cache_key']['key2'] == 'updatedvalue'

    def test_update_legacy_key_exists(self):
        # legacy update merges into an existing entry rather than replacing it
        self.cache['cache_key'] = {'key': 'value', 'key2': 'value2'}
        self.cache.update('cache_key', {'key': 'updatedvalue'})
        assert self.cache['cache_key']['key'] == 'updatedvalue'
        assert self.cache['cache_key']['key2'] == 'value2'
class TestAbstractClass(unittest.TestCase):
    """Verify that BaseCacheModule enforces its abstract interface."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_subclass_error(self):
        # A subclass that overrides none of the abstract methods cannot be
        # instantiated.
        class EmptySubclass(BaseCacheModule):
            pass

        with self.assertRaises(TypeError):
            EmptySubclass()  # pylint: disable=abstract-class-instantiated

        # Overriding only one abstract method is still not enough.
        class PartialSubclass(BaseCacheModule):
            def get(self, key):
                super(PartialSubclass, self).get(key)

        with self.assertRaises(TypeError):
            PartialSubclass()  # pylint: disable=abstract-class-instantiated

    def test_subclass_success(self):
        # Once every abstract method is overridden, instantiation succeeds.
        class CompleteSubclass(BaseCacheModule):
            def get(self, key):
                super(CompleteSubclass, self).get(key)

            def set(self, key, value):
                super(CompleteSubclass, self).set(key, value)

            def keys(self):
                super(CompleteSubclass, self).keys()

            def contains(self, key):
                super(CompleteSubclass, self).contains(key)

            def delete(self, key):
                super(CompleteSubclass, self).delete(key)

            def flush(self):
                super(CompleteSubclass, self).flush()

            def copy(self):
                super(CompleteSubclass, self).copy()

        self.assertIsInstance(CompleteSubclass(), CompleteSubclass)

    def test_memory_cachemodule(self):
        self.assertIsInstance(MemoryCache(), MemoryCache)

    def test_memory_cachemodule_with_loader(self):
        self.assertIsInstance(cache_loader.get('memory'), MemoryCache)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,134 |
conditional contains deprecated call to be removed in 2.12
|
##### SUMMARY
conditional contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/playbook/conditional.py:144:16: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/conditional.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74134
|
https://github.com/ansible/ansible/pull/74208
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
|
19aeb4706d7f7a984daf2a70f16d69a7dccde484
| 2021-04-05T20:33:56Z |
python
| 2021-04-13T15:12:10Z |
changelogs/fragments/conditional-bare-vars.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,134 |
conditional contains deprecated call to be removed in 2.12
|
##### SUMMARY
conditional contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/playbook/conditional.py:144:16: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/conditional.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74134
|
https://github.com/ansible/ansible/pull/74208
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
|
19aeb4706d7f7a984daf2a70f16d69a7dccde484
| 2021-04-05T20:33:56Z |
python
| 2021-04-13T15:12:10Z |
docs/docsite/rst/porting_guides/porting_guide_core_2.12.rst
|
.. _porting_2.12_guide:
**************************
Ansible 2.12 Porting Guide
**************************
This section discusses the behavioral changes between Ansible 2.11 and Ansible 2.12.
It is intended to assist in updating your playbooks, plugins and other parts of your Ansible infrastructure so they will work with this version of Ansible.
We suggest you read this page along with `Ansible Changelog for 2.12 <https://github.com/ansible/ansible/blob/devel/changelogs/CHANGELOG-v2.12.rst>`_ to understand what updates you may need to make.
This document is part of a collection on porting. The complete list of porting guides can be found at :ref:`porting guides <porting_guides>`.
.. contents:: Topics
Playbook
========
No notable changes
Command Line
============
No notable changes
Deprecated
==========
No notable changes
Modules
=======
* ``cron`` now requires ``name`` to be specified in all cases.
* ``cron`` no longer allows a ``reboot`` parameter. Use ``special_time: reboot`` instead.
Modules removed
---------------
The following modules no longer exist:
* No notable changes
Deprecation notices
-------------------
No notable changes
Noteworthy module changes
-------------------------
No notable changes
Plugins
=======
No notable changes
Porting custom scripts
======================
No notable changes
Networking
==========
No notable changes
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,134 |
conditional contains deprecated call to be removed in 2.12
|
##### SUMMARY
conditional contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/playbook/conditional.py:144:16: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/conditional.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74134
|
https://github.com/ansible/ansible/pull/74208
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
|
19aeb4706d7f7a984daf2a70f16d69a7dccde484
| 2021-04-05T20:33:56Z |
python
| 2021-04-13T15:12:10Z |
lib/ansible/config/base.yml
|
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
ALLOW_WORLD_READABLE_TMPFILES:
name: Allow world-readable temporary files
default: False
description:
- This setting has been moved to the individual shell plugins as a plugin option :ref:`shell_plugins`.
- The existing configuration settings are still accepted with the shell plugin adding additional options, like variables.
- This message will be removed in 2.14.
type: boolean
ANSIBLE_CONNECTION_PATH:
name: Path of ansible-connection script
default: null
description:
- Specify where to look for the ansible-connection script. This location will be checked before searching $PATH.
- If null, ansible will start with the same directory as the ansible script.
type: path
env: [{name: ANSIBLE_CONNECTION_PATH}]
ini:
- {key: ansible_connection_path, section: persistent_connection}
yaml: {key: persistent_connection.ansible_connection_path}
version_added: "2.8"
ANSIBLE_COW_SELECTION:
name: Cowsay filter selection
default: default
description: This allows you to choose a specific cowsay stencil for the banners or use 'random' to cycle through them.
env: [{name: ANSIBLE_COW_SELECTION}]
ini:
- {key: cow_selection, section: defaults}
ANSIBLE_COW_ACCEPTLIST:
name: Cowsay filter acceptance list
default: ['bud-frogs', 'bunny', 'cheese', 'daemon', 'default', 'dragon', 'elephant-in-snake', 'elephant', 'eyes', 'hellokitty', 'kitty', 'luke-koala', 'meow', 'milk', 'moofasa', 'moose', 'ren', 'sheep', 'small', 'stegosaurus', 'stimpy', 'supermilker', 'three-eyes', 'turkey', 'turtle', 'tux', 'udder', 'vader-koala', 'vader', 'www']
description: White list of cowsay templates that are 'safe' to use, set to empty list if you want to enable all installed templates.
env:
- name: ANSIBLE_COW_WHITELIST
deprecated:
why: normalizing names to new standard
version: "2.15"
alternatives: 'ANSIBLE_COW_ACCEPTLIST'
- name: ANSIBLE_COW_ACCEPTLIST
version_added: '2.11'
ini:
- key: cow_whitelist
section: defaults
deprecated:
why: normalizing names to new standard
version: "2.15"
alternatives: 'cowsay_enabled_stencils'
- key: cowsay_enabled_stencils
section: defaults
version_added: '2.11'
type: list
ANSIBLE_FORCE_COLOR:
name: Force color output
default: False
description: This option forces color mode even when running without a TTY or the "nocolor" setting is True.
env: [{name: ANSIBLE_FORCE_COLOR}]
ini:
- {key: force_color, section: defaults}
type: boolean
yaml: {key: display.force_color}
ANSIBLE_NOCOLOR:
name: Suppress color output
default: False
description: This setting allows suppressing colorizing output, which is used to give a better indication of failure and status information.
env:
- name: ANSIBLE_NOCOLOR
# this is generic convention for CLI programs
- name: NO_COLOR
version_added: '2.11'
ini:
- {key: nocolor, section: defaults}
type: boolean
yaml: {key: display.nocolor}
ANSIBLE_NOCOWS:
name: Suppress cowsay output
default: False
description: If you have cowsay installed but want to avoid the 'cows' (why????), use this.
env: [{name: ANSIBLE_NOCOWS}]
ini:
- {key: nocows, section: defaults}
type: boolean
yaml: {key: display.i_am_no_fun}
ANSIBLE_COW_PATH:
name: Set path to cowsay command
default: null
description: Specify a custom cowsay path or swap in your cowsay implementation of choice
env: [{name: ANSIBLE_COW_PATH}]
ini:
- {key: cowpath, section: defaults}
type: string
yaml: {key: display.cowpath}
ANSIBLE_PIPELINING:
name: Connection pipelining
default: False
description:
- Pipelining, if supported by the connection plugin, reduces the number of network operations required to execute a module on the remote server,
by executing many Ansible modules without actual file transfer.
- This can result in a very significant performance improvement when enabled.
- "However this conflicts with privilege escalation (become). For example, when using 'sudo:' operations you must first
disable 'requiretty' in /etc/sudoers on all managed hosts, which is why it is disabled by default."
- This option is disabled if ``ANSIBLE_KEEP_REMOTE_FILES`` is enabled.
- This is a global option, each connection plugin can override either by having more specific options or not supporting pipelining at all.
env:
- name: ANSIBLE_PIPELINING
ini:
- section: defaults
key: pipelining
- section: connection
key: pipelining
type: boolean
ANY_ERRORS_FATAL:
name: Make Task failures fatal
default: False
description: Sets the default value for the any_errors_fatal keyword, if True, Task failures will be considered fatal errors.
env:
- name: ANSIBLE_ANY_ERRORS_FATAL
ini:
- section: defaults
key: any_errors_fatal
type: boolean
yaml: {key: errors.any_task_errors_fatal}
version_added: "2.4"
BECOME_ALLOW_SAME_USER:
name: Allow becoming the same user
default: False
description: This setting controls if become is skipped when remote user and become user are the same. I.E root sudo to root.
env: [{name: ANSIBLE_BECOME_ALLOW_SAME_USER}]
ini:
- {key: become_allow_same_user, section: privilege_escalation}
type: boolean
yaml: {key: privilege_escalation.become_allow_same_user}
AGNOSTIC_BECOME_PROMPT:
name: Display an agnostic become prompt
default: True
type: boolean
description: Display an agnostic become prompt instead of displaying a prompt containing the command line supplied become method
env: [{name: ANSIBLE_AGNOSTIC_BECOME_PROMPT}]
ini:
- {key: agnostic_become_prompt, section: privilege_escalation}
yaml: {key: privilege_escalation.agnostic_become_prompt}
version_added: "2.5"
CACHE_PLUGIN:
name: Persistent Cache plugin
default: memory
description: Chooses which cache plugin to use, the default 'memory' is ephemeral.
env: [{name: ANSIBLE_CACHE_PLUGIN}]
ini:
- {key: fact_caching, section: defaults}
yaml: {key: facts.cache.plugin}
CACHE_PLUGIN_CONNECTION:
name: Cache Plugin URI
default: ~
description: Defines connection or path information for the cache plugin
env: [{name: ANSIBLE_CACHE_PLUGIN_CONNECTION}]
ini:
- {key: fact_caching_connection, section: defaults}
yaml: {key: facts.cache.uri}
CACHE_PLUGIN_PREFIX:
name: Cache Plugin table prefix
default: ansible_facts
description: Prefix to use for cache plugin files/tables
env: [{name: ANSIBLE_CACHE_PLUGIN_PREFIX}]
ini:
- {key: fact_caching_prefix, section: defaults}
yaml: {key: facts.cache.prefix}
CACHE_PLUGIN_TIMEOUT:
name: Cache Plugin expiration timeout
default: 86400
description: Expiration timeout for the cache plugin data
env: [{name: ANSIBLE_CACHE_PLUGIN_TIMEOUT}]
ini:
- {key: fact_caching_timeout, section: defaults}
type: integer
yaml: {key: facts.cache.timeout}
COLLECTIONS_SCAN_SYS_PATH:
name: enable/disable scanning sys.path for installed collections
default: true
type: boolean
env:
- {name: ANSIBLE_COLLECTIONS_SCAN_SYS_PATH}
ini:
- {key: collections_scan_sys_path, section: defaults}
COLLECTIONS_PATHS:
name: ordered list of root paths for loading installed Ansible collections content
description: >
Colon separated paths in which Ansible will search for collections content.
Collections must be in nested *subdirectories*, not directly in these directories.
For example, if ``COLLECTIONS_PATHS`` includes ``~/.ansible/collections``,
and you want to add ``my.collection`` to that directory, it must be saved as
``~/.ansible/collections/ansible_collections/my/collection``.
default: ~/.ansible/collections:/usr/share/ansible/collections
type: pathspec
env:
- name: ANSIBLE_COLLECTIONS_PATHS # TODO: Deprecate this and ini once PATH has been in a few releases.
- name: ANSIBLE_COLLECTIONS_PATH
version_added: '2.10'
ini:
- key: collections_paths
section: defaults
- key: collections_path
section: defaults
version_added: '2.10'
COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH:
name: Defines behavior when loading a collection that does not support the current Ansible version
description:
- When a collection is loaded that does not support the running Ansible version (via the collection metadata key
`requires_ansible`), the default behavior is to issue a warning and continue anyway. Setting this value to `ignore`
skips the warning entirely, while setting it to `fatal` will immediately halt Ansible execution.
env: [{name: ANSIBLE_COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH}]
ini: [{key: collections_on_ansible_version_mismatch, section: defaults}]
choices: [error, warning, ignore]
default: warning
_COLOR_DEFAULTS: &color
name: placeholder for color settings' defaults
choices: ['black', 'bright gray', 'blue', 'white', 'green', 'bright blue', 'cyan', 'bright green', 'red', 'bright cyan', 'purple', 'bright red', 'yellow', 'bright purple', 'dark gray', 'bright yellow', 'magenta', 'bright magenta', 'normal']
COLOR_CHANGED:
<<: *color
name: Color for 'changed' task status
default: yellow
description: Defines the color to use on 'Changed' task status
env: [{name: ANSIBLE_COLOR_CHANGED}]
ini:
- {key: changed, section: colors}
COLOR_CONSOLE_PROMPT:
<<: *color
name: "Color for ansible-console's prompt task status"
default: white
description: Defines the default color to use for ansible-console
env: [{name: ANSIBLE_COLOR_CONSOLE_PROMPT}]
ini:
- {key: console_prompt, section: colors}
version_added: "2.7"
COLOR_DEBUG:
<<: *color
name: Color for debug statements
default: dark gray
description: Defines the color to use when emitting debug messages
env: [{name: ANSIBLE_COLOR_DEBUG}]
ini:
- {key: debug, section: colors}
COLOR_DEPRECATE:
<<: *color
name: Color for deprecation messages
default: purple
description: Defines the color to use when emitting deprecation messages
env: [{name: ANSIBLE_COLOR_DEPRECATE}]
ini:
- {key: deprecate, section: colors}
COLOR_DIFF_ADD:
<<: *color
name: Color for diff added display
default: green
description: Defines the color to use when showing added lines in diffs
env: [{name: ANSIBLE_COLOR_DIFF_ADD}]
ini:
- {key: diff_add, section: colors}
yaml: {key: display.colors.diff.add}
COLOR_DIFF_LINES:
<<: *color
name: Color for diff lines display
default: cyan
description: Defines the color to use when showing diffs
env: [{name: ANSIBLE_COLOR_DIFF_LINES}]
ini:
- {key: diff_lines, section: colors}
COLOR_DIFF_REMOVE:
<<: *color
name: Color for diff removed display
default: red
description: Defines the color to use when showing removed lines in diffs
env: [{name: ANSIBLE_COLOR_DIFF_REMOVE}]
ini:
- {key: diff_remove, section: colors}
COLOR_ERROR:
<<: *color
name: Color for error messages
default: red
description: Defines the color to use when emitting error messages
env: [{name: ANSIBLE_COLOR_ERROR}]
ini:
- {key: error, section: colors}
yaml: {key: colors.error}
COLOR_HIGHLIGHT:
<<: *color
name: Color for highlighting
default: white
description: Defines the color to use for highlighting
env: [{name: ANSIBLE_COLOR_HIGHLIGHT}]
ini:
- {key: highlight, section: colors}
COLOR_OK:
<<: *color
name: Color for 'ok' task status
default: green
description: Defines the color to use when showing 'OK' task status
env: [{name: ANSIBLE_COLOR_OK}]
ini:
- {key: ok, section: colors}
COLOR_SKIP:
<<: *color
name: Color for 'skip' task status
default: cyan
description: Defines the color to use when showing 'Skipped' task status
env: [{name: ANSIBLE_COLOR_SKIP}]
ini:
- {key: skip, section: colors}
COLOR_UNREACHABLE:
<<: *color
name: Color for 'unreachable' host state
default: bright red
description: Defines the color to use on 'Unreachable' status
env: [{name: ANSIBLE_COLOR_UNREACHABLE}]
ini:
- {key: unreachable, section: colors}
COLOR_VERBOSE:
<<: *color
name: Color for verbose messages
default: blue
description: Defines the color to use when emitting verbose messages. i.e those that show with '-v's.
env: [{name: ANSIBLE_COLOR_VERBOSE}]
ini:
- {key: verbose, section: colors}
COLOR_WARN:
<<: *color
name: Color for warning messages
default: bright purple
description: Defines the color to use when emitting warning messages
env: [{name: ANSIBLE_COLOR_WARN}]
ini:
- {key: warn, section: colors}
CONDITIONAL_BARE_VARS:
name: Allow bare variable evaluation in conditionals
default: False
type: boolean
description:
- With this setting on (True), running conditional evaluation 'var' is treated differently than 'var.subkey' as the first is evaluated
directly while the second goes through the Jinja2 parser. But 'false' strings in 'var' get evaluated as booleans.
- With this setting off they both evaluate the same but in cases in which 'var' was 'false' (a string) it won't get evaluated as a boolean anymore.
- Currently this setting defaults to 'True' but will soon change to 'False' and the setting itself will be removed in the future.
- Expect that this setting eventually will be deprecated after 2.12
env: [{name: ANSIBLE_CONDITIONAL_BARE_VARS}]
ini:
- {key: conditional_bare_variables, section: defaults}
version_added: "2.8"
COVERAGE_REMOTE_OUTPUT:
name: Sets the output directory and filename prefix to generate coverage run info.
description:
- Sets the output directory on the remote host to generate coverage reports to.
- Currently only used for remote coverage on PowerShell modules.
- This is for internal use only.
env:
- {name: _ANSIBLE_COVERAGE_REMOTE_OUTPUT}
vars:
- {name: _ansible_coverage_remote_output}
type: str
version_added: '2.9'
COVERAGE_REMOTE_PATHS:
name: Sets the list of paths to run coverage for.
description:
- A list of paths for files on the Ansible controller to run coverage for when executing on the remote host.
- Only files that match the path glob will have its coverage collected.
- Multiple path globs can be specified and are separated by ``:``.
- Currently only used for remote coverage on PowerShell modules.
- This is for internal use only.
default: '*'
env:
- {name: _ANSIBLE_COVERAGE_REMOTE_PATH_FILTER}
type: str
version_added: '2.9'
ACTION_WARNINGS:
name: Toggle action warnings
default: True
description:
- By default Ansible will issue a warning when received from a task action (module or action plugin)
- These warnings can be silenced by adjusting this setting to False.
env: [{name: ANSIBLE_ACTION_WARNINGS}]
ini:
- {key: action_warnings, section: defaults}
type: boolean
version_added: "2.5"
COMMAND_WARNINGS:
name: Command module warnings
default: False
description:
- Ansible can issue a warning when the shell or command module is used and the command appears to be similar to an existing Ansible module.
- These warnings can be silenced by adjusting this setting to False. You can also control this at the task level with the module option ``warn``.
- As of version 2.11, this is disabled by default.
env: [{name: ANSIBLE_COMMAND_WARNINGS}]
ini:
- {key: command_warnings, section: defaults}
type: boolean
version_added: "1.8"
deprecated:
why: the command warnings feature is being removed
version: "2.14"
LOCALHOST_WARNING:
name: Warning when using implicit inventory with only localhost
default: True
description:
- By default Ansible will issue a warning when there are no hosts in the
inventory.
- These warnings can be silenced by adjusting this setting to False.
env: [{name: ANSIBLE_LOCALHOST_WARNING}]
ini:
- {key: localhost_warning, section: defaults}
type: boolean
version_added: "2.6"
DOC_FRAGMENT_PLUGIN_PATH:
name: documentation fragment plugins path
default: ~/.ansible/plugins/doc_fragments:/usr/share/ansible/plugins/doc_fragments
description: Colon separated paths in which Ansible will search for Documentation Fragments Plugins.
env: [{name: ANSIBLE_DOC_FRAGMENT_PLUGINS}]
ini:
- {key: doc_fragment_plugins, section: defaults}
type: pathspec
DEFAULT_ACTION_PLUGIN_PATH:
name: Action plugins path
default: ~/.ansible/plugins/action:/usr/share/ansible/plugins/action
description: Colon separated paths in which Ansible will search for Action Plugins.
env: [{name: ANSIBLE_ACTION_PLUGINS}]
ini:
- {key: action_plugins, section: defaults}
type: pathspec
yaml: {key: plugins.action.path}
DEFAULT_ALLOW_UNSAFE_LOOKUPS:
name: Allow unsafe lookups
default: False
description:
- "When enabled, this option allows lookup plugins (whether used in variables as ``{{lookup('foo')}}`` or as a loop as with_foo)
to return data that is not marked 'unsafe'."
- By default, such data is marked as unsafe to prevent the templating engine from evaluating any jinja2 templating language,
as this could represent a security risk. This option is provided to allow for backwards-compatibility,
however users should first consider adding allow_unsafe=True to any lookups which may be expected to contain data which may be run
through the templating engine late
env: []
ini:
- {key: allow_unsafe_lookups, section: defaults}
type: boolean
version_added: "2.2.3"
DEFAULT_ASK_PASS:
name: Ask for the login password
default: False
description:
- This controls whether an Ansible playbook should prompt for a login password.
    If using SSH keys for authentication, you probably do not need to change this setting.
env: [{name: ANSIBLE_ASK_PASS}]
ini:
- {key: ask_pass, section: defaults}
type: boolean
yaml: {key: defaults.ask_pass}
DEFAULT_ASK_VAULT_PASS:
name: Ask for the vault password(s)
default: False
description:
- This controls whether an Ansible playbook should prompt for a vault password.
env: [{name: ANSIBLE_ASK_VAULT_PASS}]
ini:
- {key: ask_vault_pass, section: defaults}
type: boolean
DEFAULT_BECOME:
name: Enable privilege escalation (become)
default: False
description: Toggles the use of privilege escalation, allowing you to 'become' another user after login.
env: [{name: ANSIBLE_BECOME}]
ini:
- {key: become, section: privilege_escalation}
type: boolean
DEFAULT_BECOME_ASK_PASS:
name: Ask for the privilege escalation (become) password
default: False
description: Toggle to prompt for privilege escalation password.
env: [{name: ANSIBLE_BECOME_ASK_PASS}]
ini:
- {key: become_ask_pass, section: privilege_escalation}
type: boolean
DEFAULT_BECOME_METHOD:
name: Choose privilege escalation method
default: 'sudo'
description: Privilege escalation method to use when `become` is enabled.
env: [{name: ANSIBLE_BECOME_METHOD}]
ini:
- {section: privilege_escalation, key: become_method}
DEFAULT_BECOME_EXE:
name: Choose 'become' executable
default: ~
description: 'executable to use for privilege escalation, otherwise Ansible will depend on PATH'
env: [{name: ANSIBLE_BECOME_EXE}]
ini:
- {key: become_exe, section: privilege_escalation}
DEFAULT_BECOME_FLAGS:
name: Set 'become' executable options
default: ''
description: Flags to pass to the privilege escalation executable.
env: [{name: ANSIBLE_BECOME_FLAGS}]
ini:
- {key: become_flags, section: privilege_escalation}
BECOME_PLUGIN_PATH:
name: Become plugins path
default: ~/.ansible/plugins/become:/usr/share/ansible/plugins/become
description: Colon separated paths in which Ansible will search for Become Plugins.
env: [{name: ANSIBLE_BECOME_PLUGINS}]
ini:
- {key: become_plugins, section: defaults}
type: pathspec
version_added: "2.8"
DEFAULT_BECOME_USER:
# FIXME: should really be blank and make -u passing optional depending on it
name: Set the user you 'become' via privilege escalation
default: root
description: The user your login/remote user 'becomes' when using privilege escalation, most systems will use 'root' when no user is specified.
env: [{name: ANSIBLE_BECOME_USER}]
ini:
- {key: become_user, section: privilege_escalation}
yaml: {key: become.user}
DEFAULT_CACHE_PLUGIN_PATH:
name: Cache Plugins Path
default: ~/.ansible/plugins/cache:/usr/share/ansible/plugins/cache
description: Colon separated paths in which Ansible will search for Cache Plugins.
env: [{name: ANSIBLE_CACHE_PLUGINS}]
ini:
- {key: cache_plugins, section: defaults}
type: pathspec
CALLABLE_ACCEPT_LIST:
name: Template 'callable' accept list
default: []
description: Whitelist of callable methods to be made available to template evaluation
env:
- name: ANSIBLE_CALLABLE_WHITELIST
deprecated:
why: normalizing names to new standard
version: "2.15"
alternatives: 'ANSIBLE_CALLABLE_ENABLED'
- name: ANSIBLE_CALLABLE_ENABLED
version_added: '2.11'
ini:
- key: callable_whitelist
section: defaults
deprecated:
why: normalizing names to new standard
version: "2.15"
alternatives: 'callable_enabled'
- key: callable_enabled
section: defaults
version_added: '2.11'
type: list
CONTROLLER_PYTHON_WARNING:
name: Running Older than Python 3.8 Warning
default: True
description: Toggle to control showing warnings related to running a Python version
older than Python 3.8 on the controller
env: [{name: ANSIBLE_CONTROLLER_PYTHON_WARNING}]
ini:
- {key: controller_python_warning, section: defaults}
type: boolean
DEFAULT_CALLBACK_PLUGIN_PATH:
name: Callback Plugins Path
default: ~/.ansible/plugins/callback:/usr/share/ansible/plugins/callback
description: Colon separated paths in which Ansible will search for Callback Plugins.
env: [{name: ANSIBLE_CALLBACK_PLUGINS}]
ini:
- {key: callback_plugins, section: defaults}
type: pathspec
yaml: {key: plugins.callback.path}
CALLBACKS_ENABLED:
name: Enable callback plugins that require it.
default: []
description:
- "List of enabled callbacks, not all callbacks need enabling,
but many of those shipped with Ansible do as we don't want them activated by default."
env:
- name: ANSIBLE_CALLBACK_WHITELIST
deprecated:
why: normalizing names to new standard
version: "2.15"
alternatives: 'ANSIBLE_CALLBACKS_ENABLED'
- name: ANSIBLE_CALLBACKS_ENABLED
version_added: '2.11'
ini:
- key: callback_whitelist
section: defaults
deprecated:
why: normalizing names to new standard
version: "2.15"
alternatives: 'callback_enabled'
- key: callbacks_enabled
section: defaults
version_added: '2.11'
type: list
DEFAULT_CLICONF_PLUGIN_PATH:
name: Cliconf Plugins Path
default: ~/.ansible/plugins/cliconf:/usr/share/ansible/plugins/cliconf
description: Colon separated paths in which Ansible will search for Cliconf Plugins.
env: [{name: ANSIBLE_CLICONF_PLUGINS}]
ini:
- {key: cliconf_plugins, section: defaults}
type: pathspec
DEFAULT_CONNECTION_PLUGIN_PATH:
name: Connection Plugins Path
default: ~/.ansible/plugins/connection:/usr/share/ansible/plugins/connection
description: Colon separated paths in which Ansible will search for Connection Plugins.
env: [{name: ANSIBLE_CONNECTION_PLUGINS}]
ini:
- {key: connection_plugins, section: defaults}
type: pathspec
yaml: {key: plugins.connection.path}
DEFAULT_DEBUG:
name: Debug mode
default: False
description:
- "Toggles debug output in Ansible. This is *very* verbose and can hinder
multiprocessing. Debug output can also include secret information
despite no_log settings being enabled, which means debug mode should not be used in
production."
env: [{name: ANSIBLE_DEBUG}]
ini:
- {key: debug, section: defaults}
type: boolean
DEFAULT_EXECUTABLE:
name: Target shell executable
default: /bin/sh
description:
- "This indicates the command to use to spawn a shell under for Ansible's execution needs on a target.
Users may need to change this in rare instances when shell usage is constrained, but in most cases it may be left as is."
env: [{name: ANSIBLE_EXECUTABLE}]
ini:
- {key: executable, section: defaults}
DEFAULT_FACT_PATH:
name: local fact path
default: ~
description:
- "This option allows you to globally configure a custom path for 'local_facts' for the implied M(ansible.builtin.setup) task when using fact gathering."
- "If not set, it will fallback to the default from the M(ansible.builtin.setup) module: ``/etc/ansible/facts.d``."
- "This does **not** affect user defined tasks that use the M(ansible.builtin.setup) module."
env: [{name: ANSIBLE_FACT_PATH}]
ini:
- {key: fact_path, section: defaults}
type: string
yaml: {key: facts.gathering.fact_path}
DEFAULT_FILTER_PLUGIN_PATH:
name: Jinja2 Filter Plugins Path
default: ~/.ansible/plugins/filter:/usr/share/ansible/plugins/filter
description: Colon separated paths in which Ansible will search for Jinja2 Filter Plugins.
env: [{name: ANSIBLE_FILTER_PLUGINS}]
ini:
- {key: filter_plugins, section: defaults}
type: pathspec
DEFAULT_FORCE_HANDLERS:
name: Force handlers to run after failure
default: False
description:
- This option controls if notified handlers run on a host even if a failure occurs on that host.
- When false, the handlers will not run if a failure has occurred on a host.
- This can also be set per play or on the command line. See Handlers and Failure for more details.
env: [{name: ANSIBLE_FORCE_HANDLERS}]
ini:
- {key: force_handlers, section: defaults}
type: boolean
version_added: "1.9.1"
DEFAULT_FORKS:
name: Number of task forks
default: 5
description: Maximum number of forks Ansible will use to execute tasks on target hosts.
env: [{name: ANSIBLE_FORKS}]
ini:
- {key: forks, section: defaults}
type: integer
DEFAULT_GATHERING:
name: Gathering behaviour
default: 'implicit'
description:
- This setting controls the default policy of fact gathering (facts discovered about remote systems).
- "When 'implicit' (the default), the cache plugin will be ignored and facts will be gathered per play unless 'gather_facts: False' is set."
- "When 'explicit' the inverse is true, facts will not be gathered unless directly requested in the play."
- "The 'smart' value means each new host that has no facts discovered will be scanned,
but if the same host is addressed in multiple plays it will not be contacted again in the playbook run."
- "This option can be useful for those wishing to save fact gathering time. Both 'smart' and 'explicit' will use the cache plugin."
env: [{name: ANSIBLE_GATHERING}]
ini:
- key: gathering
section: defaults
version_added: "1.6"
choices: ['smart', 'explicit', 'implicit']
DEFAULT_GATHER_SUBSET:
name: Gather facts subset
default: ['all']
description:
- Set the `gather_subset` option for the M(ansible.builtin.setup) task in the implicit fact gathering.
See the module documentation for specifics.
- "It does **not** apply to user defined M(ansible.builtin.setup) tasks."
env: [{name: ANSIBLE_GATHER_SUBSET}]
ini:
- key: gather_subset
section: defaults
version_added: "2.1"
type: list
DEFAULT_GATHER_TIMEOUT:
name: Gather facts timeout
default: 10
description:
- Set the timeout in seconds for the implicit fact gathering.
- "It does **not** apply to user defined M(ansible.builtin.setup) tasks."
env: [{name: ANSIBLE_GATHER_TIMEOUT}]
ini:
- {key: gather_timeout, section: defaults}
type: integer
yaml: {key: defaults.gather_timeout}
DEFAULT_HANDLER_INCLUDES_STATIC:
name: Make handler M(ansible.builtin.include) static
default: False
description:
- "Since 2.0 M(ansible.builtin.include) can be 'dynamic', this setting (if True) forces that if the include appears in a ``handlers`` section to be 'static'."
env: [{name: ANSIBLE_HANDLER_INCLUDES_STATIC}]
ini:
- {key: handler_includes_static, section: defaults}
type: boolean
deprecated:
why: include itself is deprecated and this setting will not matter in the future
version: "2.12"
alternatives: none as its already built into the decision between include_tasks and import_tasks
DEFAULT_HASH_BEHAVIOUR:
name: Hash merge behaviour
default: replace
type: string
choices:
replace: Any variable that is defined more than once is overwritten using the order from variable precedence rules (highest wins).
merge: Any dictionary variable will be recursively merged with new definitions across the different variable definition sources.
description:
- This setting controls how duplicate definitions of dictionary variables (aka hash, map, associative array) are handled in Ansible.
- This does not affect variables whose values are scalars (integers, strings) or arrays.
- "**WARNING**, changing this setting is not recommended as this is fragile and makes your content (plays, roles, collections) non portable,
leading to continual confusion and misuse. Don't change this setting unless you think you have an absolute need for it."
- We recommend avoiding reusing variable names and relying on the ``combine`` filter and ``vars`` and ``varnames`` lookups
to create merged versions of the individual variables. In our experience this is rarely really needed and a sign that too much
complexity has been introduced into the data structures and plays.
- For some uses you can also look into custom vars_plugins to merge on input, even substituting the default ``host_group_vars``
that is in charge of parsing the ``host_vars/`` and ``group_vars/`` directories. Most users of this setting are only interested in inventory scope,
but the setting itself affects all sources and makes debugging even harder.
- All playbooks and roles in the official examples repos assume the default for this setting.
- Changing the setting to ``merge`` applies across variable sources, but many sources will internally still overwrite the variables.
For example ``include_vars`` will dedupe variables internally before updating Ansible, with 'last defined' overwriting previous definitions in same file.
- The Ansible project recommends you **avoid ``merge`` for new projects.**
- It is the intention of the Ansible developers to eventually deprecate and remove this setting, but it is being kept as some users do heavily rely on it.
New projects should **avoid 'merge'**.
env: [{name: ANSIBLE_HASH_BEHAVIOUR}]
ini:
- {key: hash_behaviour, section: defaults}
DEFAULT_HOST_LIST:
name: Inventory Source
default: /etc/ansible/hosts
description: Comma separated list of Ansible inventory sources
env:
- name: ANSIBLE_INVENTORY
expand_relative_paths: True
ini:
- key: inventory
section: defaults
type: pathlist
yaml: {key: defaults.inventory}
DEFAULT_HTTPAPI_PLUGIN_PATH:
name: HttpApi Plugins Path
default: ~/.ansible/plugins/httpapi:/usr/share/ansible/plugins/httpapi
description: Colon separated paths in which Ansible will search for HttpApi Plugins.
env: [{name: ANSIBLE_HTTPAPI_PLUGINS}]
ini:
- {key: httpapi_plugins, section: defaults}
type: pathspec
DEFAULT_INTERNAL_POLL_INTERVAL:
name: Internal poll interval
default: 0.001
env: []
ini:
- {key: internal_poll_interval, section: defaults}
type: float
version_added: "2.2"
description:
- This sets the interval (in seconds) of Ansible internal processes polling each other.
Lower values improve performance with large playbooks at the expense of extra CPU load.
Higher values are more suitable for Ansible usage in automation scenarios,
when UI responsiveness is not required but CPU usage might be a concern.
- "The default corresponds to the value hardcoded in Ansible <= 2.1"
DEFAULT_INVENTORY_PLUGIN_PATH:
name: Inventory Plugins Path
default: ~/.ansible/plugins/inventory:/usr/share/ansible/plugins/inventory
description: Colon separated paths in which Ansible will search for Inventory Plugins.
env: [{name: ANSIBLE_INVENTORY_PLUGINS}]
ini:
- {key: inventory_plugins, section: defaults}
type: pathspec
DEFAULT_JINJA2_EXTENSIONS:
name: Enabled Jinja2 extensions
default: []
description:
- This is a developer-specific feature that allows enabling additional Jinja2 extensions.
- "See the Jinja2 documentation for details. If you do not know what these do, you probably don't need to change this setting :)"
env: [{name: ANSIBLE_JINJA2_EXTENSIONS}]
ini:
- {key: jinja2_extensions, section: defaults}
DEFAULT_JINJA2_NATIVE:
name: Use Jinja2's NativeEnvironment for templating
default: False
description: This option preserves variable types during template operations. This requires Jinja2 >= 2.10.
env: [{name: ANSIBLE_JINJA2_NATIVE}]
ini:
- {key: jinja2_native, section: defaults}
type: boolean
yaml: {key: jinja2_native}
version_added: 2.7
DEFAULT_KEEP_REMOTE_FILES:
name: Keep remote files
default: False
description:
- Enables/disables the cleaning up of the temporary files Ansible used to execute the tasks on the remote.
- If this option is enabled it will disable ``ANSIBLE_PIPELINING``.
env: [{name: ANSIBLE_KEEP_REMOTE_FILES}]
ini:
- {key: keep_remote_files, section: defaults}
type: boolean
DEFAULT_LIBVIRT_LXC_NOSECLABEL:
# TODO: move to plugin
name: No security label on Lxc
default: False
description:
- "This setting causes libvirt to connect to lxc containers by passing --noseclabel to virsh.
This is necessary when running on systems which do not have SELinux."
env:
- name: LIBVIRT_LXC_NOSECLABEL
deprecated:
why: environment variables without ``ANSIBLE_`` prefix are deprecated
version: "2.12"
alternatives: the ``ANSIBLE_LIBVIRT_LXC_NOSECLABEL`` environment variable
- name: ANSIBLE_LIBVIRT_LXC_NOSECLABEL
ini:
- {key: libvirt_lxc_noseclabel, section: selinux}
type: boolean
version_added: "2.1"
DEFAULT_LOAD_CALLBACK_PLUGINS:
name: Load callbacks for adhoc
default: False
description:
- Controls whether callback plugins are loaded when running /usr/bin/ansible.
This may be used to log activity from the command line, send notifications, and so on.
Callback plugins are always loaded for ``ansible-playbook``.
env: [{name: ANSIBLE_LOAD_CALLBACK_PLUGINS}]
ini:
- {key: bin_ansible_callbacks, section: defaults}
type: boolean
version_added: "1.8"
DEFAULT_LOCAL_TMP:
name: Controller temporary directory
default: ~/.ansible/tmp
description: Temporary directory for Ansible to use on the controller.
env: [{name: ANSIBLE_LOCAL_TEMP}]
ini:
- {key: local_tmp, section: defaults}
type: tmppath
DEFAULT_LOG_PATH:
name: Ansible log file path
default: ~
description: File to which Ansible will log on the controller. When empty logging is disabled.
env: [{name: ANSIBLE_LOG_PATH}]
ini:
- {key: log_path, section: defaults}
type: path
DEFAULT_LOG_FILTER:
name: Name filters for python logger
default: []
description: List of logger names to filter out of the log file
env: [{name: ANSIBLE_LOG_FILTER}]
ini:
- {key: log_filter, section: defaults}
type: list
DEFAULT_LOOKUP_PLUGIN_PATH:
name: Lookup Plugins Path
description: Colon separated paths in which Ansible will search for Lookup Plugins.
default: ~/.ansible/plugins/lookup:/usr/share/ansible/plugins/lookup
env: [{name: ANSIBLE_LOOKUP_PLUGINS}]
ini:
- {key: lookup_plugins, section: defaults}
type: pathspec
yaml: {key: defaults.lookup_plugins}
DEFAULT_MANAGED_STR:
name: Ansible managed
default: 'Ansible managed'
description: Sets the macro for the 'ansible_managed' variable available for M(ansible.builtin.template) and M(ansible.windows.win_template) modules. This is only relevant for those two modules.
env: []
ini:
- {key: ansible_managed, section: defaults}
yaml: {key: defaults.ansible_managed}
DEFAULT_MODULE_ARGS:
name: Adhoc default arguments
default: ''
description:
- This sets the default arguments to pass to the ``ansible`` adhoc binary if no ``-a`` is specified.
env: [{name: ANSIBLE_MODULE_ARGS}]
ini:
- {key: module_args, section: defaults}
DEFAULT_MODULE_COMPRESSION:
name: Python module compression
default: ZIP_DEFLATED
description: Compression scheme to use when transferring Python modules to the target.
env: []
ini:
- {key: module_compression, section: defaults}
# vars:
# - name: ansible_module_compression
DEFAULT_MODULE_NAME:
name: Default adhoc module
default: command
description: "Module to use with the ``ansible`` AdHoc command, if none is specified via ``-m``."
env: []
ini:
- {key: module_name, section: defaults}
DEFAULT_MODULE_PATH:
name: Modules Path
description: Colon separated paths in which Ansible will search for Modules.
default: ~/.ansible/plugins/modules:/usr/share/ansible/plugins/modules
env: [{name: ANSIBLE_LIBRARY}]
ini:
- {key: library, section: defaults}
type: pathspec
DEFAULT_MODULE_UTILS_PATH:
name: Module Utils Path
description: Colon separated paths in which Ansible will search for Module utils files, which are shared by modules.
default: ~/.ansible/plugins/module_utils:/usr/share/ansible/plugins/module_utils
env: [{name: ANSIBLE_MODULE_UTILS}]
ini:
- {key: module_utils, section: defaults}
type: pathspec
DEFAULT_NETCONF_PLUGIN_PATH:
name: Netconf Plugins Path
default: ~/.ansible/plugins/netconf:/usr/share/ansible/plugins/netconf
description: Colon separated paths in which Ansible will search for Netconf Plugins.
env: [{name: ANSIBLE_NETCONF_PLUGINS}]
ini:
- {key: netconf_plugins, section: defaults}
type: pathspec
DEFAULT_NO_LOG:
name: No log
default: False
description: "Toggle Ansible's display and logging of task details, mainly used to avoid security disclosures."
env: [{name: ANSIBLE_NO_LOG}]
ini:
- {key: no_log, section: defaults}
type: boolean
DEFAULT_NO_TARGET_SYSLOG:
name: No syslog on target
default: False
description:
    - Toggle Ansible logging to syslog on the target when it executes tasks. On Windows hosts this will disable newer
      style PowerShell modules from writing to the event log.
env: [{name: ANSIBLE_NO_TARGET_SYSLOG}]
ini:
- {key: no_target_syslog, section: defaults}
vars:
- name: ansible_no_target_syslog
version_added: '2.10'
type: boolean
yaml: {key: defaults.no_target_syslog}
DEFAULT_NULL_REPRESENTATION:
name: Represent a null
default: ~
description: What templating should return as a 'null' value. When not set it will let Jinja2 decide.
env: [{name: ANSIBLE_NULL_REPRESENTATION}]
ini:
- {key: null_representation, section: defaults}
type: none
DEFAULT_POLL_INTERVAL:
name: Async poll interval
default: 15
description:
- For asynchronous tasks in Ansible (covered in Asynchronous Actions and Polling),
this is how often to check back on the status of those tasks when an explicit poll interval is not supplied.
The default is a reasonably moderate 15 seconds which is a tradeoff between checking in frequently and
providing a quick turnaround when something may have completed.
env: [{name: ANSIBLE_POLL_INTERVAL}]
ini:
- {key: poll_interval, section: defaults}
type: integer
DEFAULT_PRIVATE_KEY_FILE:
name: Private key file
default: ~
description:
- Option for connections using a certificate or key file to authenticate, rather than an agent or passwords,
you can set the default value here to avoid re-specifying --private-key with every invocation.
env: [{name: ANSIBLE_PRIVATE_KEY_FILE}]
ini:
- {key: private_key_file, section: defaults}
type: path
DEFAULT_PRIVATE_ROLE_VARS:
name: Private role variables
default: False
description:
- Makes role variables inaccessible from other roles.
- This was introduced as a way to reset role variables to default values if
a role is used more than once in a playbook.
env: [{name: ANSIBLE_PRIVATE_ROLE_VARS}]
ini:
- {key: private_role_vars, section: defaults}
type: boolean
yaml: {key: defaults.private_role_vars}
DEFAULT_REMOTE_PORT:
name: Remote port
default: ~
description: Port to use in remote connections, when blank it will use the connection plugin default.
env: [{name: ANSIBLE_REMOTE_PORT}]
ini:
- {key: remote_port, section: defaults}
type: integer
yaml: {key: defaults.remote_port}
DEFAULT_REMOTE_USER:
name: Login/Remote User
default:
description:
- Sets the login user for the target machines
- "When blank it uses the connection plugin's default, normally the user currently executing Ansible."
env: [{name: ANSIBLE_REMOTE_USER}]
ini:
- {key: remote_user, section: defaults}
DEFAULT_ROLES_PATH:
name: Roles path
default: ~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles
description: Colon separated paths in which Ansible will search for Roles.
env: [{name: ANSIBLE_ROLES_PATH}]
expand_relative_paths: True
ini:
- {key: roles_path, section: defaults}
type: pathspec
yaml: {key: defaults.roles_path}
DEFAULT_SELINUX_SPECIAL_FS:
name: Problematic file systems
default: fuse, nfs, vboxsf, ramfs, 9p, vfat
description:
- "Some filesystems do not support safe operations and/or return inconsistent errors,
this setting makes Ansible 'tolerate' those in the list w/o causing fatal errors."
- Data corruption may occur and writes are not always verified when a filesystem is in the list.
env:
- name: ANSIBLE_SELINUX_SPECIAL_FS
version_added: "2.9"
ini:
- {key: special_context_filesystems, section: selinux}
type: list
DEFAULT_STDOUT_CALLBACK:
name: Main display callback plugin
default: default
description:
- "Set the main callback used to display Ansible output, you can only have one at a time."
- You can have many other callbacks, but just one can be in charge of stdout.
env: [{name: ANSIBLE_STDOUT_CALLBACK}]
ini:
- {key: stdout_callback, section: defaults}
ENABLE_TASK_DEBUGGER:
name: Whether to enable the task debugger
default: False
description:
- Whether or not to enable the task debugger, this previously was done as a strategy plugin.
    - Now all strategy plugins can inherit this behavior. The debugger defaults to activating when
      a task is failed or unreachable. Use the debugger keyword for more flexibility.
type: boolean
env: [{name: ANSIBLE_ENABLE_TASK_DEBUGGER}]
ini:
- {key: enable_task_debugger, section: defaults}
version_added: "2.5"
TASK_DEBUGGER_IGNORE_ERRORS:
name: Whether a failed task with ignore_errors=True will still invoke the debugger
default: True
description:
- This option defines whether the task debugger will be invoked on a failed task when ignore_errors=True
is specified.
- True specifies that the debugger will honor ignore_errors, False will not honor ignore_errors.
type: boolean
env: [{name: ANSIBLE_TASK_DEBUGGER_IGNORE_ERRORS}]
ini:
- {key: task_debugger_ignore_errors, section: defaults}
version_added: "2.7"
DEFAULT_STRATEGY:
name: Implied strategy
default: 'linear'
description: Set the default strategy used for plays.
env: [{name: ANSIBLE_STRATEGY}]
ini:
- {key: strategy, section: defaults}
version_added: "2.3"
DEFAULT_STRATEGY_PLUGIN_PATH:
name: Strategy Plugins Path
description: Colon separated paths in which Ansible will search for Strategy Plugins.
default: ~/.ansible/plugins/strategy:/usr/share/ansible/plugins/strategy
env: [{name: ANSIBLE_STRATEGY_PLUGINS}]
ini:
- {key: strategy_plugins, section: defaults}
type: pathspec
DEFAULT_SU:
default: False
description: 'Toggle the use of "su" for tasks.'
env: [{name: ANSIBLE_SU}]
ini:
- {key: su, section: defaults}
type: boolean
yaml: {key: defaults.su}
DEFAULT_SYSLOG_FACILITY:
name: syslog facility
default: LOG_USER
description: Syslog facility to use when Ansible logs to the remote target
env: [{name: ANSIBLE_SYSLOG_FACILITY}]
ini:
- {key: syslog_facility, section: defaults}
DEFAULT_TASK_INCLUDES_STATIC:
name: Task include static
default: False
description:
- The `include` tasks can be static or dynamic, this toggles the default expected behaviour if autodetection fails and it is not explicitly set in task.
env: [{name: ANSIBLE_TASK_INCLUDES_STATIC}]
ini:
- {key: task_includes_static, section: defaults}
type: boolean
version_added: "2.1"
deprecated:
why: include itself is deprecated and this setting will not matter in the future
version: "2.12"
alternatives: None, as its already built into the decision between include_tasks and import_tasks
DEFAULT_TERMINAL_PLUGIN_PATH:
name: Terminal Plugins Path
default: ~/.ansible/plugins/terminal:/usr/share/ansible/plugins/terminal
description: Colon separated paths in which Ansible will search for Terminal Plugins.
env: [{name: ANSIBLE_TERMINAL_PLUGINS}]
ini:
- {key: terminal_plugins, section: defaults}
type: pathspec
DEFAULT_TEST_PLUGIN_PATH:
name: Jinja2 Test Plugins Path
description: Colon separated paths in which Ansible will search for Jinja2 Test Plugins.
default: ~/.ansible/plugins/test:/usr/share/ansible/plugins/test
env: [{name: ANSIBLE_TEST_PLUGINS}]
ini:
- {key: test_plugins, section: defaults}
type: pathspec
DEFAULT_TIMEOUT:
name: Connection timeout
default: 10
description: This is the default timeout for connection plugins to use.
env: [{name: ANSIBLE_TIMEOUT}]
ini:
- {key: timeout, section: defaults}
type: integer
DEFAULT_TRANSPORT:
# note that ssh_utils refs this and needs to be updated if removed
name: Connection plugin
default: smart
description: "Default connection plugin to use, the 'smart' option will toggle between 'ssh' and 'paramiko' depending on controller OS and ssh versions"
env: [{name: ANSIBLE_TRANSPORT}]
ini:
- {key: transport, section: defaults}
DEFAULT_UNDEFINED_VAR_BEHAVIOR:
name: Jinja2 fail on undefined
default: True
version_added: "1.3"
description:
- When True, this causes ansible templating to fail steps that reference variable names that are likely typoed.
- "Otherwise, any '{{ template_expression }}' that contains undefined variables will be rendered in a template or ansible action line exactly as written."
env: [{name: ANSIBLE_ERROR_ON_UNDEFINED_VARS}]
ini:
- {key: error_on_undefined_vars, section: defaults}
type: boolean
DEFAULT_VARS_PLUGIN_PATH:
name: Vars Plugins Path
default: ~/.ansible/plugins/vars:/usr/share/ansible/plugins/vars
description: Colon separated paths in which Ansible will search for Vars Plugins.
env: [{name: ANSIBLE_VARS_PLUGINS}]
ini:
- {key: vars_plugins, section: defaults}
type: pathspec
# TODO: unused?
#DEFAULT_VAR_COMPRESSION_LEVEL:
# default: 0
# description: 'TODO: write it'
# env: [{name: ANSIBLE_VAR_COMPRESSION_LEVEL}]
# ini:
# - {key: var_compression_level, section: defaults}
# type: integer
# yaml: {key: defaults.var_compression_level}
DEFAULT_VAULT_ID_MATCH:
name: Force vault id match
default: False
description: 'If true, decrypting vaults with a vault id will only try the password from the matching vault-id'
env: [{name: ANSIBLE_VAULT_ID_MATCH}]
ini:
- {key: vault_id_match, section: defaults}
yaml: {key: defaults.vault_id_match}
DEFAULT_VAULT_IDENTITY:
name: Vault id label
default: default
description: 'The label to use for the default vault id label in cases where a vault id label is not provided'
env: [{name: ANSIBLE_VAULT_IDENTITY}]
ini:
- {key: vault_identity, section: defaults}
yaml: {key: defaults.vault_identity}
DEFAULT_VAULT_ENCRYPT_IDENTITY:
name: Vault id to use for encryption
default:
description: 'The vault_id to use for encrypting by default. If multiple vault_ids are provided, this specifies which to use for encryption. The --encrypt-vault-id cli option overrides the configured value.'
env: [{name: ANSIBLE_VAULT_ENCRYPT_IDENTITY}]
ini:
- {key: vault_encrypt_identity, section: defaults}
yaml: {key: defaults.vault_encrypt_identity}
DEFAULT_VAULT_IDENTITY_LIST:
name: Default vault ids
default: []
description: 'A list of vault-ids to use by default. Equivalent to multiple --vault-id args. Vault-ids are tried in order.'
env: [{name: ANSIBLE_VAULT_IDENTITY_LIST}]
ini:
- {key: vault_identity_list, section: defaults}
type: list
yaml: {key: defaults.vault_identity_list}
DEFAULT_VAULT_PASSWORD_FILE:
name: Vault password file
default: ~
description: 'The vault password file to use. Equivalent to --vault-password-file or --vault-id'
env: [{name: ANSIBLE_VAULT_PASSWORD_FILE}]
ini:
- {key: vault_password_file, section: defaults}
type: path
yaml: {key: defaults.vault_password_file}
DEFAULT_VERBOSITY:
name: Verbosity
default: 0
description: Sets the default verbosity, equivalent to the number of ``-v`` passed in the command line.
env: [{name: ANSIBLE_VERBOSITY}]
ini:
- {key: verbosity, section: defaults}
type: integer
DEPRECATION_WARNINGS:
name: Deprecation messages
default: True
description: "Toggle to control the showing of deprecation warnings"
env: [{name: ANSIBLE_DEPRECATION_WARNINGS}]
ini:
- {key: deprecation_warnings, section: defaults}
type: boolean
DEVEL_WARNING:
name: Running devel warning
default: True
description: Toggle to control showing warnings related to running devel
env: [{name: ANSIBLE_DEVEL_WARNING}]
ini:
- {key: devel_warning, section: defaults}
type: boolean
DIFF_ALWAYS:
name: Show differences
default: False
description: Configuration toggle to tell modules to show differences when in 'changed' status, equivalent to ``--diff``.
env: [{name: ANSIBLE_DIFF_ALWAYS}]
ini:
- {key: always, section: diff}
type: bool
DIFF_CONTEXT:
name: Difference context
default: 3
description: How many lines of context to show when displaying the differences between files.
env: [{name: ANSIBLE_DIFF_CONTEXT}]
ini:
- {key: context, section: diff}
type: integer
DISPLAY_ARGS_TO_STDOUT:
name: Show task arguments
default: False
description:
- "Normally ``ansible-playbook`` will print a header for each task that is run.
These headers will contain the name: field from the task if you specified one.
If you didn't then ``ansible-playbook`` uses the task's action to help you tell which task is presently running.
Sometimes you run many of the same action and so you want more information about the task to differentiate it from others of the same action.
If you set this variable to True in the config then ``ansible-playbook`` will also include the task's arguments in the header."
- "This setting defaults to False because there is a chance that you have sensitive values in your parameters and
you do not want those to be printed."
- "If you set this to True you should be sure that you have secured your environment's stdout
(no one can shoulder surf your screen and you aren't saving stdout to an insecure file) or
made sure that all of your playbooks explicitly added the ``no_log: True`` parameter to tasks which have sensitive values
See How do I keep secret data in my playbook? for more information."
env: [{name: ANSIBLE_DISPLAY_ARGS_TO_STDOUT}]
ini:
- {key: display_args_to_stdout, section: defaults}
type: boolean
version_added: "2.1"
DISPLAY_SKIPPED_HOSTS:
name: Show skipped results
default: True
description: "Toggle to control displaying skipped task/host entries in a task in the default callback"
env:
- name: DISPLAY_SKIPPED_HOSTS
deprecated:
why: environment variables without ``ANSIBLE_`` prefix are deprecated
version: "2.12"
alternatives: the ``ANSIBLE_DISPLAY_SKIPPED_HOSTS`` environment variable
- name: ANSIBLE_DISPLAY_SKIPPED_HOSTS
ini:
- {key: display_skipped_hosts, section: defaults}
type: boolean
DOCSITE_ROOT_URL:
name: Root docsite URL
default: https://docs.ansible.com/ansible/
description: Root docsite URL used to generate docs URLs in warning/error text;
must be an absolute URL with valid scheme and trailing slash.
ini:
- {key: docsite_root_url, section: defaults}
version_added: "2.8"
DUPLICATE_YAML_DICT_KEY:
name: Controls ansible behaviour when finding duplicate keys in YAML.
default: warn
description:
- By default Ansible will issue a warning when a duplicate dict key is encountered in YAML.
- These warnings can be silenced by adjusting this setting to False.
env: [{name: ANSIBLE_DUPLICATE_YAML_DICT_KEY}]
ini:
- {key: duplicate_dict_key, section: defaults}
type: string
choices: ['warn', 'error', 'ignore']
version_added: "2.9"
ERROR_ON_MISSING_HANDLER:
name: Missing handler error
default: True
description: "Toggle to allow missing handlers to become a warning instead of an error when notifying."
env: [{name: ANSIBLE_ERROR_ON_MISSING_HANDLER}]
ini:
- {key: error_on_missing_handler, section: defaults}
type: boolean
CONNECTION_FACTS_MODULES:
name: Map of connections to fact modules
default:
# use ansible.legacy names on unqualified facts modules to allow library/ overrides
asa: ansible.legacy.asa_facts
cisco.asa.asa: cisco.asa.asa_facts
eos: ansible.legacy.eos_facts
arista.eos.eos: arista.eos.eos_facts
frr: ansible.legacy.frr_facts
frr.frr.frr: frr.frr.frr_facts
ios: ansible.legacy.ios_facts
cisco.ios.ios: cisco.ios.ios_facts
iosxr: ansible.legacy.iosxr_facts
cisco.iosxr.iosxr: cisco.iosxr.iosxr_facts
junos: ansible.legacy.junos_facts
junipernetworks.junos.junos: junipernetworks.junos.junos_facts
nxos: ansible.legacy.nxos_facts
cisco.nxos.nxos: cisco.nxos.nxos_facts
vyos: ansible.legacy.vyos_facts
vyos.vyos.vyos: vyos.vyos.vyos_facts
exos: ansible.legacy.exos_facts
extreme.exos.exos: extreme.exos.exos_facts
slxos: ansible.legacy.slxos_facts
extreme.slxos.slxos: extreme.slxos.slxos_facts
voss: ansible.legacy.voss_facts
extreme.voss.voss: extreme.voss.voss_facts
ironware: ansible.legacy.ironware_facts
community.network.ironware: community.network.ironware_facts
description: "Which modules to run during a play's fact gathering stage based on connection"
type: dict
FACTS_MODULES:
name: Gather Facts Modules
default:
- smart
description: "Which modules to run during a play's fact gathering stage, using the default of 'smart' will try to figure it out based on connection type."
env: [{name: ANSIBLE_FACTS_MODULES}]
ini:
- {key: facts_modules, section: defaults}
type: list
vars:
- name: ansible_facts_modules
GALAXY_IGNORE_CERTS:
name: Galaxy validate certs
default: False
description:
- If set to yes, ansible-galaxy will not validate TLS certificates.
This can be useful for testing against a server with a self-signed certificate.
env: [{name: ANSIBLE_GALAXY_IGNORE}]
ini:
- {key: ignore_certs, section: galaxy}
type: boolean
GALAXY_ROLE_SKELETON:
name: Galaxy role or collection skeleton directory
default:
description: Role or collection skeleton directory to use as a template for the ``init`` action in ``ansible-galaxy``, same as ``--role-skeleton``.
env: [{name: ANSIBLE_GALAXY_ROLE_SKELETON}]
ini:
- {key: role_skeleton, section: galaxy}
type: path
GALAXY_ROLE_SKELETON_IGNORE:
name: Galaxy skeleton ignore
default: ["^.git$", "^.*/.git_keep$"]
description: patterns of files to ignore inside a Galaxy role or collection skeleton directory
env: [{name: ANSIBLE_GALAXY_ROLE_SKELETON_IGNORE}]
ini:
- {key: role_skeleton_ignore, section: galaxy}
type: list
# TODO: unused?
#GALAXY_SCMS:
# name: Galaxy SCMS
# default: git, hg
# description: Available galaxy source control management systems.
# env: [{name: ANSIBLE_GALAXY_SCMS}]
# ini:
# - {key: scms, section: galaxy}
# type: list
GALAXY_SERVER:
default: https://galaxy.ansible.com
description: "URL to prepend when roles don't specify the full URI, assume they are referencing this server as the source."
env: [{name: ANSIBLE_GALAXY_SERVER}]
ini:
- {key: server, section: galaxy}
yaml: {key: galaxy.server}
GALAXY_SERVER_LIST:
description:
- A list of Galaxy servers to use when installing a collection.
- The value corresponds to the config ini header ``[galaxy_server.{{item}}]`` which defines the server details.
- 'See :ref:`galaxy_server_config` for more details on how to define a Galaxy server.'
  - The order of servers in this list is used as the order in which a collection is resolved.
- Setting this config option will ignore the :ref:`galaxy_server` config option.
env: [{name: ANSIBLE_GALAXY_SERVER_LIST}]
ini:
- {key: server_list, section: galaxy}
type: list
version_added: "2.9"
GALAXY_TOKEN_PATH:
default: ~/.ansible/galaxy_token
description: "Local path to galaxy access token file"
env: [{name: ANSIBLE_GALAXY_TOKEN_PATH}]
ini:
- {key: token_path, section: galaxy}
type: path
version_added: "2.9"
GALAXY_DISPLAY_PROGRESS:
default: ~
description:
- Some steps in ``ansible-galaxy`` display a progress wheel which can cause issues on certain displays or when
    outputting the stdout to a file.
- This config option controls whether the display wheel is shown or not.
- The default is to show the display wheel if stdout has a tty.
env: [{name: ANSIBLE_GALAXY_DISPLAY_PROGRESS}]
ini:
- {key: display_progress, section: galaxy}
type: bool
version_added: "2.10"
GALAXY_CACHE_DIR:
default: ~/.ansible/galaxy_cache
description:
- The directory that stores cached responses from a Galaxy server.
- This is only used by the ``ansible-galaxy collection install`` and ``download`` commands.
- Cache files inside this dir will be ignored if they are world writable.
env:
- name: ANSIBLE_GALAXY_CACHE_DIR
ini:
- section: galaxy
key: cache_dir
type: path
version_added: '2.11'
HOST_KEY_CHECKING:
# note: constant not in use by ssh plugin anymore
# TODO: check non ssh connection plugins for use/migration
name: Check host keys
default: True
description: 'Set this to "False" if you want to avoid host key checking by the underlying tools Ansible uses to connect to the host'
env: [{name: ANSIBLE_HOST_KEY_CHECKING}]
ini:
- {key: host_key_checking, section: defaults}
type: boolean
HOST_PATTERN_MISMATCH:
name: Control host pattern mismatch behaviour
default: 'warning'
description: This setting changes the behaviour of mismatched host patterns, it allows you to force a fatal error, a warning or just ignore it
env: [{name: ANSIBLE_HOST_PATTERN_MISMATCH}]
ini:
- {key: host_pattern_mismatch, section: inventory}
choices: ['warning', 'error', 'ignore']
version_added: "2.8"
INTERPRETER_PYTHON:
name: Python interpreter path (or automatic discovery behavior) used for module execution
default: auto_legacy
env: [{name: ANSIBLE_PYTHON_INTERPRETER}]
ini:
- {key: interpreter_python, section: defaults}
vars:
- {name: ansible_python_interpreter}
version_added: "2.8"
description:
- Path to the Python interpreter to be used for module execution on remote targets, or an automatic discovery mode.
Supported discovery modes are ``auto``, ``auto_silent``, and ``auto_legacy`` (the default). All discovery modes
employ a lookup table to use the included system Python (on distributions known to include one), falling back to a
fixed ordered list of well-known Python interpreter locations if a platform-specific default is not available. The
fallback behavior will issue a warning that the interpreter should be set explicitly (since interpreters installed
later may change which one is used). This warning behavior can be disabled by setting ``auto_silent``. The default
value of ``auto_legacy`` provides all the same behavior, but for backwards-compatibility with older Ansible releases
that always defaulted to ``/usr/bin/python``, will use that interpreter if present (and issue a warning that the
default behavior will change to that of ``auto`` in a future Ansible release.
INTERPRETER_PYTHON_DISTRO_MAP:
name: Mapping of known included platform pythons for various Linux distros
default:
centos: &rhelish
'6': /usr/bin/python
'8': /usr/libexec/platform-python
debian:
'10': /usr/bin/python3
fedora:
'23': /usr/bin/python3
oracle: *rhelish
redhat: *rhelish
rhel: *rhelish
ubuntu:
'14': /usr/bin/python
'16': /usr/bin/python3
version_added: "2.8"
# FUTURE: add inventory override once we're sure it can't be abused by a rogue target
# FUTURE: add a platform layer to the map so we could use for, eg, freebsd/macos/etc?
INTERPRETER_PYTHON_FALLBACK:
name: Ordered list of Python interpreters to check for in discovery
default:
- /usr/bin/python
- python3.9
- python3.8
- python3.7
- python3.6
- python3.5
- python2.7
- python2.6
- /usr/libexec/platform-python
- /usr/bin/python3
- python
# FUTURE: add inventory override once we're sure it can't be abused by a rogue target
version_added: "2.8"
TRANSFORM_INVALID_GROUP_CHARS:
name: Transform invalid characters in group names
default: 'never'
description:
- Make ansible transform invalid characters in group names supplied by inventory sources.
- If 'never' it will allow for the group name but warn about the issue.
- When 'ignore', it does the same as 'never', without issuing a warning.
- When 'always' it will replace any invalid characters with '_' (underscore) and warn the user
- When 'silently', it does the same as 'always', without issuing a warning.
env: [{name: ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS}]
ini:
- {key: force_valid_group_names, section: defaults}
type: string
choices: ['always', 'never', 'ignore', 'silently']
version_added: '2.8'
INVALID_TASK_ATTRIBUTE_FAILED:
name: Controls whether invalid attributes for a task result in errors instead of warnings
default: True
description: If 'false', invalid attributes for a task will result in warnings instead of errors
type: boolean
env:
- name: ANSIBLE_INVALID_TASK_ATTRIBUTE_FAILED
ini:
- key: invalid_task_attribute_failed
section: defaults
version_added: "2.7"
INVENTORY_ANY_UNPARSED_IS_FAILED:
name: Controls whether any unparseable inventory source is a fatal error
default: False
description: >
If 'true', it is a fatal error when any given inventory source
cannot be successfully parsed by any available inventory plugin;
otherwise, this situation only attracts a warning.
type: boolean
env: [{name: ANSIBLE_INVENTORY_ANY_UNPARSED_IS_FAILED}]
ini:
- {key: any_unparsed_is_failed, section: inventory}
version_added: "2.7"
INVENTORY_CACHE_ENABLED:
name: Inventory caching enabled
default: False
description: Toggle to turn on inventory caching
env: [{name: ANSIBLE_INVENTORY_CACHE}]
ini:
- {key: cache, section: inventory}
type: bool
INVENTORY_CACHE_PLUGIN:
name: Inventory cache plugin
description: The plugin for caching inventory. If INVENTORY_CACHE_PLUGIN is not provided CACHE_PLUGIN can be used instead.
env: [{name: ANSIBLE_INVENTORY_CACHE_PLUGIN}]
ini:
- {key: cache_plugin, section: inventory}
INVENTORY_CACHE_PLUGIN_CONNECTION:
name: Inventory cache plugin URI to override the defaults section
description: The inventory cache connection. If INVENTORY_CACHE_PLUGIN_CONNECTION is not provided CACHE_PLUGIN_CONNECTION can be used instead.
env: [{name: ANSIBLE_INVENTORY_CACHE_CONNECTION}]
ini:
- {key: cache_connection, section: inventory}
INVENTORY_CACHE_PLUGIN_PREFIX:
name: Inventory cache plugin table prefix
description: The table prefix for the cache plugin. If INVENTORY_CACHE_PLUGIN_PREFIX is not provided CACHE_PLUGIN_PREFIX can be used instead.
env: [{name: ANSIBLE_INVENTORY_CACHE_PLUGIN_PREFIX}]
default: ansible_facts
ini:
- {key: cache_prefix, section: inventory}
INVENTORY_CACHE_TIMEOUT:
name: Inventory cache plugin expiration timeout
description: Expiration timeout for the inventory cache plugin data. If INVENTORY_CACHE_TIMEOUT is not provided CACHE_TIMEOUT can be used instead.
default: 3600
env: [{name: ANSIBLE_INVENTORY_CACHE_TIMEOUT}]
ini:
- {key: cache_timeout, section: inventory}
INVENTORY_ENABLED:
name: Active Inventory plugins
default: ['host_list', 'script', 'auto', 'yaml', 'ini', 'toml']
description: List of enabled inventory plugins, it also determines the order in which they are used.
env: [{name: ANSIBLE_INVENTORY_ENABLED}]
ini:
- {key: enable_plugins, section: inventory}
type: list
INVENTORY_EXPORT:
name: Set ansible-inventory into export mode
default: False
description: Controls if ansible-inventory will accurately reflect Ansible's view into inventory or its optimized for exporting.
env: [{name: ANSIBLE_INVENTORY_EXPORT}]
ini:
- {key: export, section: inventory}
type: bool
INVENTORY_IGNORE_EXTS:
name: Inventory ignore extensions
default: "{{(REJECT_EXTS + ('.orig', '.ini', '.cfg', '.retry'))}}"
description: List of extensions to ignore when using a directory as an inventory source
env: [{name: ANSIBLE_INVENTORY_IGNORE}]
ini:
- {key: inventory_ignore_extensions, section: defaults}
- {key: ignore_extensions, section: inventory}
type: list
INVENTORY_IGNORE_PATTERNS:
name: Inventory ignore patterns
default: []
description: List of patterns to ignore when using a directory as an inventory source
env: [{name: ANSIBLE_INVENTORY_IGNORE_REGEX}]
ini:
- {key: inventory_ignore_patterns, section: defaults}
- {key: ignore_patterns, section: inventory}
type: list
INVENTORY_UNPARSED_IS_FAILED:
name: Unparsed Inventory failure
default: False
description: >
If 'true' it is a fatal error if every single potential inventory
source fails to parse, otherwise this situation will only attract a
warning.
env: [{name: ANSIBLE_INVENTORY_UNPARSED_FAILED}]
ini:
- {key: unparsed_is_failed, section: inventory}
type: bool
MAX_FILE_SIZE_FOR_DIFF:
name: Diff maximum file size
default: 104448
description: Maximum size of files to be considered for diff display
env: [{name: ANSIBLE_MAX_DIFF_SIZE}]
ini:
- {key: max_diff_size, section: defaults}
type: int
NETWORK_GROUP_MODULES:
name: Network module families
default: [eos, nxos, ios, iosxr, junos, enos, ce, vyos, sros, dellos9, dellos10, dellos6, asa, aruba, aireos, bigip, ironware, onyx, netconf, exos, voss, slxos]
description: 'TODO: write it'
env:
- name: NETWORK_GROUP_MODULES
deprecated:
why: environment variables without ``ANSIBLE_`` prefix are deprecated
version: "2.12"
alternatives: the ``ANSIBLE_NETWORK_GROUP_MODULES`` environment variable
- name: ANSIBLE_NETWORK_GROUP_MODULES
ini:
- {key: network_group_modules, section: defaults}
type: list
yaml: {key: defaults.network_group_modules}
INJECT_FACTS_AS_VARS:
default: True
description:
- Facts are available inside the `ansible_facts` variable, this setting also pushes them as their own vars in the main namespace.
- Unlike inside the `ansible_facts` dictionary, these will have an `ansible_` prefix.
env: [{name: ANSIBLE_INJECT_FACT_VARS}]
ini:
- {key: inject_facts_as_vars, section: defaults}
type: boolean
version_added: "2.5"
MODULE_IGNORE_EXTS:
name: Module ignore extensions
default: "{{(REJECT_EXTS + ('.yaml', '.yml', '.ini'))}}"
description:
- List of extensions to ignore when looking for modules to load
- This is for rejecting script and binary module fallback extensions
env: [{name: ANSIBLE_MODULE_IGNORE_EXTS}]
ini:
- {key: module_ignore_exts, section: defaults}
type: list
OLD_PLUGIN_CACHE_CLEARING:
  description: Previously Ansible would only clear some of the plugin loading caches when loading new roles, this led to some behaviours in which a plugin loaded in previous plays would be unexpectedly 'sticky'. This setting allows returning to that behaviour.
env: [{name: ANSIBLE_OLD_PLUGIN_CACHE_CLEAR}]
ini:
- {key: old_plugin_cache_clear, section: defaults}
type: boolean
default: False
version_added: "2.8"
PARAMIKO_HOST_KEY_AUTO_ADD:
# TODO: move to plugin
default: False
description: 'TODO: write it'
env: [{name: ANSIBLE_PARAMIKO_HOST_KEY_AUTO_ADD}]
ini:
- {key: host_key_auto_add, section: paramiko_connection}
type: boolean
PARAMIKO_LOOK_FOR_KEYS:
name: look for keys
default: True
description: 'TODO: write it'
env: [{name: ANSIBLE_PARAMIKO_LOOK_FOR_KEYS}]
ini:
- {key: look_for_keys, section: paramiko_connection}
type: boolean
PERSISTENT_CONTROL_PATH_DIR:
name: Persistence socket path
default: ~/.ansible/pc
description: Path to socket to be used by the connection persistence system.
env: [{name: ANSIBLE_PERSISTENT_CONTROL_PATH_DIR}]
ini:
- {key: control_path_dir, section: persistent_connection}
type: path
PERSISTENT_CONNECT_TIMEOUT:
name: Persistence timeout
default: 30
description: This controls how long the persistent connection will remain idle before it is destroyed.
env: [{name: ANSIBLE_PERSISTENT_CONNECT_TIMEOUT}]
ini:
- {key: connect_timeout, section: persistent_connection}
type: integer
PERSISTENT_CONNECT_RETRY_TIMEOUT:
name: Persistence connection retry timeout
default: 15
description: This controls the retry timeout for persistent connection to connect to the local domain socket.
env: [{name: ANSIBLE_PERSISTENT_CONNECT_RETRY_TIMEOUT}]
ini:
- {key: connect_retry_timeout, section: persistent_connection}
type: integer
PERSISTENT_COMMAND_TIMEOUT:
name: Persistence command timeout
default: 30
description: This controls the amount of time to wait for response from remote device before timing out persistent connection.
env: [{name: ANSIBLE_PERSISTENT_COMMAND_TIMEOUT}]
ini:
- {key: command_timeout, section: persistent_connection}
type: int
PLAYBOOK_DIR:
name: playbook dir override for non-playbook CLIs (ala --playbook-dir)
version_added: "2.9"
description:
- A number of non-playbook CLIs have a ``--playbook-dir`` argument; this sets the default value for it.
env: [{name: ANSIBLE_PLAYBOOK_DIR}]
ini: [{key: playbook_dir, section: defaults}]
type: path
PLAYBOOK_VARS_ROOT:
name: playbook vars files root
default: top
version_added: "2.4.1"
description:
- This sets which playbook dirs will be used as a root to process vars plugins, which includes finding host_vars/group_vars
- The ``top`` option follows the traditional behaviour of using the top playbook in the chain to find the root directory.
- The ``bottom`` option follows the 2.4.0 behaviour of using the current playbook to find the root directory.
- The ``all`` option examines from the first parent to the current playbook.
env: [{name: ANSIBLE_PLAYBOOK_VARS_ROOT}]
ini:
- {key: playbook_vars_root, section: defaults}
choices: [ top, bottom, all ]
PLUGIN_FILTERS_CFG:
name: Config file for limiting valid plugins
default: null
version_added: "2.5.0"
description:
- "A path to configuration for filtering which plugins installed on the system are allowed to be used."
- "See :ref:`plugin_filtering_config` for details of the filter file's format."
- " The default is /etc/ansible/plugin_filters.yml"
ini:
- key: plugin_filters_cfg
section: default
deprecated:
why: specifying "plugin_filters_cfg" under the "default" section is deprecated
version: "2.12"
alternatives: the "defaults" section instead
- key: plugin_filters_cfg
section: defaults
type: path
PYTHON_MODULE_RLIMIT_NOFILE:
name: Adjust maximum file descriptor soft limit during Python module execution
description:
- Attempts to set RLIMIT_NOFILE soft limit to the specified value when executing Python modules (can speed up subprocess usage on
Python 2.x. See https://bugs.python.org/issue11284). The value will be limited by the existing hard limit. Default
value of 0 does not attempt to adjust existing system-defined limits.
default: 0
env:
- {name: ANSIBLE_PYTHON_MODULE_RLIMIT_NOFILE}
ini:
- {key: python_module_rlimit_nofile, section: defaults}
vars:
- {name: ansible_python_module_rlimit_nofile}
version_added: '2.8'
RETRY_FILES_ENABLED:
name: Retry files
default: False
description: This controls whether a failed Ansible playbook should create a .retry file.
env: [{name: ANSIBLE_RETRY_FILES_ENABLED}]
ini:
- {key: retry_files_enabled, section: defaults}
type: bool
RETRY_FILES_SAVE_PATH:
name: Retry files path
default: ~
description:
- This sets the path in which Ansible will save .retry files when a playbook fails and retry files are enabled.
- This file will be overwritten after each run with the list of failed hosts from all plays.
env: [{name: ANSIBLE_RETRY_FILES_SAVE_PATH}]
ini:
- {key: retry_files_save_path, section: defaults}
type: path
RUN_VARS_PLUGINS:
name: When should vars plugins run relative to inventory
default: demand
description:
- This setting can be used to optimize vars_plugin usage depending on user's inventory size and play selection.
- Setting to C(demand) will run vars_plugins relative to inventory sources anytime vars are 'demanded' by tasks.
- Setting to C(start) will run vars_plugins relative to inventory sources after importing that inventory source.
env: [{name: ANSIBLE_RUN_VARS_PLUGINS}]
ini:
- {key: run_vars_plugins, section: defaults}
type: str
choices: ['demand', 'start']
version_added: "2.10"
SHOW_CUSTOM_STATS:
name: Display custom stats
default: False
description: 'This adds the custom stats set via the set_stats plugin to the default output'
env: [{name: ANSIBLE_SHOW_CUSTOM_STATS}]
ini:
- {key: show_custom_stats, section: defaults}
type: bool
STRING_TYPE_FILTERS:
name: Filters to preserve strings
default: [string, to_json, to_nice_json, to_yaml, to_nice_yaml, ppretty, json]
description:
- "This list of filters avoids 'type conversion' when templating variables"
- Useful when you want to avoid conversion into lists or dictionaries for JSON strings, for example.
env: [{name: ANSIBLE_STRING_TYPE_FILTERS}]
ini:
- {key: dont_type_filters, section: jinja2}
type: list
SYSTEM_WARNINGS:
name: System warnings
default: True
description:
- Allows disabling of warnings related to potential issues on the system running ansible itself (not on the managed hosts)
- These may include warnings about 3rd party packages or other conditions that should be resolved if possible.
env: [{name: ANSIBLE_SYSTEM_WARNINGS}]
ini:
- {key: system_warnings, section: defaults}
type: boolean
TAGS_RUN:
name: Run Tags
default: []
type: list
description: default list of tags to run in your plays, Skip Tags has precedence.
env: [{name: ANSIBLE_RUN_TAGS}]
ini:
- {key: run, section: tags}
version_added: "2.5"
TAGS_SKIP:
name: Skip Tags
default: []
type: list
description: default list of tags to skip in your plays, has precedence over Run Tags
env: [{name: ANSIBLE_SKIP_TAGS}]
ini:
- {key: skip, section: tags}
version_added: "2.5"
TASK_TIMEOUT:
name: Task Timeout
default: 0
description:
- Set the maximum time (in seconds) that a task can run for.
- If set to 0 (the default) there is no timeout.
env: [{name: ANSIBLE_TASK_TIMEOUT}]
ini:
- {key: task_timeout, section: defaults}
type: integer
version_added: '2.10'
WORKER_SHUTDOWN_POLL_COUNT:
name: Worker Shutdown Poll Count
default: 0
description:
- The maximum number of times to check Task Queue Manager worker processes to verify they have exited cleanly.
- After this limit is reached any worker processes still running will be terminated.
- This is for internal use only.
env: [{name: ANSIBLE_WORKER_SHUTDOWN_POLL_COUNT}]
type: integer
version_added: '2.10'
WORKER_SHUTDOWN_POLL_DELAY:
name: Worker Shutdown Poll Delay
default: 0.1
description:
- The number of seconds to sleep between polling loops when checking Task Queue Manager worker processes to verify they have exited cleanly.
- This is for internal use only.
env: [{name: ANSIBLE_WORKER_SHUTDOWN_POLL_DELAY}]
type: float
version_added: '2.10'
USE_PERSISTENT_CONNECTIONS:
name: Persistence
default: False
description: Toggles the use of persistence for connections.
env: [{name: ANSIBLE_USE_PERSISTENT_CONNECTIONS}]
ini:
- {key: use_persistent_connections, section: defaults}
type: boolean
VARIABLE_PLUGINS_ENABLED:
name: Vars plugin enabled list
default: ['host_group_vars']
description: Whitelist for variable plugins that require it.
env: [{name: ANSIBLE_VARS_ENABLED}]
ini:
- {key: vars_plugins_enabled, section: defaults}
type: list
version_added: "2.10"
VARIABLE_PRECEDENCE:
name: Group variable precedence
default: ['all_inventory', 'groups_inventory', 'all_plugins_inventory', 'all_plugins_play', 'groups_plugins_inventory', 'groups_plugins_play']
description: Allows to change the group variable precedence merge order.
env: [{name: ANSIBLE_PRECEDENCE}]
ini:
- {key: precedence, section: defaults}
type: list
version_added: "2.4"
WIN_ASYNC_STARTUP_TIMEOUT:
name: Windows Async Startup Timeout
default: 5
description:
- For asynchronous tasks in Ansible (covered in Asynchronous Actions and Polling),
this is how long, in seconds, to wait for the task spawned by Ansible to connect back to the named pipe used
on Windows systems. The default is 5 seconds. This can be too low on slower systems, or systems under heavy load.
- This is not the total time an async command can run for, but is a separate timeout to wait for an async command to
start. The task will only start to be timed against its async_timeout once it has connected to the pipe, so the
overall maximum duration the task can take will be extended by the amount specified here.
env: [{name: ANSIBLE_WIN_ASYNC_STARTUP_TIMEOUT}]
ini:
- {key: win_async_startup_timeout, section: defaults}
type: integer
vars:
- {name: ansible_win_async_startup_timeout}
version_added: '2.10'
YAML_FILENAME_EXTENSIONS:
name: Valid YAML extensions
default: [".yml", ".yaml", ".json"]
description:
- "Check all of these extensions when looking for 'variable' files which should be YAML or JSON or vaulted versions of these."
- 'This affects vars_files, include_vars, inventory and vars plugins among others.'
env:
- name: ANSIBLE_YAML_FILENAME_EXT
ini:
- section: defaults
key: yaml_valid_extensions
type: list
NETCONF_SSH_CONFIG:
description: This variable is used to enable bastion/jump host with netconf connection. If set to True the bastion/jump
host ssh settings should be present in ~/.ssh/config file, alternatively it can be set
to custom ssh configuration file path to read the bastion/jump host settings.
env: [{name: ANSIBLE_NETCONF_SSH_CONFIG}]
ini:
- {key: ssh_config, section: netconf_connection}
yaml: {key: netconf_connection.ssh_config}
default: null
STRING_CONVERSION_ACTION:
version_added: '2.8'
description:
- Action to take when a module parameter value is converted to a string (this does not affect variables).
For string parameters, values such as '1.00', "['a', 'b',]", and 'yes', 'y', etc.
will be converted by the YAML parser unless fully quoted.
- Valid options are 'error', 'warn', and 'ignore'.
- Since 2.8, this option defaults to 'warn' but will change to 'error' in 2.12.
default: 'warn'
env:
- name: ANSIBLE_STRING_CONVERSION_ACTION
ini:
- section: defaults
key: string_conversion_action
type: string
VERBOSE_TO_STDERR:
version_added: '2.8'
description:
- Force 'verbose' option to use stderr instead of stdout
default: False
env:
- name: ANSIBLE_VERBOSE_TO_STDERR
ini:
- section: defaults
key: verbose_to_stderr
type: bool
...
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,134 |
conditional contains deprecated call to be removed in 2.12
|
##### SUMMARY
conditional contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/playbook/conditional.py:144:16: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/conditional.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74134
|
https://github.com/ansible/ansible/pull/74208
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
|
19aeb4706d7f7a984daf2a70f16d69a7dccde484
| 2021-04-05T20:33:56Z |
python
| 2021-04-13T15:12:10Z |
lib/ansible/playbook/conditional.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import re
from jinja2.compiler import generate
from jinja2.exceptions import UndefinedError
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleUndefinedVariable
from ansible.module_utils.six import text_type
from ansible.module_utils._text import to_native
from ansible.playbook.attribute import FieldAttribute
from ansible.utils.display import Display
display = Display()
DEFINED_REGEX = re.compile(r'(hostvars\[.+\]|[\w_]+)\s+(not\s+is|is|is\s+not)\s+(defined|undefined)')
LOOKUP_REGEX = re.compile(r'lookup\s*\(')
VALID_VAR_REGEX = re.compile("^[_A-Za-z][_a-zA-Z0-9]*$")
class Conditional:
'''
This is a mix-in class, to be used with Base to allow the object
to be run conditionally when a condition is met or skipped.
'''
_when = FieldAttribute(isa='list', default=list, extend=True, prepend=True)
def __init__(self, loader=None):
# when used directly, this class needs a loader, but we want to
# make sure we don't trample on the existing one if this class
# is used as a mix-in with a playbook base class
if not hasattr(self, '_loader'):
if loader is None:
raise AnsibleError("a loader must be specified when using Conditional() directly")
else:
self._loader = loader
super(Conditional, self).__init__()
def _validate_when(self, attr, name, value):
if not isinstance(value, list):
setattr(self, name, [value])
def extract_defined_undefined(self, conditional):
results = []
cond = conditional
m = DEFINED_REGEX.search(cond)
while m:
results.append(m.groups())
cond = cond[m.end():]
m = DEFINED_REGEX.search(cond)
return results
def evaluate_conditional(self, templar, all_vars):
'''
Loops through the conditionals set on this object, returning
False if any of them evaluate as such.
'''
# since this is a mix-in, it may not have an underlying datastructure
# associated with it, so we pull it out now in case we need it for
# error reporting below
ds = None
if hasattr(self, '_ds'):
ds = getattr(self, '_ds')
result = True
try:
for conditional in self.when:
# do evaluation
if conditional is None or conditional == '':
res = True
elif isinstance(conditional, bool):
res = conditional
else:
res = self._check_conditional(conditional, templar, all_vars)
# only update if still true, preserve false
if result:
result = res
display.debug("Evaluated conditional (%s): %s" % (conditional, res))
if not result:
break
except Exception as e:
raise AnsibleError("The conditional check '%s' failed. The error was: %s" % (to_native(conditional), to_native(e)), obj=ds)
return result
    def _check_conditional(self, conditional, templar, all_vars):
        '''
        This method does the low-level evaluation of each conditional
        set on this object, using jinja2 to wrap the conditionals for
        evaluation.

        :arg conditional: the raw conditional expression, usually a string
        :arg templar: Templar instance used to render the expression
        :arg all_vars: mapping of variables made available to the templar
        :returns: True or False for an evaluated conditional; if templating
            yields a non-string (or an empty string), that value is returned
            to the caller unchanged
        :raises AnsibleError: when the conditional is invalid, contains
            forbidden constructs, or evaluates to neither "True" nor "False"
        :raises AnsibleUndefinedVariable: when an undefined variable is hit
            and the conditional is not an explicit defined/undefined test
            for that same variable
        '''
        # keep the untouched expression for error messages and the deprecation warning below
        original = conditional
        # warn when the expression itself already contains jinja2 delimiters
        if templar.is_template(conditional):
            display.warning('conditional statements should not include jinja2 '
                            'templating delimiters such as {{ }} or {%% %%}. '
                            'Found: %s' % conditional)
        bare_vars_warning = False
        if C.CONDITIONAL_BARE_VARS:
            # legacy CONDITIONAL_BARE_VARS behaviour: a conditional that is exactly a
            # known variable name is replaced by that variable's value before templating
            if conditional in all_vars and VALID_VAR_REGEX.match(conditional):
                conditional = all_vars[conditional]
                bare_vars_warning = True
        # make sure the templar is using the variables specified with this method
        templar.available_variables = all_vars
        try:
            # if the conditional is "unsafe", disable lookups
            disable_lookups = hasattr(conditional, '__UNSAFE__')
            conditional = templar.template(conditional, disable_lookups=disable_lookups)
            if bare_vars_warning and not isinstance(conditional, bool):
                # the bare-variable substitution above produced something other than a
                # boolean, so the result may change when this behaviour goes away
                display.deprecated('evaluating %r as a bare variable, this behaviour will go away and you might need to add " | bool"'
                                   ' (if you would like to evaluate input string from prompt) or " is truthy"'
                                   ' (if you would like to apply Python\'s evaluation method) to the expression in the future. '
                                   'Also see CONDITIONAL_BARE_VARS configuration toggle' % original,
                                   version="2.12", collection_name='ansible.builtin')
            # a non-string result (e.g. already a bool) or an empty string needs no
            # further evaluation; hand it straight back to the caller
            if not isinstance(conditional, text_type) or conditional == "":
                return conditional
            # update the lookups flag, as the string returned above may now be unsafe
            # and we don't want future templating calls to do unsafe things
            disable_lookups |= hasattr(conditional, '__UNSAFE__')
            # First, we do some low-level jinja2 parsing involving the AST format of the
            # statement to ensure we don't do anything unsafe (using the disable_lookup flag above)
            class CleansingNodeVisitor(ast.NodeVisitor):
                # Walks the Python AST of the compiled jinja2 expression and, when
                # lookups are disabled, rejects string constants that could smuggle
                # dunder access into a call or hide payloads inside a yield.
                def generic_visit(self, node, inside_call=False, inside_yield=False):
                    if isinstance(node, ast.Call):
                        inside_call = True
                    elif isinstance(node, ast.Yield):
                        inside_yield = True
                    elif isinstance(node, ast.Str):
                        if disable_lookups:
                            if inside_call and node.s.startswith("__"):
                                # calling things with a dunder is generally bad at this point...
                                raise AnsibleError(
                                    "Invalid access found in the conditional: '%s'" % conditional
                                )
                            elif inside_yield:
                                # we're inside a yield, so recursively parse and traverse the AST
                                # of the result to catch forbidden syntax from executing
                                parsed = ast.parse(node.s, mode='exec')
                                cnv = CleansingNodeVisitor()
                                cnv.visit(parsed)
                    # iterate over all child nodes
                    for child_node in ast.iter_child_nodes(node):
                        self.generic_visit(
                            child_node,
                            inside_call=inside_call,
                            inside_yield=inside_yield
                        )
            try:
                # compile the jinja2 expression to Python source (parse + generate),
                # then parse that source so the visitor above can inspect it
                res = templar.environment.parse(conditional, None, None)
                res = generate(res, templar.environment, None, None)
                parsed = ast.parse(res, mode='exec')
                cnv = CleansingNodeVisitor()
                cnv.visit(parsed)
            except Exception as e:
                raise AnsibleError("Invalid conditional detected: %s" % to_native(e))
            # and finally we generate and template the presented string and look at the resulting string
            presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional
            val = templar.template(presented, disable_lookups=disable_lookups).strip()
            if val == "True":
                return True
            elif val == "False":
                return False
            else:
                raise AnsibleError("unable to evaluate conditional: %s" % original)
        except (AnsibleUndefinedVariable, UndefinedError) as e:
            # the templating failed, meaning most likely a variable was undefined. If we happened
            # to be looking for an undefined variable, return True, otherwise fail
            try:
                # first we extract the variable name from the error message
                var_name = re.compile(r"'(hostvars\[.+\]|[\w_]+)' is undefined").search(str(e)).groups()[0]
                # next we extract all defined/undefined tests from the conditional string
                def_undef = self.extract_defined_undefined(conditional)
                # then we loop through these, comparing the error variable name against
                # each def/undef test we found above. If there is a match, we determine
                # whether the logic/state mean the variable should exist or not and return
                # the corresponding True/False
                for (du_var, logic, state) in def_undef:
                    # when we compare the var names, normalize quotes because something
                    # like hostvars['foo'] may be tested against hostvars["foo"]
                    if var_name.replace("'", '"') == du_var.replace("'", '"'):
                        # the should exist is a xor test between a negation in the logic portion
                        # against the state (defined or undefined)
                        should_exist = ('not' in logic) != (state == 'defined')
                        if should_exist:
                            return False
                        else:
                            return True
                # as nothing above matched the failed var name, re-raise here to
                # trigger the AnsibleUndefinedVariable exception again below
                raise
            except Exception:
                raise AnsibleUndefinedVariable("error while evaluating conditional (%s): %s" % (original, e))
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,134 |
conditional contains deprecated call to be removed in 2.12
|
##### SUMMARY
conditional contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/playbook/conditional.py:144:16: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/conditional.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74134
|
https://github.com/ansible/ansible/pull/74208
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
|
19aeb4706d7f7a984daf2a70f16d69a7dccde484
| 2021-04-05T20:33:56Z |
python
| 2021-04-13T15:12:10Z |
test/integration/targets/conditionals/play.yml
|
# (c) 2014, James Cammarata <[email protected]>
# (c) 2019, Ansible Project
- hosts: testhost
gather_facts: false
vars_files:
- vars/main.yml
tasks:
- name: set conditional bare vars status
set_fact:
bare: "{{lookup('config', 'CONDITIONAL_BARE_VARS')|bool}}"
- name: test conditional '=='
shell: echo 'testing'
when: 1 == 1
register: result
- name: assert conditional '==' ran
assert:
that:
- result is changed
- "result.stdout == 'testing'"
- "result.rc == 0"
- name: test bad conditional '=='
shell: echo 'testing'
when: 0 == 1
register: result
- name: assert bad conditional '==' did NOT run
assert:
that:
- result is skipped
- name: test conditional '!='
shell: echo 'testing'
when: 0 != 1
register: result
- name: assert conditional '!=' ran
assert:
that:
- result is changed
- "result.stdout == 'testing'"
- "result.rc == 0"
- name: test bad conditional '!='
shell: echo 'testing'
when: 1 != 1
register: result
- name: assert bad conditional '!=' did NOT run
assert:
that:
- result is skipped
- name: test conditional 'in'
shell: echo 'testing'
when: 1 in [1,2,3]
register: result
- name: assert conditional 'in' ran
assert:
that:
- result is changed
- "result.stdout == 'testing'"
- "result.rc == 0"
- name: test bad conditional 'in'
shell: echo 'testing'
when: 1 in [7,8,9]
register: result
- name: assert bad conditional 'in' did NOT run
assert:
that:
- result is skipped
- name: test conditional 'not in'
shell: echo 'testing'
when: 0 not in [1,2,3]
register: result
- name: assert conditional 'not in' ran
assert:
that:
- result is changed
- "result.stdout == 'testing'"
- "result.rc == 0"
- name: test bad conditional 'not in'
shell: echo 'testing'
when: 1 not in [1,2,3]
register: result
- name: assert bad conditional 'not in' did NOT run
assert:
that:
- result is skipped
- name: test conditional 'is defined'
shell: echo 'testing'
when: test_bare is defined
register: result
- name: assert conditional 'is defined' ran
assert:
that:
- result is changed
- "result.stdout == 'testing'"
- "result.rc == 0"
- name: test bad conditional 'is defined'
shell: echo 'testing'
when: foo_asdf_xyz is defined
register: result
- name: assert bad conditional 'is defined' did NOT run
assert:
that:
- result is skipped
- name: test conditional 'is not defined'
shell: echo 'testing'
when: foo_asdf_xyz is not defined
register: result
- name: assert conditional 'is not defined' ran
assert:
that:
- result is changed
- "result.stdout == 'testing'"
- "result.rc == 0"
- name: test bad conditional 'is not defined'
shell: echo 'testing'
when: test_bare is not defined
register: result
- name: assert bad conditional 'is not defined' did NOT run
assert:
that:
- result is skipped
- name: test bad conditional 'is undefined'
shell: echo 'testing'
when: test_bare is undefined
register: result
- name: assert bad conditional 'is undefined' did NOT run
assert:
that:
- result is skipped
- name: test bare conditional
shell: echo 'testing'
when: test_bare
register: result
- name: assert bare conditional ran
assert:
that:
- result is changed
- "result.stdout == 'testing'"
- "result.rc == 0"
- name: test conditional using a variable
shell: echo 'testing'
when: test_bare_var == 123
register: result
- name: assert conditional using a variable ran
assert:
that:
- result is changed
- "result.stdout == 'testing'"
- "result.rc == 0"
- name: test good conditional based on nested variables
shell: echo 'testing'
when: test_bare_nested_good
register: result
- name: assert good conditional based on nested var ran
assert:
that:
- result is changed
- "result.stdout == 'testing'"
- "result.rc == 0"
- name: test bad conditional based on nested variables
shell: echo 'testing'
when: test_bare_nested_bad
register: result
- debug: var={{item}}
loop:
- bare
- result
- test_bare_nested_bad
- name: assert that the bad nested conditional is skipped since 'bare' since 'string' template is resolved to 'false'
assert:
that:
- result is skipped
when: bare|bool
    - name: assert that the bad nested conditional did run since non bare 'string' is untemplated but 'trueish'
      assert:
        that:
          - result is changed
      when: not bare|bool
- name: test bad conditional based on nested variables with bool filter
shell: echo 'testing'
when: test_bare_nested_bad|bool
register: result
- name: assert that the bad nested conditional did NOT run as bool forces evaluation
assert:
that:
- result is skipped
#-----------------------------------------------------------------------
# proper booleanification tests (issue #8629)
- name: set fact to string 'false'
set_fact: bool_test1=false
- name: set fact to string 'False'
set_fact: bool_test2=False
- name: set fact to a proper boolean using complex args
set_fact:
bool_test3: false
- name: "test boolean value 'false' string using 'when: var'"
command: echo 'hi'
when: bool_test1
register: result
- name: assert that the task did not run for 'false'
assert:
that:
- result is skipped
- name: "test boolean value 'false' string using 'when: not var'"
command: echo 'hi'
when: not bool_test1
register: result
- name: assert that the task DID run for not 'false'
assert:
that:
- result is changed
- name: "test boolean value of 'False' string using 'when: var'"
command: echo 'hi'
when: bool_test2
register: result
- name: assert that the task did not run for 'False'
assert:
that:
- result is skipped
- name: "test boolean value 'False' string using 'when: not var'"
command: echo 'hi'
when: not bool_test2
register: result
- name: assert that the task DID run for not 'False'
assert:
that:
- result is changed
- name: "test proper boolean value of complex arg using 'when: var'"
command: echo 'hi'
when: bool_test3
register: result
- name: assert that the task did not run for proper boolean false
assert:
that:
- result is skipped
- name: "test proper boolean value of complex arg using 'when: not var'"
command: echo 'hi'
when: not bool_test3
register: result
- name: assert that the task DID run for not false
assert:
that:
- result is changed
- set_fact: skipped_bad_attribute=True
- block:
- name: test a with_items loop using a variable with a missing attribute
debug: var=item
with_items: "{{cond_bad_attribute.results | default('')}}"
register: result
- set_fact: skipped_bad_attribute=False
- name: assert the task was skipped
assert:
that:
- skipped_bad_attribute
when: cond_bad_attribute is defined and 'results' in cond_bad_attribute
- name: test a with_items loop skipping a single item
debug: var=item
with_items: "{{cond_list_of_items.results}}"
when: item != 'b'
register: result
- debug: var=result
- name: assert only a single item was skipped
assert:
that:
- result.results|length == 3
- result.results[1].skipped
- name: test complex templated condition
debug: msg="it works"
when: vars_file_var in things1|union([vars_file_var])
- name: test dict with invalid key is undefined
vars:
mydict:
a: foo
b: bar
debug: var=mydict['c']
register: result
when: mydict['c'] is undefined
- name: assert the task did not fail
assert:
that:
- result is success
- name: test dict with invalid key does not run with conditional is defined
vars:
mydict:
a: foo
b: bar
debug: var=mydict['c']
when: mydict['c'] is defined
register: result
- name: assert the task was skipped
assert:
that:
- result is skipped
- name: test list with invalid element does not run with conditional is defined
vars:
mylist: []
debug: var=mylist[0]
when: mylist[0] is defined
register: result
- name: assert the task was skipped
assert:
that:
- result is skipped
- name: test list with invalid element is undefined
vars:
mylist: []
debug: var=mylist[0]
when: mylist[0] is undefined
register: result
- name: assert the task did not fail
assert:
that:
- result is success
- name: Deal with multivar equality
tags: ['leveldiff']
when: not bare|bool
vars:
toplevel_hash:
hash_var_one: justastring
hash_var_two: something.with.dots
hash_var_three: something:with:colons
hash_var_four: something/with/slashes
hash_var_five: something with spaces
hash_var_six: yes
hash_var_seven: no
toplevel_var_one: justastring
toplevel_var_two: something.with.dots
toplevel_var_three: something:with:colons
toplevel_var_four: something/with/slashes
toplevel_var_five: something with spaces
toplevel_var_six: yes
toplevel_var_seven: no
block:
- name: var subkey simple string
debug:
var: toplevel_hash.hash_var_one
register: sub
when: toplevel_hash.hash_var_one
- name: toplevel simple string
debug:
var: toplevel_var_one
when: toplevel_var_one
register: top
ignore_errors: yes
- name: ensure top and multi work same
assert:
that:
- top is not skipped
- sub is not skipped
- top is not failed
- sub is not failed
- name: var subkey string with dots
debug:
var: toplevel_hash.hash_var_two
register: sub
when: toplevel_hash.hash_var_two
- debug:
var: toplevel_var_two
when: toplevel_var_two
register: top
ignore_errors: yes
- name: ensure top and multi work same
assert:
that:
- top is not skipped
- sub is not skipped
- top is not failed
- sub is not failed
- name: var subkey string with dots
debug:
var: toplevel_hash.hash_var_three
register: sub
when: toplevel_hash.hash_var_three
- debug:
var: toplevel_var_three
when: toplevel_var_three
register: top
ignore_errors: yes
- name: ensure top and multi work same
assert:
that:
- top is not skipped
- sub is not skipped
- top is not failed
- sub is not failed
- name: var subkey string with colon
debug:
var: toplevel_hash.hash_var_four
register: sub
when: toplevel_hash.hash_var_four
- debug:
var: toplevel_var_four
when: toplevel_var_four
register: top
ignore_errors: yes
- name: ensure top and multi work same
assert:
that:
- top is not skipped
- sub is not skipped
- top is not failed
- sub is not failed
- name: var subkey string with spaces
debug:
var: toplevel_hash.hash_var_five
register: sub
when: toplevel_hash.hash_var_five
- debug:
var: toplevel_var_five
when: toplevel_var_five
register: top
ignore_errors: yes
- name: ensure top and multi work same
assert:
that:
- top is not skipped
- sub is not skipped
- top is not failed
- sub is not failed
- name: var subkey with 'yes' value
debug:
var: toplevel_hash.hash_var_six
register: sub
when: toplevel_hash.hash_var_six
- debug:
var: toplevel_var_six
register: top
when: toplevel_var_six
- name: ensure top and multi work same
assert:
that:
- top is not skipped
- sub is not skipped
- name: var subkey with 'no' value
debug:
var: toplevel_hash.hash_var_seven
register: sub
when: toplevel_hash.hash_var_seven
- debug:
var: toplevel_var_seven
register: top
when: toplevel_var_seven
- name: ensure top and multi work same
assert:
that:
- top is skipped
- sub is skipped
- name: test that 'comparison expression' item works with_items
assert:
that:
- item
with_items:
- 1 == 1
- name: test that 'comparison expression' item works in loop
assert:
that:
- item
loop:
- 1 == 1
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,134 |
conditional contains deprecated call to be removed in 2.12
|
##### SUMMARY
conditional contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/playbook/conditional.py:144:16: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/conditional.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74134
|
https://github.com/ansible/ansible/pull/74208
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
|
19aeb4706d7f7a984daf2a70f16d69a7dccde484
| 2021-04-05T20:33:56Z |
python
| 2021-04-13T15:12:10Z |
test/integration/targets/conditionals/runme.sh
|
#!/usr/bin/env bash

set -eux

# Run the conditional test play with bare-variable handling both enabled and
# disabled so both code paths in lib/ansible/playbook/conditional.py are covered.
ANSIBLE_CONDITIONAL_BARE_VARS=1 ansible-playbook -i ../../inventory play.yml "$@"
ANSIBLE_CONDITIONAL_BARE_VARS=0 ansible-playbook -i ../../inventory play.yml "$@"

# The remaining checks count deprecation warnings, so force bare vars on and
# make sure deprecation warnings are emitted for the runs below.
export ANSIBLE_CONDITIONAL_BARE_VARS=1
export ANSIBLE_DEPRECATION_WARNINGS=True

# No warnings for conditionals that are already type bool
test "$(ansible-playbook -i ../../inventory test_no_warnings.yml "$@" 2>&1 | grep -c '\[DEPRECATION WARNING\]')" = 0

# Warn for bare vars of other types since they may be interpreted differently when CONDITIONAL_BARE_VARS defaults to False
test "$(ansible-playbook -i ../../inventory test_warnings.yml "$@" 2>&1 | grep -c '\[DEPRECATION WARNING\]')" = 2
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,134 |
conditional contains deprecated call to be removed in 2.12
|
##### SUMMARY
conditional contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/playbook/conditional.py:144:16: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/conditional.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74134
|
https://github.com/ansible/ansible/pull/74208
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
|
19aeb4706d7f7a984daf2a70f16d69a7dccde484
| 2021-04-05T20:33:56Z |
python
| 2021-04-13T15:12:10Z |
test/integration/targets/conditionals/test_no_warnings.yml
|
- hosts: testhost
gather_facts: false
vars:
boolean_var: false
nested:
bool_var: false
tasks:
- name: Run tasks with previous warnings requesting the bool filter on type boolean vars
block:
- debug:
when: boolean_var
- debug:
when: nested.bool_var
- debug:
when: double_interpolated
vars:
double_interpolated: "{{ other }}"
other: false
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,134 |
conditional contains deprecated call to be removed in 2.12
|
##### SUMMARY
conditional contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/playbook/conditional.py:144:16: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/conditional.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74134
|
https://github.com/ansible/ansible/pull/74208
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
|
19aeb4706d7f7a984daf2a70f16d69a7dccde484
| 2021-04-05T20:33:56Z |
python
| 2021-04-13T15:12:10Z |
test/integration/targets/conditionals/test_warnings.yml
|
- hosts: testhost
gather_facts: false
vars:
str_boolean_var: 'false'
tasks:
- name: Run tasks with warnings for conditionals that will change in behavior depending on CONDITIONAL_BARE_VARS
block:
- debug:
when: str_boolean_var
- debug:
when: double_interpolated
vars:
double_interpolated: other
other: false
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,134 |
conditional contains deprecated call to be removed in 2.12
|
##### SUMMARY
conditional contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/playbook/conditional.py:144:16: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/conditional.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74134
|
https://github.com/ansible/ansible/pull/74208
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
|
19aeb4706d7f7a984daf2a70f16d69a7dccde484
| 2021-04-05T20:33:56Z |
python
| 2021-04-13T15:12:10Z |
test/integration/targets/conditionals/vars/main.yml
|
---
# foo is a dictionary that will be used to check that
# a conditional passes a with_items loop on a variable
# with a missing attribute (ie. foo.results)
cond_bad_attribute:
bar: a
cond_list_of_items:
results:
- a
- b
- c
things1:
- 1
- 2
vars_file_var: 321
test_bare: true
test_bare_var: 123
test_bare_nested_good: "test_bare_var == 123"
test_bare_nested_bad: "{{test_bare_var}} == 321"
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,134 |
conditional contains deprecated call to be removed in 2.12
|
##### SUMMARY
conditional contains call to Display.deprecated or AnsibleModule.deprecate and is scheduled for removal
```
lib/ansible/playbook/conditional.py:144:16: ansible-deprecated-version: Deprecated version ('2.12') found in call to Display.deprecated or AnsibleModule.deprecate (0%)
```
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
```
lib/ansible/playbook/conditional.py
```
##### ANSIBLE VERSION
```
2.12
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A
##### STEPS TO REPRODUCE
N/A
##### EXPECTED RESULTS
N/A
##### ACTUAL RESULTS
N/A
|
https://github.com/ansible/ansible/issues/74134
|
https://github.com/ansible/ansible/pull/74208
|
ce96591313b06563ede8adfd68a5cd7453eb9e02
|
19aeb4706d7f7a984daf2a70f16d69a7dccde484
| 2021-04-05T20:33:56Z |
python
| 2021-04-13T15:12:10Z |
test/sanity/ignore.txt
|
docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst no-smart-quotes
examples/play.yml shebang
examples/scripts/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/my_test.py shebang # example module but not in a normal module location
examples/scripts/my_test_facts.py shebang # example module but not in a normal module location
examples/scripts/my_test_info.py shebang # example module but not in a normal module location
examples/scripts/upgrade_to_ps3.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/upgrade_to_ps3.ps1 pslint:PSUseApprovedVerbs
hacking/build-ansible.py shebang # only run by release engineers, Python 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/announce.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_config.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.6!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.7!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/generate_man.py compile-3.5!skip # docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.6!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.7!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-3.5!skip # release process only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.6!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.7!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-3.5!skip # release process and docs build only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.6!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-2.7!skip # release and docs process only, 3.6+ required
hacking/build_library/build_ansible/commands.py compile-3.5!skip # release and docs process only, 3.6+ required
lib/ansible/cli/console.py pylint:blacklisted-name
lib/ansible/cli/galaxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/cli/scripts/ansible_cli_stub.py pylint:ansible-deprecated-version
lib/ansible/cli/scripts/ansible_cli_stub.py shebang
lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang
lib/ansible/config/base.yml no-unwanted-files
lib/ansible/executor/playbook_executor.py pylint:blacklisted-name
lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/task_queue_manager.py pylint:blacklisted-name
lib/ansible/galaxy/collection/__init__.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/collection/galaxy_api_proxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/dataclasses.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/galaxy/dependency_resolution/providers.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
lib/ansible/keyword_desc.yml no-unwanted-files
lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/compat/_selectors2.py pylint:blacklisted-name
lib/ansible/module_utils/compat/selinux.py import-2.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-2.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.5!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/distro/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py no-assert
lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
lib/ansible/module_utils/facts/__init__.py empty-init # breaks namespacing, deprecate and eventually remove
lib/ansible/module_utils/facts/network/linux.py pylint:blacklisted-name
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.ArgvParser.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSProvideCommentHelp # need to agree on best format for comment location
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.FileUtil.psm1 pslint:PSProvideCommentHelp
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSCustomUseLiteralPath
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.LinkUtil.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/pycompat24.py no-get-exception
lib/ansible/module_utils/six/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/six/__init__.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/six/__init__.py no-basestring
lib/ansible/module_utils/six/__init__.py no-dict-iteritems
lib/ansible/module_utils/six/__init__.py no-dict-iterkeys
lib/ansible/module_utils/six/__init__.py no-dict-itervalues
lib/ansible/module_utils/six/__init__.py pylint:self-assigning-variable
lib/ansible/module_utils/six/__init__.py replace-urlopen
lib/ansible/module_utils/urls.py pylint:blacklisted-name
lib/ansible/module_utils/urls.py replace-urlopen
lib/ansible/modules/apt.py validate-modules:parameter-invalid
lib/ansible/modules/apt_key.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/apt_repository.py validate-modules:parameter-invalid
lib/ansible/modules/assemble.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/async_status.py use-argspec-type-path
lib/ansible/modules/async_status.py validate-modules!skip
lib/ansible/modules/async_wrapper.py ansible-doc!skip # not an actual module
lib/ansible/modules/async_wrapper.py pylint:ansible-bad-function # ignore, required
lib/ansible/modules/async_wrapper.py use-argspec-type-path
lib/ansible/modules/blockinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/blockinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/command.py validate-modules:doc-default-does-not-match-spec # _uses_shell is undocumented
lib/ansible/modules/command.py validate-modules:doc-missing-type
lib/ansible/modules/command.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/command.py validate-modules:undocumented-parameter
lib/ansible/modules/copy.py pylint:blacklisted-name
lib/ansible/modules/copy.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/copy.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/copy.py validate-modules:undocumented-parameter
lib/ansible/modules/dnf.py validate-modules:doc-required-mismatch
lib/ansible/modules/dnf.py validate-modules:parameter-invalid
lib/ansible/modules/file.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/file.py validate-modules:undocumented-parameter
lib/ansible/modules/find.py use-argspec-type-path # fix needed
lib/ansible/modules/git.py pylint:blacklisted-name
lib/ansible/modules/git.py use-argspec-type-path
lib/ansible/modules/git.py validate-modules:doc-missing-type
lib/ansible/modules/git.py validate-modules:doc-required-mismatch
lib/ansible/modules/hostname.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/iptables.py pylint:blacklisted-name
lib/ansible/modules/lineinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/package_facts.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/pip.py pylint:blacklisted-name
lib/ansible/modules/pip.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/replace.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:use-run-command-not-popen
lib/ansible/modules/stat.py validate-modules:doc-default-does-not-match-spec # get_md5 is undocumented
lib/ansible/modules/stat.py validate-modules:parameter-invalid
lib/ansible/modules/stat.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/stat.py validate-modules:undocumented-parameter
lib/ansible/modules/systemd.py validate-modules:parameter-invalid
lib/ansible/modules/systemd.py validate-modules:return-syntax-error
lib/ansible/modules/sysvinit.py validate-modules:return-syntax-error
lib/ansible/modules/unarchive.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/uri.py pylint:blacklisted-name
lib/ansible/modules/uri.py validate-modules:doc-required-mismatch
lib/ansible/modules/user.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/user.py validate-modules:doc-default-incompatible-type
lib/ansible/modules/user.py validate-modules:use-run-command-not-popen
lib/ansible/modules/yum.py pylint:blacklisted-name
lib/ansible/modules/yum.py validate-modules:parameter-invalid
lib/ansible/modules/yum_repository.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/yum_repository.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/yum_repository.py validate-modules:undocumented-parameter
lib/ansible/parsing/vault/__init__.py pylint:blacklisted-name
lib/ansible/playbook/base.py pylint:blacklisted-name
lib/ansible/playbook/collectionsearch.py required-and-default-attributes # https://github.com/ansible/ansible/issues/61460
lib/ansible/playbook/conditional.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:ansible-deprecated-version
lib/ansible/playbook/helpers.py pylint:blacklisted-name
lib/ansible/playbook/play_context.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/async_status.py pylint:ansible-deprecated-version
lib/ansible/plugins/action/normal.py action-plugin-docs # default action plugin for modules without a dedicated action plugin
lib/ansible/plugins/cache/base.py ansible-doc!skip # not a plugin, but a stub for backwards compatibility
lib/ansible/plugins/inventory/script.py pylint:ansible-deprecated-version
lib/ansible/plugins/lookup/sequence.py pylint:blacklisted-name
lib/ansible/plugins/strategy/__init__.py pylint:ansible-deprecated-version
lib/ansible/plugins/strategy/__init__.py pylint:blacklisted-name
lib/ansible/plugins/strategy/linear.py pylint:blacklisted-name
lib/ansible/vars/hostvars.py pylint:blacklisted-name
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-function # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import-from # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
test/integration/targets/collections_plugin_namespace/collection_root/ansible_collections/my_ns/my_col/plugins/lookup/lookup_no_future_boilerplate.py future-import-boilerplate # testing Python 2.x implicit relative imports
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util2.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util3.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/my_module.py pylint:relative-beyond-top-level
test/integration/targets/gathering_facts/library/bogus_facts shebang
test/integration/targets/gathering_facts/library/facts_one shebang
test/integration/targets/gathering_facts/library/facts_two shebang
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps1 pslint!skip
test/integration/targets/incidental_win_reboot/templates/post_reboot.ps1 pslint!skip
test/integration/targets/json_cleanup/library/bad_json shebang
test/integration/targets/lookup_csvfile/files/crlf.csv line-endings
test/integration/targets/lookup_ini/lookup-8859-15.ini no-smart-quotes
test/integration/targets/module_precedence/lib_with_extension/a.ini shebang
test/integration/targets/module_precedence/lib_with_extension/ping.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/a.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/ping.ini shebang
test/integration/targets/module_utils/library/test.py future-import-boilerplate # allow testing of Python 2.x implicit relative imports
test/integration/targets/module_utils/module_utils/bar0/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/foo.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/sub/bar/bar.py pylint:blacklisted-name
test/integration/targets/module_utils/module_utils/yak/zebra/foo.py pylint:blacklisted-name
test/integration/targets/old_style_modules_posix/library/helloworld.sh shebang
test/integration/targets/template/files/encoding_1252_utf-8.expected no-smart-quotes
test/integration/targets/template/files/encoding_1252_windows-1252.expected no-smart-quotes
test/integration/targets/template/files/foo.dos.txt line-endings
test/integration/targets/template/templates/encoding_1252.j2 no-smart-quotes
test/integration/targets/unicode/unicode.yml no-smart-quotes
test/integration/targets/win_exec_wrapper/library/test_fail.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_exec_wrapper/tasks/main.yml no-smart-quotes # We are explicitly testing smart quote support for env vars
test/integration/targets/win_fetch/tasks/main.yml no-smart-quotes # We are explictly testing smart quotes in the file name to fetch
test/integration/targets/win_module_utils/library/legacy_only_new_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_module_utils/library/legacy_only_old_way_win_line_ending.ps1 line-endings # Explicitly tests that we still work with Windows line endings
test/integration/targets/win_script/files/test_script.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_creates_file.ps1 pslint:PSAvoidUsingCmdletAliases
test/integration/targets/win_script/files/test_script_removes_file.ps1 pslint:PSCustomUseLiteralPath
test/integration/targets/win_script/files/test_script_with_args.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip
test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt test-constraints
test/lib/ansible_test/_data/requirements/sanity.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py use-compat-six
test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
test/support/integration/plugins/module_utils/aws/core.py pylint:property-with-parameters
test/support/integration/plugins/module_utils/cloud.py future-import-boilerplate
test/support/integration/plugins/module_utils/cloud.py metaclass-boilerplate
test/support/integration/plugins/module_utils/cloud.py pylint:isinstance-second-argument-not-valid-type
test/support/integration/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/integration/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/integration/plugins/module_utils/database.py future-import-boilerplate
test/support/integration/plugins/module_utils/database.py metaclass-boilerplate
test/support/integration/plugins/module_utils/mysql.py future-import-boilerplate
test/support/integration/plugins/module_utils/mysql.py metaclass-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/integration/plugins/module_utils/postgres.py future-import-boilerplate
test/support/integration/plugins/module_utils/postgres.py metaclass-boilerplate
test/support/integration/plugins/modules/lvg.py pylint:blacklisted-name
test/support/integration/plugins/modules/timezone.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py pep8:E203
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py pep8:E501
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pep8:E231
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pylint:blacklisted-name
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py future-import-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py metaclass-boilerplate
test/support/windows-integration/plugins/modules/async_status.ps1 pslint!skip
test/support/windows-integration/plugins/modules/setup.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_copy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_dsc.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_feature.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_find.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_lineinfile.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_regedit.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_security_policy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_shell.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_wait_for.ps1 pslint!skip
test/units/executor/test_play_iterator.py pylint:blacklisted-name
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-no-version
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-version
test/units/module_utils/basic/test_run_command.py pylint:blacklisted-name
test/units/module_utils/urls/fixtures/multipart.txt line-endings # Fixture for HTTP tests that use CRLF
test/units/module_utils/urls/test_Request.py replace-urlopen
test/units/module_utils/urls/test_fetch_url.py replace-urlopen
test/units/modules/test_apt.py pylint:blacklisted-name
test/units/parsing/vault/test_vault.py pylint:blacklisted-name
test/units/playbook/role/test_role.py pylint:blacklisted-name
test/units/plugins/test_plugins.py pylint:blacklisted-name
test/units/template/test_templar.py pylint:blacklisted-name
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py pylint:relative-beyond-top-level
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py future-import-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py metaclass-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/test_collection_loader.py pylint:undefined-variable # magic runtime local var splatting
test/utils/shippable/check_matrix.py replace-urlopen
test/utils/shippable/timing.py shebang
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,228 |
AttributeError: module 'distro' has no attribute 'id' when using the group and user modules
|
### Summary
When I try to create a group using the `group` module I get the `AttributeError: module 'distro' has no attribute 'id'` apearing on line 31 of `ansible/module_utils/common/sys_info.py`.
It doesn't reproduce on a Vagrant VM. So it may be something weird going ont with my own server.
### Issue Type
Bug Report
### Component Name
group
### Ansible Version
```console
ansible 2.9.18
config file = /etc/ansible/ansible.cfg
configured module search path = ['$HOME/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.9/site-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.2 (default, Feb 20 2021, 00:00:00) [GCC 10.2.1 20201125 (Red Hat 10.2.1-9)]
```
### Configuration
```console
DEFAULT_ROLES_PATH(env: ANSIBLE_ROLES_PATH) = ['$HOME/.ansible/roles']
```
### OS / Environment
Fedora 33
### Steps to Reproduce
```yaml
- name: Create funkwhale group
group:
name: "{{ funkwhale_group }}"
```
### Expected Results
I expected the group to be created.
### Actual Results
```console
I get the following traceback
The full traceback is:
Traceback (most recent call last):
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 102, in <module>
_ansiballz_main()
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 94, in _ansiballz_main
invoke_module(zipped_mod, temp_path, ANSIBALLZ_PARAMS)
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 40, in invoke_module
runpy.run_module(mod_name='ansible.modules.system.group', init_globals=None, run_name='__main__', alter_sys=True)
File "/usr/lib64/python3.9/runpy.py", line 210, in run_module
return _run_module_code(code, init_globals, run_name, mod_spec)
File "/usr/lib64/python3.9/runpy.py", line 97, in _run_module_code
_run_code(code, mod_globals, init_globals,
File "/usr/lib64/python3.9/runpy.py", line 87, in _run_code
exec(code, run_globals)
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 627, in <module>
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 575, in main
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 102, in __new__
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/basic.py", line 305, in load_platform_subclass
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/common/sys_info.py", line 144, in get_platform_subclass
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/common/sys_info.py", line 31, in get_distribution
AttributeError: module 'distro' has no attribute 'id'
```
### Code of Conduct
I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/74228
|
https://github.com/ansible/ansible/pull/74229
|
aae5bc5b9eaf3e4ae6d14dcba146fc39c5cf31e5
|
fa0bccf6a1f09c0118b1609aadd10f85927f5edb
| 2021-04-11T17:58:59Z |
python
| 2021-04-13T15:27:52Z |
changelogs/fragments/nonstandard-distro-fallback.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,228 |
AttributeError: module 'distro' has no attribute 'id' when using the group and user modules
|
### Summary
When I try to create a group using the `group` module I get the `AttributeError: module 'distro' has no attribute 'id'` apearing on line 31 of `ansible/module_utils/common/sys_info.py`.
It doesn't reproduce on a Vagrant VM. So it may be something weird going ont with my own server.
### Issue Type
Bug Report
### Component Name
group
### Ansible Version
```console
ansible 2.9.18
config file = /etc/ansible/ansible.cfg
configured module search path = ['$HOME/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.9/site-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.2 (default, Feb 20 2021, 00:00:00) [GCC 10.2.1 20201125 (Red Hat 10.2.1-9)]
```
### Configuration
```console
DEFAULT_ROLES_PATH(env: ANSIBLE_ROLES_PATH) = ['$HOME/.ansible/roles']
```
### OS / Environment
Fedora 33
### Steps to Reproduce
```yaml
- name: Create funkwhale group
group:
name: "{{ funkwhale_group }}"
```
### Expected Results
I expected the group to be created.
### Actual Results
```console
I get the following traceback
The full traceback is:
Traceback (most recent call last):
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 102, in <module>
_ansiballz_main()
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 94, in _ansiballz_main
invoke_module(zipped_mod, temp_path, ANSIBALLZ_PARAMS)
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 40, in invoke_module
runpy.run_module(mod_name='ansible.modules.system.group', init_globals=None, run_name='__main__', alter_sys=True)
File "/usr/lib64/python3.9/runpy.py", line 210, in run_module
return _run_module_code(code, init_globals, run_name, mod_spec)
File "/usr/lib64/python3.9/runpy.py", line 97, in _run_module_code
_run_code(code, mod_globals, init_globals,
File "/usr/lib64/python3.9/runpy.py", line 87, in _run_code
exec(code, run_globals)
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 627, in <module>
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 575, in main
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 102, in __new__
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/basic.py", line 305, in load_platform_subclass
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/common/sys_info.py", line 144, in get_platform_subclass
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/common/sys_info.py", line 31, in get_distribution
AttributeError: module 'distro' has no attribute 'id'
```
### Code of Conduct
I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/74228
|
https://github.com/ansible/ansible/pull/74229
|
aae5bc5b9eaf3e4ae6d14dcba146fc39c5cf31e5
|
fa0bccf6a1f09c0118b1609aadd10f85927f5edb
| 2021-04-11T17:58:59Z |
python
| 2021-04-13T15:27:52Z |
lib/ansible/module_utils/distro/__init__.py
|
# (c) 2018 Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
'''
Compat distro library.
'''
# The following makes it easier for us to script updates of the bundled code
_BUNDLED_METADATA = {"pypi_name": "distro", "version": "1.5.0"}
# The following additional changes have been made:
# * Remove optparse since it is not needed for our use.
# * A format string including {} has been changed to {0} (py2.6 compat)
# * Port two calls from subprocess.check_output to subprocess.Popen().communicate() (py2.6 compat)
import sys
try:
import distro as _system_distro
except ImportError:
_system_distro = None
if _system_distro:
distro = _system_distro
else:
# Our bundled copy
from ansible.module_utils.distro import _distro as distro
sys.modules['ansible.module_utils.distro'] = distro
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,228 |
AttributeError: module 'distro' has no attribute 'id' when using the group and user modules
|
### Summary
When I try to create a group using the `group` module I get the `AttributeError: module 'distro' has no attribute 'id'` apearing on line 31 of `ansible/module_utils/common/sys_info.py`.
It doesn't reproduce on a Vagrant VM. So it may be something weird going ont with my own server.
### Issue Type
Bug Report
### Component Name
group
### Ansible Version
```console
ansible 2.9.18
config file = /etc/ansible/ansible.cfg
configured module search path = ['$HOME/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.9/site-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.2 (default, Feb 20 2021, 00:00:00) [GCC 10.2.1 20201125 (Red Hat 10.2.1-9)]
```
### Configuration
```console
DEFAULT_ROLES_PATH(env: ANSIBLE_ROLES_PATH) = ['$HOME/.ansible/roles']
```
### OS / Environment
Fedora 33
### Steps to Reproduce
```yaml
- name: Create funkwhale group
group:
name: "{{ funkwhale_group }}"
```
### Expected Results
I expected the group to be created.
### Actual Results
```console
I get the following traceback
The full traceback is:
Traceback (most recent call last):
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 102, in <module>
_ansiballz_main()
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 94, in _ansiballz_main
invoke_module(zipped_mod, temp_path, ANSIBALLZ_PARAMS)
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 40, in invoke_module
runpy.run_module(mod_name='ansible.modules.system.group', init_globals=None, run_name='__main__', alter_sys=True)
File "/usr/lib64/python3.9/runpy.py", line 210, in run_module
return _run_module_code(code, init_globals, run_name, mod_spec)
File "/usr/lib64/python3.9/runpy.py", line 97, in _run_module_code
_run_code(code, mod_globals, init_globals,
File "/usr/lib64/python3.9/runpy.py", line 87, in _run_code
exec(code, run_globals)
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 627, in <module>
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 575, in main
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 102, in __new__
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/basic.py", line 305, in load_platform_subclass
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/common/sys_info.py", line 144, in get_platform_subclass
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/common/sys_info.py", line 31, in get_distribution
AttributeError: module 'distro' has no attribute 'id'
```
### Code of Conduct
I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/74228
|
https://github.com/ansible/ansible/pull/74229
|
aae5bc5b9eaf3e4ae6d14dcba146fc39c5cf31e5
|
fa0bccf6a1f09c0118b1609aadd10f85927f5edb
| 2021-04-11T17:58:59Z |
python
| 2021-04-13T15:27:52Z |
test/integration/targets/module_utils_distro/aliases
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,228 |
AttributeError: module 'distro' has no attribute 'id' when using the group and user modules
|
### Summary
When I try to create a group using the `group` module I get the `AttributeError: module 'distro' has no attribute 'id'` apearing on line 31 of `ansible/module_utils/common/sys_info.py`.
It doesn't reproduce on a Vagrant VM. So it may be something weird going ont with my own server.
### Issue Type
Bug Report
### Component Name
group
### Ansible Version
```console
ansible 2.9.18
config file = /etc/ansible/ansible.cfg
configured module search path = ['$HOME/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.9/site-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.2 (default, Feb 20 2021, 00:00:00) [GCC 10.2.1 20201125 (Red Hat 10.2.1-9)]
```
### Configuration
```console
DEFAULT_ROLES_PATH(env: ANSIBLE_ROLES_PATH) = ['$HOME/.ansible/roles']
```
### OS / Environment
Fedora 33
### Steps to Reproduce
```yaml
- name: Create funkwhale group
group:
name: "{{ funkwhale_group }}"
```
### Expected Results
I expected the group to be created.
### Actual Results
```console
I get the following traceback
The full traceback is:
Traceback (most recent call last):
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 102, in <module>
_ansiballz_main()
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 94, in _ansiballz_main
invoke_module(zipped_mod, temp_path, ANSIBALLZ_PARAMS)
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 40, in invoke_module
runpy.run_module(mod_name='ansible.modules.system.group', init_globals=None, run_name='__main__', alter_sys=True)
File "/usr/lib64/python3.9/runpy.py", line 210, in run_module
return _run_module_code(code, init_globals, run_name, mod_spec)
File "/usr/lib64/python3.9/runpy.py", line 97, in _run_module_code
_run_code(code, mod_globals, init_globals,
File "/usr/lib64/python3.9/runpy.py", line 87, in _run_code
exec(code, run_globals)
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 627, in <module>
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 575, in main
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 102, in __new__
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/basic.py", line 305, in load_platform_subclass
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/common/sys_info.py", line 144, in get_platform_subclass
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/common/sys_info.py", line 31, in get_distribution
AttributeError: module 'distro' has no attribute 'id'
```
### Code of Conduct
I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/74228
|
https://github.com/ansible/ansible/pull/74229
|
aae5bc5b9eaf3e4ae6d14dcba146fc39c5cf31e5
|
fa0bccf6a1f09c0118b1609aadd10f85927f5edb
| 2021-04-11T17:58:59Z |
python
| 2021-04-13T15:27:52Z |
test/integration/targets/module_utils_distro/meta/main.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,228 |
AttributeError: module 'distro' has no attribute 'id' when using the group and user modules
|
### Summary
When I try to create a group using the `group` module I get the `AttributeError: module 'distro' has no attribute 'id'` apearing on line 31 of `ansible/module_utils/common/sys_info.py`.
It doesn't reproduce on a Vagrant VM. So it may be something weird going ont with my own server.
### Issue Type
Bug Report
### Component Name
group
### Ansible Version
```console
ansible 2.9.18
config file = /etc/ansible/ansible.cfg
configured module search path = ['$HOME/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.9/site-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.2 (default, Feb 20 2021, 00:00:00) [GCC 10.2.1 20201125 (Red Hat 10.2.1-9)]
```
### Configuration
```console
DEFAULT_ROLES_PATH(env: ANSIBLE_ROLES_PATH) = ['$HOME/.ansible/roles']
```
### OS / Environment
Fedora 33
### Steps to Reproduce
```yaml
- name: Create funkwhale group
group:
name: "{{ funkwhale_group }}"
```
### Expected Results
I expected the group to be created.
### Actual Results
```console
I get the following traceback
The full traceback is:
Traceback (most recent call last):
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 102, in <module>
_ansiballz_main()
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 94, in _ansiballz_main
invoke_module(zipped_mod, temp_path, ANSIBALLZ_PARAMS)
File "/home/fedora/.ansible/tmp/ansible-tmp-1618162665.1532395-4847-266323418990696/AnsiballZ_group.py", line 40, in invoke_module
runpy.run_module(mod_name='ansible.modules.system.group', init_globals=None, run_name='__main__', alter_sys=True)
File "/usr/lib64/python3.9/runpy.py", line 210, in run_module
return _run_module_code(code, init_globals, run_name, mod_spec)
File "/usr/lib64/python3.9/runpy.py", line 97, in _run_module_code
_run_code(code, mod_globals, init_globals,
File "/usr/lib64/python3.9/runpy.py", line 87, in _run_code
exec(code, run_globals)
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 627, in <module>
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 575, in main
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/modules/system/group.py", line 102, in __new__
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/basic.py", line 305, in load_platform_subclass
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/common/sys_info.py", line 144, in get_platform_subclass
File "/tmp/ansible_group_payload_ee29ndup/ansible_group_payload.zip/ansible/module_utils/common/sys_info.py", line 31, in get_distribution
AttributeError: module 'distro' has no attribute 'id'
```
### Code of Conduct
I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/74228
|
https://github.com/ansible/ansible/pull/74229
|
aae5bc5b9eaf3e4ae6d14dcba146fc39c5cf31e5
|
fa0bccf6a1f09c0118b1609aadd10f85927f5edb
| 2021-04-11T17:58:59Z |
python
| 2021-04-13T15:27:52Z |
test/integration/targets/module_utils_distro/runme.sh
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,996 |
ERROR! Unexpected Exception, this is probably a bug: maximum recursion depth exceeded
|
### Summary
Hi
I have tried to reproduce live-ish output for the long running job with role in shell but after exactly 240 tries I am getting
`ERROR! Unexpected Exception, this is probably a bug: maximum recursion depth exceeded`
tried python2 and 3 as interpreter on version ansible 2.10.7 as well as 2.9.19.post0
### Issue Type
Bug Report
### Component Name
simple sh scrpit with tail on it
### Ansible Version
```console (paste below)
$ ansible --version
/usr/local/lib/python2.7/dist-packages/ansible/parsing/vault/__init__.py:44: CryptographyDeprecationWarning: Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography, and will be removed in the next release.
from cryptography.exceptions import InvalidSignature
ansible 2.10.7
config file = /home/jenkins/ethernal-loop/ansible.cfg
configured module search path = [u'/home/jenkins/.ansible/plugins/modules', u'/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python2.7/dist-packages/ansible
executable location = /usr/local/bin/ansible
python version = 2.7.18 (default, Mar 8 2021, 13:02:45) [GCC 9.3.0]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
/usr/local/lib/python2.7/dist-packages/ansible/parsing/vault/__init__.py:44: CryptographyDeprecationWarning: Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography, and will be removed in the next release.
from cryptography.exceptions import InvalidSignature
DEFAULT_CALLBACK_WHITELIST(/home/jenkins/ethernal-loop/ansible.cfg) = [u'profile_tasks']
INTERPRETER_PYTHON(/home/jenkins/ethernal-loop/ansible.cfg) = /usr/bin/python3
```
### OS / Environment
docker
image ubuntu 20.04
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
#copy below repo and run
git clone [email protected]:matsonkepson/ethernal-loop.git
ansible-playbook main.yml -vvv
```
### Expected Results
Loop is runs for given time like in main.yml > include_role >long_run_timeout
### Actual Results
```console (paste below)
ERROR! Unexpected Exception, this is probably a bug: maximum recursion depth exceeded
the full traceback was:
Traceback (most recent call last):
File "/usr/local/bin/ansible-playbook", line 123, in <module>
exit_code = cli.run()
File "/usr/local/lib/python2.7/dist-packages/ansible/cli/playbook.py", line 129, in run
results = pbex.run()
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/playbook_executor.py", line 169, in run
result = self._tqm.run(play=play)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/task_queue_manager.py", line 281, in run
play_return = strategy.run(iterator, play_context)
File "/usr/local/lib/python2.7/dist-packages/ansible/plugins/strategy/linear.py", line 398, in run
iterator.add_tasks(host, all_blocks[host])
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 567, in add_tasks
self._host_states[host.name] = self._insert_tasks_into_state(self.get_host_state(host), task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 541, in _insert_tasks_into_state
target_block = state._blocks[state.cur_block].copy()
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 216, in copy
new_me.block = _dupe_task_list(self.block or [], new_me)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 185, in _dupe_task_list
new_task = task.copy(exclude_parent=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 216, in copy
new_me.block = _dupe_task_list(self.block or [], new_me)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 187, in _dupe_task_list
new_task._parent = task._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/role_include.py", line 161, in copy
new_me = super(IncludeRole, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/role_include.py", line 161, in copy
new_me = super(IncludeRole, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 401, in copy
new_me = super(Task, self).copy()
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/base.py", line 318, in copy
new_me = self.__class__()
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/role_include.py", line 59, in __init__
super(IncludeRole, self).__init__(block=block, role=role, task_include=task_include)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 55, in __init__
super(TaskInclude, self).__init__(block=block, role=role, task_include=task_include)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 106, in __init__
super(Task, self).__init__()
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/base.py", line 177, in __init__
self._attr_defaults[key] = value()
File "/usr/local/lib/python2.7/dist-packages/ansible/context.py", line 48, in inner
value = CLIARGS.get(key, default=default)
File "/usr/lib/python2.7/_abcoll.py", line 382, in get
return self[key]
RuntimeError: maximum recursion depth exceeded
```
|
https://github.com/ansible/ansible/issues/73996
|
https://github.com/ansible/ansible/pull/73999
|
1082e2ab794940f3114c29199a765edee47aabbe
|
cf4a9fcd0f18d384dbad035bcc418da9cfdec1e1
| 2021-03-22T16:13:21Z |
python
| 2021-04-15T20:52:08Z |
changelogs/fragments/73996-recursion-depth.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,996 |
ERROR! Unexpected Exception, this is probably a bug: maximum recursion depth exceeded
|
### Summary
Hi
I have tried to reproduce live-ish output for the long running job with role in shell but after exactly 240 tries I am getting
`ERROR! Unexpected Exception, this is probably a bug: maximum recursion depth exceeded`
tried python2 and 3 as interpreter on version ansible 2.10.7 as well as 2.9.19.post0
### Issue Type
Bug Report
### Component Name
simple sh scrpit with tail on it
### Ansible Version
```console (paste below)
$ ansible --version
/usr/local/lib/python2.7/dist-packages/ansible/parsing/vault/__init__.py:44: CryptographyDeprecationWarning: Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography, and will be removed in the next release.
from cryptography.exceptions import InvalidSignature
ansible 2.10.7
config file = /home/jenkins/ethernal-loop/ansible.cfg
configured module search path = [u'/home/jenkins/.ansible/plugins/modules', u'/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python2.7/dist-packages/ansible
executable location = /usr/local/bin/ansible
python version = 2.7.18 (default, Mar 8 2021, 13:02:45) [GCC 9.3.0]
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
/usr/local/lib/python2.7/dist-packages/ansible/parsing/vault/__init__.py:44: CryptographyDeprecationWarning: Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography, and will be removed in the next release.
from cryptography.exceptions import InvalidSignature
DEFAULT_CALLBACK_WHITELIST(/home/jenkins/ethernal-loop/ansible.cfg) = [u'profile_tasks']
INTERPRETER_PYTHON(/home/jenkins/ethernal-loop/ansible.cfg) = /usr/bin/python3
```
### OS / Environment
docker
image ubuntu 20.04
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
#copy below repo and run
git clone [email protected]:matsonkepson/ethernal-loop.git
ansible-playbook main.yml -vvv
```
### Expected Results
Loop runs for the given time, as in main.yml > include_role > long_run_timeout
### Actual Results
```console (paste below)
ERROR! Unexpected Exception, this is probably a bug: maximum recursion depth exceeded
the full traceback was:
Traceback (most recent call last):
File "/usr/local/bin/ansible-playbook", line 123, in <module>
exit_code = cli.run()
File "/usr/local/lib/python2.7/dist-packages/ansible/cli/playbook.py", line 129, in run
results = pbex.run()
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/playbook_executor.py", line 169, in run
result = self._tqm.run(play=play)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/task_queue_manager.py", line 281, in run
play_return = strategy.run(iterator, play_context)
File "/usr/local/lib/python2.7/dist-packages/ansible/plugins/strategy/linear.py", line 398, in run
iterator.add_tasks(host, all_blocks[host])
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 567, in add_tasks
self._host_states[host.name] = self._insert_tasks_into_state(self.get_host_state(host), task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 539, in _insert_tasks_into_state
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
File "/usr/local/lib/python2.7/dist-packages/ansible/executor/play_iterator.py", line 541, in _insert_tasks_into_state
target_block = state._blocks[state.cur_block].copy()
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 216, in copy
new_me.block = _dupe_task_list(self.block or [], new_me)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 185, in _dupe_task_list
new_task = task.copy(exclude_parent=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 216, in copy
new_me.block = _dupe_task_list(self.block or [], new_me)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 187, in _dupe_task_list
new_task._parent = task._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/role_include.py", line 161, in copy
new_me = super(IncludeRole, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 408, in copy
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/block.py", line 213, in copy
new_me._parent = self._parent.copy(exclude_tasks=True)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/role_include.py", line 161, in copy
new_me = super(IncludeRole, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 110, in copy
new_me = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 401, in copy
new_me = super(Task, self).copy()
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/base.py", line 318, in copy
new_me = self.__class__()
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/role_include.py", line 59, in __init__
super(IncludeRole, self).__init__(block=block, role=role, task_include=task_include)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task_include.py", line 55, in __init__
super(TaskInclude, self).__init__(block=block, role=role, task_include=task_include)
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/task.py", line 106, in __init__
super(Task, self).__init__()
File "/usr/local/lib/python2.7/dist-packages/ansible/playbook/base.py", line 177, in __init__
self._attr_defaults[key] = value()
File "/usr/local/lib/python2.7/dist-packages/ansible/context.py", line 48, in inner
value = CLIARGS.get(key, default=default)
File "/usr/lib/python2.7/_abcoll.py", line 382, in get
return self[key]
RuntimeError: maximum recursion depth exceeded
```
|
https://github.com/ansible/ansible/issues/73996
|
https://github.com/ansible/ansible/pull/73999
|
1082e2ab794940f3114c29199a765edee47aabbe
|
cf4a9fcd0f18d384dbad035bcc418da9cfdec1e1
| 2021-03-22T16:13:21Z |
python
| 2021-04-15T20:52:08Z |
lib/ansible/playbook/base.py
|
# Copyright: (c) 2012-2014, Michael DeHaan <[email protected]>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import itertools
import operator
from copy import copy as shallowcopy
from functools import partial
from jinja2.exceptions import UndefinedError
from ansible import constants as C
from ansible import context
from ansible.module_utils.six import iteritems, string_types, with_metaclass
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.errors import AnsibleParserError, AnsibleUndefinedVariable, AnsibleAssertionError
from ansible.module_utils._text import to_text, to_native
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.parsing.dataloader import DataLoader
from ansible.utils.display import Display
from ansible.utils.sentinel import Sentinel
from ansible.utils.vars import combine_vars, isidentifier, get_unique_id
# Module-level Display instance used for debug/warning output throughout this file.
display = Display()
def _generic_g(prop_name, self):
    """Generic getter for BaseMeta-generated properties.

    Looks the value up in the per-instance attribute store; a Sentinel value
    means "never set" and falls back to the per-instance default. Raises
    AttributeError for unknown property names, mirroring normal attribute
    access semantics.
    """
    try:
        value = self._attributes[prop_name]
    except KeyError:
        raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, prop_name))
    # Sentinel marks an unset attribute; substitute the recorded default.
    return self._attr_defaults[prop_name] if value is Sentinel else value
def _generic_g_method(prop_name, self):
try:
if self._squashed:
return self._attributes[prop_name]
method = "_get_attr_%s" % prop_name
return getattr(self, method)()
except KeyError:
raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, prop_name))
def _generic_g_parent(prop_name, self):
    """Generic getter for inheritable attributes.

    While the object is neither squashed nor finalized, the value is resolved
    through the parent chain via _get_parent_attribute(); otherwise the locally
    stored value is used. A Sentinel result falls back to the per-instance
    default.
    """
    try:
        if self._squashed or self._finalized:
            value = self._attributes[prop_name]
        else:
            try:
                value = self._get_parent_attribute(prop_name)
            except AttributeError:
                # AttributeError here means parent resolution is unavailable
                # (or failed); fall back to the locally stored value
                value = self._attributes[prop_name]
    except KeyError:
        raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, prop_name))

    if value is Sentinel:
        value = self._attr_defaults[prop_name]

    return value
def _generic_s(prop_name, self, value):
self._attributes[prop_name] = value
def _generic_d(prop_name, self):
del self._attributes[prop_name]
class BaseMeta(type):

    """
    Metaclass for the Base object, which is used to construct the class
    attributes based on the FieldAttributes available.
    """

    def __new__(cls, name, parents, dct):
        def _create_attrs(src_dict, dst_dict):
            '''
            Helper method which creates the attributes based on those in the
            source dictionary of attributes. This also populates the other
            attributes used to keep track of these attributes and via the
            getter/setter/deleter methods.
            '''
            keys = list(src_dict.keys())
            for attr_name in keys:
                value = src_dict[attr_name]
                if isinstance(value, Attribute):
                    if attr_name.startswith('_'):
                        # FieldAttributes are declared as _name; the public
                        # property drops the leading underscore
                        attr_name = attr_name[1:]

                    # here we selectively assign the getter based on a few
                    # things, such as whether we have a _get_attr_<name>
                    # method, or if the attribute is marked as not inheriting
                    # its value from a parent object
                    method = "_get_attr_%s" % attr_name
                    if method in src_dict or method in dst_dict:
                        getter = partial(_generic_g_method, attr_name)
                    elif ('_get_parent_attribute' in dst_dict or '_get_parent_attribute' in src_dict) and value.inherit:
                        getter = partial(_generic_g_parent, attr_name)
                    else:
                        getter = partial(_generic_g, attr_name)

                    setter = partial(_generic_s, attr_name)
                    deleter = partial(_generic_d, attr_name)

                    # install the property and register the attribute's
                    # metadata, default, and unset (Sentinel) placeholder
                    dst_dict[attr_name] = property(getter, setter, deleter)
                    dst_dict['_valid_attrs'][attr_name] = value
                    dst_dict['_attributes'][attr_name] = Sentinel
                    dst_dict['_attr_defaults'][attr_name] = value.default

                    if value.alias is not None:
                        # aliases get the same property object and resolve
                        # back to the canonical name via _alias_attrs
                        dst_dict[value.alias] = property(getter, setter, deleter)
                        dst_dict['_valid_attrs'][value.alias] = value
                        dst_dict['_alias_attrs'][value.alias] = attr_name

        def _process_parents(parents, dst_dict):
            '''
            Helper method which creates attributes from all parent objects
            recursively on through grandparent objects
            '''
            for parent in parents:
                if hasattr(parent, '__dict__'):
                    _create_attrs(parent.__dict__, dst_dict)
                    new_dst_dict = parent.__dict__.copy()
                    new_dst_dict.update(dst_dict)
                    _process_parents(parent.__bases__, new_dst_dict)

        # create some additional class attributes
        dct['_attributes'] = {}
        dct['_attr_defaults'] = {}
        dct['_valid_attrs'] = {}
        dct['_alias_attrs'] = {}

        # now create the attributes based on the FieldAttributes
        # available, including from parent (and grandparent) objects
        _create_attrs(dct, dct)
        _process_parents(parents, dct)

        return super(BaseMeta, cls).__new__(cls, name, parents, dct)
class FieldAttributeBase(with_metaclass(BaseMeta, object)):
    def __init__(self):

        # initialize the data loader and variable manager, which will be provided
        # later when the object is actually loaded
        self._loader = None
        self._variable_manager = None

        # other internal params
        self._validated = False
        self._squashed = False
        self._finalized = False

        # every object gets a random uuid:
        self._uuid = get_unique_id()

        # we create a copy of the attributes here due to the fact that
        # it was initialized as a class param in the meta class, so we
        # need a unique object here (all members contained within are
        # unique already).
        self._attributes = self.__class__._attributes.copy()
        self._attr_defaults = self.__class__._attr_defaults.copy()
        for key, value in self._attr_defaults.items():
            if callable(value):
                # NOTE(review): callable defaults (e.g. CLI-context-derived ones)
                # are evaluated eagerly on every construction, including each
                # object created during copy(); on very deep parent/include
                # chains this work runs at the deepest point of the copy
                # recursion -- confirm whether lazy evaluation would be safer.
                self._attr_defaults[key] = value()

        # and init vars, avoid using defaults in field declaration as it lives across plays
        self.vars = dict()
def dump_me(self, depth=0):
''' this is never called from production code, it is here to be used when debugging as a 'complex print' '''
if depth == 0:
display.debug("DUMPING OBJECT ------------------------------------------------------")
display.debug("%s- %s (%s, id=%s)" % (" " * depth, self.__class__.__name__, self, id(self)))
if hasattr(self, '_parent') and self._parent:
self._parent.dump_me(depth + 2)
dep_chain = self._parent.get_dep_chain()
if dep_chain:
for dep in dep_chain:
dep.dump_me(depth + 2)
if hasattr(self, '_play') and self._play:
self._play.dump_me(depth + 2)
    def preprocess_data(self, ds):
        ''' infrequently used method to do some pre-processing of legacy terms '''
        # base implementation is a no-op; subclasses override this to rewrite
        # legacy/shorthand keys before attribute validation runs
        return ds
    def load_data(self, ds, variable_manager=None, loader=None):
        ''' walk the input datastructure and assign any values

        :arg ds: the parsed datastructure (dict-like) to load fields from;
            must not be None.
        :kwarg variable_manager: optional VariableManager stored for later
            templating/merging.
        :kwarg loader: optional DataLoader; a fresh one is created when omitted.
        :returns: self, fully populated and early-validated.
        '''

        if ds is None:
            raise AnsibleAssertionError('ds (%s) should not be None but it is.' % ds)

        # cache the datastructure internally
        setattr(self, '_ds', ds)

        # the variable manager class is used to manage and merge variables
        # down to a single dictionary for reference in templating, etc.
        self._variable_manager = variable_manager

        # the data loader class is used to parse data from strings and files
        if loader is not None:
            self._loader = loader
        else:
            self._loader = DataLoader()

        # call the preprocess_data() function to massage the data into
        # something we can more easily parse, and then call the validation
        # function on it to ensure there are no incorrect key values
        ds = self.preprocess_data(ds)
        self._validate_attributes(ds)

        # Walk all attributes in the class. We sort them based on their priority
        # so that certain fields can be loaded before others, if they are dependent.
        for name, attr in sorted(iteritems(self._valid_attrs), key=operator.itemgetter(1)):
            # copy the value over unless a _load_field method is defined
            target_name = name
            if name in self._alias_attrs:
                # aliases store their value under the canonical attribute name
                target_name = self._alias_attrs[name]
            if name in ds:
                method = getattr(self, '_load_%s' % name, None)
                if method:
                    self._attributes[target_name] = method(name, ds[name])
                else:
                    self._attributes[target_name] = ds[name]

        # run early, non-critical validation
        self.validate()

        # return the constructed object
        return self
def get_ds(self):
try:
return getattr(self, '_ds')
except AttributeError:
return None
    def get_loader(self):
        """Return the DataLoader assigned by load_data() (None before loading)."""
        return self._loader
    def get_variable_manager(self):
        """Return the VariableManager assigned by load_data() (may be None)."""
        return self._variable_manager
def _post_validate_debugger(self, attr, value, templar):
value = templar.template(value)
valid_values = frozenset(('always', 'on_failed', 'on_unreachable', 'on_skipped', 'never'))
if value and isinstance(value, string_types) and value not in valid_values:
raise AnsibleParserError("'%s' is not a valid value for debugger. Must be one of %s" % (value, ', '.join(valid_values)), obj=self.get_ds())
return value
def _validate_attributes(self, ds):
'''
Ensures that there are no keys in the datastructure which do
not map to attributes for this object.
'''
valid_attrs = frozenset(self._valid_attrs.keys())
for key in ds:
if key not in valid_attrs:
raise AnsibleParserError("'%s' is not a valid attribute for a %s" % (key, self.__class__.__name__), obj=ds)
    def validate(self, all_vars=None):
        ''' validation that is done at parse time, not load time '''
        # NOTE(review): all_vars is normalized but not used by this base
        # implementation -- presumably kept for subclass overrides; confirm.
        all_vars = {} if all_vars is None else all_vars

        if not self._validated:
            # walk all fields in the object
            for (name, attribute) in iteritems(self._valid_attrs):
                if name in self._alias_attrs:
                    # validate aliases against their canonical attribute
                    name = self._alias_attrs[name]
                # run validator only if present
                method = getattr(self, '_validate_%s' % name, None)
                if method:
                    method(attribute, name, getattr(self, name))
                else:
                    # and make sure the attribute is of the type it should be
                    value = self._attributes[name]
                    if value is not None:
                        if attribute.isa == 'string' and isinstance(value, (list, dict)):
                            raise AnsibleParserError(
                                "The field '%s' is supposed to be a string type,"
                                " however the incoming data structure is a %s" % (name, type(value)), obj=self.get_ds()
                            )

        # validation only needs to run once per object
        self._validated = True
def squash(self):
'''
Evaluates all attributes and sets them to the evaluated version,
so that all future accesses of attributes do not need to evaluate
parent attributes.
'''
if not self._squashed:
for name in self._valid_attrs.keys():
self._attributes[name] = getattr(self, name)
self._squashed = True
    def copy(self):
        '''
        Create a copy of this object and return it.

        Field attribute values and their defaults are shallow-copied; the
        loader, variable manager, validation state and uuid are shared with
        the original.
        '''
        # NOTE(review): subclasses extend copy() and walk their _parent chain;
        # on very deep include chains that recursion (plus __init__'s eager
        # default evaluation triggered by self.__class__() here) can approach
        # Python's recursion limit -- confirm against reported tracebacks.
        new_me = self.__class__()

        for name in self._valid_attrs.keys():
            if name in self._alias_attrs:
                # aliases resolve to a canonical attribute copied below
                continue
            new_me._attributes[name] = shallowcopy(self._attributes[name])
            new_me._attr_defaults[name] = shallowcopy(self._attr_defaults[name])

        new_me._loader = self._loader
        new_me._variable_manager = self._variable_manager
        new_me._validated = self._validated
        new_me._finalized = self._finalized
        new_me._uuid = self._uuid

        # if the ds value was set on the object, copy it to the new copy too
        if hasattr(self, '_ds'):
            new_me._ds = self._ds

        return new_me
    def get_validated_value(self, name, attribute, value, templar):
        """Coerce ``value`` to the type declared by ``attribute.isa``.

        :arg name: field name, used only in error messages.
        :arg attribute: the FieldAttribute describing the expected type.
        :arg value: the raw (already templated) value to coerce.
        :arg templar: templar passed through to nested 'class' attributes.
        :returns: the coerced value.
        :raises AnsibleParserError: for invalid/empty list element types.
        :raises TypeError, ValueError: for values that cannot be coerced
            (post_validate() translates these into parser errors).
        """
        if attribute.isa == 'string':
            value = to_text(value)
        elif attribute.isa == 'int':
            value = int(value)
        elif attribute.isa == 'float':
            value = float(value)
        elif attribute.isa == 'bool':
            value = boolean(value, strict=True)
        elif attribute.isa == 'percent':
            # special value, which may be an integer or float
            # with an optional '%' at the end
            if isinstance(value, string_types) and '%' in value:
                value = value.replace('%', '')
            value = float(value)
        elif attribute.isa == 'list':
            if value is None:
                value = []
            elif not isinstance(value, list):
                # scalars are promoted to a single-element list
                value = [value]
            if attribute.listof is not None:
                for item in value:
                    if not isinstance(item, attribute.listof):
                        raise AnsibleParserError("the field '%s' should be a list of %s, "
                                                 "but the item '%s' is a %s" % (name, attribute.listof, item, type(item)), obj=self.get_ds())
                    elif attribute.required and attribute.listof == string_types:
                        if item is None or item.strip() == "":
                            raise AnsibleParserError("the field '%s' is required, and cannot have empty values" % (name,), obj=self.get_ds())
        elif attribute.isa == 'set':
            if value is None:
                value = set()
            elif not isinstance(value, (list, set)):
                if isinstance(value, string_types):
                    # comma-separated string becomes multiple items
                    value = value.split(',')
                else:
                    # Making a list like this handles strings of
                    # text and bytes properly
                    value = [value]
            if not isinstance(value, set):
                value = set(value)
        elif attribute.isa == 'dict':
            if value is None:
                value = dict()
            elif not isinstance(value, dict):
                raise TypeError("%s is not a dictionary" % value)
        elif attribute.isa == 'class':
            if not isinstance(value, attribute.class_type):
                raise TypeError("%s is not a valid %s (got a %s instead)" % (name, attribute.class_type, type(value)))
            # nested objects validate themselves with the same templar
            value.post_validate(templar=templar)
        return value
    def post_validate(self, templar):
        '''
        we can't tell that everything is of the right type until we have
        all the variables. Run basic types (from isa) as well as
        any _post_validate_<foo> functions.
        '''

        # save the omit value for later checking
        omit_value = templar.available_variables.get('omit')

        for (name, attribute) in iteritems(self._valid_attrs):

            if attribute.static:
                value = getattr(self, name)

                # we don't template 'vars' but allow template as values for later use
                if name not in ('vars',) and templar.is_template(value):
                    display.warning('"%s" is not templatable, but we found: %s, '
                                    'it will not be templated and will be used "as is".' % (name, value))
                continue

            if getattr(self, name) is None:
                if not attribute.required:
                    continue
                else:
                    raise AnsibleParserError("the field '%s' is required but was not set" % name)
            elif not attribute.always_post_validate and self.__class__.__name__ not in ('Task', 'Handler', 'PlayContext'):
                # Intermediate objects like Play() won't have their fields validated by
                # default, as their values are often inherited by other objects and validated
                # later, so we don't want them to fail out early
                continue

            try:
                # Run the post-validator if present. These methods are responsible for
                # using the given templar to template the values, if required.
                method = getattr(self, '_post_validate_%s' % name, None)
                if method:
                    value = method(attribute, getattr(self, name), templar)
                elif attribute.isa == 'class':
                    value = getattr(self, name)
                else:
                    # if the attribute contains a variable, template it now
                    value = templar.template(getattr(self, name))

                # if this evaluated to the omit value, set the value back to
                # the default specified in the FieldAttribute and move on
                if omit_value is not None and value == omit_value:
                    if callable(attribute.default):
                        setattr(self, name, attribute.default())
                    else:
                        setattr(self, name, attribute.default)
                    continue

                # and make sure the attribute is of the type it should be
                if value is not None:
                    value = self.get_validated_value(name, attribute, value, templar)

                # and assign the massaged value back to the attribute field
                setattr(self, name, value)
            except (TypeError, ValueError) as e:
                value = getattr(self, name)
                raise AnsibleParserError("the field '%s' has an invalid value (%s), and could not be converted to an %s."
                                         "The error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds(), orig_exc=e)
            except (AnsibleUndefinedVariable, UndefinedError) as e:
                if templar._fail_on_undefined_errors and name != 'name':
                    if name == 'args':
                        msg = "The task includes an option with an undefined variable. The error was: %s" % (to_native(e))
                    else:
                        msg = "The field '%s' has an invalid value, which includes an undefined variable. The error was: %s" % (name, to_native(e))
                    raise AnsibleParserError(msg, obj=self.get_ds(), orig_exc=e)

        # mark post-validation complete so inheritable getters return stored values
        self._finalized = True
    def _load_vars(self, attr, ds):
        '''
        Vars in a play can be specified either as a dictionary directly, or
        as a list of dictionaries. If the later, this method will turn the
        list into a single dictionary.
        '''

        def _validate_variable_keys(ds):
            # every key must be a legal variable identifier
            for key in ds:
                if not isidentifier(key):
                    raise TypeError("'%s' is not a valid variable name" % key)

        try:
            if isinstance(ds, dict):
                _validate_variable_keys(ds)
                return combine_vars(self.vars, ds)
            elif isinstance(ds, list):
                # merge the list of dicts left-to-right on top of existing vars
                all_vars = self.vars
                for item in ds:
                    if not isinstance(item, dict):
                        raise ValueError
                    _validate_variable_keys(item)
                    all_vars = combine_vars(all_vars, item)
                return all_vars
            elif ds is None:
                return {}
            else:
                raise ValueError
        except ValueError as e:
            # wrong overall shape (not dict / list-of-dicts / None)
            raise AnsibleParserError("Vars in a %s must be specified as a dictionary, or a list of dictionaries" % self.__class__.__name__,
                                     obj=ds, orig_exc=e)
        except TypeError as e:
            # a key failed the identifier check above
            raise AnsibleParserError("Invalid variable name in vars specified for %s: %s" % (self.__class__.__name__, e), obj=ds, orig_exc=e)
def _extend_value(self, value, new_value, prepend=False):
    '''
    Merge ``new_value`` into ``value``, coercing each to a list first.
    Sentinel placeholders are dropped, adjacent duplicates are collapsed
    (via itertools.groupby), and None entries are filtered out of the
    result. With prepend=True the new values come first.
    '''
    def _as_list(v):
        return v if isinstance(v, list) else [v]

    # Due to where _extend_value may run for some attributes it is
    # possible to end up with Sentinel in the list of values; strip them.
    current = [v for v in _as_list(value) if v is not Sentinel]
    incoming = [v for v in _as_list(new_value) if v is not Sentinel]

    merged = incoming + current if prepend else current + incoming

    return [item for item, _ in itertools.groupby(merged) if item is not None]
def dump_attrs(self):
    '''
    Return a dict mapping every known field-attribute name to its current
    value on this object.

    Values of attributes declared with isa='class' that provide a
    serialize() method are stored in their serialized (dict) form so the
    result is safe to ship between processes.
    '''
    attrs = {}
    # dict.items() iterates fine on both Python 2 and 3; the
    # six.iteritems indirection is unnecessary here.
    for (name, attribute) in self._valid_attrs.items():
        attr = getattr(self, name)
        if attribute.isa == 'class' and hasattr(attr, 'serialize'):
            attrs[name] = attr.serialize()
        else:
            attrs[name] = attr
    return attrs
def from_attrs(self, attrs):
    '''
    Load field-attribute values from a dictionary (the inverse of
    dump_attrs()). Keys that are not known field attributes are ignored.

    Attributes declared with isa='class' whose incoming value is a dict
    are rehydrated through the attribute's class_type().deserialize().
    '''
    # dict.items() works on both Python 2 and 3; no need for six.iteritems.
    for (attr, value) in attrs.items():
        if attr in self._valid_attrs:
            attribute = self._valid_attrs[attr]
            if attribute.isa == 'class' and isinstance(value, dict):
                obj = attribute.class_type()
                obj.deserialize(value)
                setattr(self, attr, obj)
            else:
                setattr(self, attr, value)

    # from_attrs is only used to create a finalized task
    # from attrs from the Worker/TaskExecutor
    # Those attrs are finalized and squashed in the TE
    # and controller side use needs to reflect that
    self._finalized = True
    self._squashed = True
def serialize(self):
    '''
    Serializes the object derived from the base object into
    a dictionary of values. This only serializes the field
    attributes for the object, so this may need to be overridden
    for any classes which wish to add additional items not stored
    as field attributes.
    '''
    # named 'result' rather than the original 'repr' to avoid shadowing
    # the repr() builtin
    result = self.dump_attrs()

    # serialize the uuid field
    result['uuid'] = self._uuid
    result['finalized'] = self._finalized
    result['squashed'] = self._squashed

    return result
def deserialize(self, data):
    '''
    Given a dictionary of values, load up the field attributes for
    this object. As with serialize(), if there are any non-field
    attribute data members, this method will need to be overridden
    and extended.

    :param data: dict produced by serialize(); anything else raises
        AnsibleAssertionError.
    '''
    if not isinstance(data, dict):
        raise AnsibleAssertionError('data (%s) should be a dict but is a %s' % (data, type(data)))

    # dict.items() works on both Python 2 and 3; no need for six.iteritems.
    for (name, attribute) in self._valid_attrs.items():
        if name in data:
            setattr(self, name, data[name])
        else:
            # missing keys fall back to the declared default; a callable
            # default is invoked so mutable defaults are not shared
            if callable(attribute.default):
                setattr(self, name, attribute.default())
            else:
                setattr(self, name, attribute.default)

    # restore the UUID field
    setattr(self, '_uuid', data.get('uuid'))
    self._finalized = data.get('finalized', False)
    self._squashed = data.get('squashed', False)
class Base(FieldAttributeBase):
    """Field attributes common to every playbook object (play/block/task).

    Each FieldAttribute declared here becomes a validated, inheritable
    property on the object. Defaults taken from the command line are
    resolved lazily via context.cliargs_deferred_get so they reflect the
    final parsed CLI values rather than import-time state.
    """

    _name = FieldAttribute(isa='string', default='', always_post_validate=True, inherit=False)

    # connection/transport
    _connection = FieldAttribute(isa='string', default=context.cliargs_deferred_get('connection'))
    _port = FieldAttribute(isa='int')
    _remote_user = FieldAttribute(isa='string', default=context.cliargs_deferred_get('remote_user'))

    # variables
    _vars = FieldAttribute(isa='dict', priority=100, inherit=False, static=True)

    # module default params
    _module_defaults = FieldAttribute(isa='list', extend=True, prepend=True)

    # flags and misc. settings
    _environment = FieldAttribute(isa='list', extend=True, prepend=True)
    _no_log = FieldAttribute(isa='bool')
    _run_once = FieldAttribute(isa='bool')
    _ignore_errors = FieldAttribute(isa='bool')
    _ignore_unreachable = FieldAttribute(isa='bool')
    _check_mode = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('check'))
    _diff = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('diff'))
    _any_errors_fatal = FieldAttribute(isa='bool', default=C.ANY_ERRORS_FATAL)
    _throttle = FieldAttribute(isa='int', default=0)
    _timeout = FieldAttribute(isa='int', default=C.TASK_TIMEOUT)

    # explicitly invoke a debugger on tasks
    _debugger = FieldAttribute(isa='string')

    # Privilege escalation
    _become = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('become'))
    _become_method = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_method'))
    _become_user = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_user'))
    _become_flags = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_flags'))
    _become_exe = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_exe'))

    # used to hold sudo/su stuff
    DEPRECATED_ATTRIBUTES = []
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 69,397 |
Feature: yum --cacheonly
|
<!--- Verify first that your feature was not already discussed on GitHub -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
<!--- Describe the new feature/improvement briefly below -->
Add a --cacheonly option to the ansible yum module, similar to the --downloadonly option.
##### ISSUE TYPE
- Feature Idea
##### COMPONENT NAME
<!--- Write the short name of the module, plugin, task or feature below, use your best guess if unsure -->
lib/ansible/modules/packaging/os/yum.py
##### ADDITIONAL INFORMATION
<!--- Describe how the feature would be used, why it is needed and what it would solve -->
We have a group of Oracle servers that have no repository management server. They update directly from Oracle's repositories. Our thought is to do a "yum update --downloadonly" on all dev and prod servers. Then we can update the dev servers first using "yum update --cacheonly" to test the updates, and run the same command on the prod at a later time to make them consistent with the dev.
<!--- Paste example playbooks or commands between quotes below -->
```yaml
```
<!--- HINT: You can also paste gist.github.com links for larger files -->
|
https://github.com/ansible/ansible/issues/69397
|
https://github.com/ansible/ansible/pull/73820
|
0c101f3f769b9aaed1a99f8858197f4d28b11034
|
fdee5ca16dd169b83997b6c82130a2d9c7fe3688
| 2020-05-08T18:48:18Z |
python
| 2021-04-16T15:08:52Z |
changelogs/fragments/73820-yumdnf-add_cacheonly_option.yaml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 69,397 |
Feature: yum --cacheonly
|
<!--- Verify first that your feature was not already discussed on GitHub -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
<!--- Describe the new feature/improvement briefly below -->
Add a --cacheonly option to the ansible yum module, similar to the --downloadonly option.
##### ISSUE TYPE
- Feature Idea
##### COMPONENT NAME
<!--- Write the short name of the module, plugin, task or feature below, use your best guess if unsure -->
lib/ansible/modules/packaging/os/yum.py
##### ADDITIONAL INFORMATION
<!--- Describe how the feature would be used, why it is needed and what it would solve -->
We have a group of Oracle servers that have no repository management server. They update directly from Oracle's repositories. Our thought is to do a "yum update --downloadonly" on all dev and prod servers. Then we can update the dev servers first using "yum update --cacheonly" to test the updates, and run the same command on the prod at a later time to make them consistent with the dev.
<!--- Paste example playbooks or commands between quotes below -->
```yaml
```
<!--- HINT: You can also paste gist.github.com links for larger files -->
|
https://github.com/ansible/ansible/issues/69397
|
https://github.com/ansible/ansible/pull/73820
|
0c101f3f769b9aaed1a99f8858197f4d28b11034
|
fdee5ca16dd169b83997b6c82130a2d9c7fe3688
| 2020-05-08T18:48:18Z |
python
| 2021-04-16T15:08:52Z |
lib/ansible/module_utils/yumdnf.py
|
# -*- coding: utf-8 -*-
#
# # Copyright: (c) 2012, Red Hat, Inc
# Written by Seth Vidal <skvidal at fedoraproject.org>
# Contributing Authors:
# - Ansible Core Team
# - Eduard Snesarev (@verm666)
# - Berend De Schouwer (@berenddeschouwer)
# - Abhijeet Kasurde (@Akasurde)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import time
import glob
import tempfile
from abc import ABCMeta, abstractmethod
from ansible.module_utils._text import to_native
from ansible.module_utils.six import with_metaclass
# Shared AnsibleModule argument spec for the yum and dnf modules.  Both
# modules must accept an identical option set so plays can switch between
# back-ends transparently (parity is enforced by the YumDnf base class).
yumdnf_argument_spec = dict(
    argument_spec=dict(
        allow_downgrade=dict(type='bool', default=False),
        autoremove=dict(type='bool', default=False),
        bugfix=dict(required=False, type='bool', default=False),
        # Run entirely from the system package cache; do not download or
        # update metadata (yum/dnf -C / --cacheonly).  Requested in
        # ansible issue #69397; default False keeps prior behavior.
        cacheonly=dict(type='bool', default=False),
        conf_file=dict(type='str'),
        disable_excludes=dict(type='str', default=None),
        disable_gpg_check=dict(type='bool', default=False),
        disable_plugin=dict(type='list', elements='str', default=[]),
        disablerepo=dict(type='list', elements='str', default=[]),
        download_only=dict(type='bool', default=False),
        download_dir=dict(type='str', default=None),
        enable_plugin=dict(type='list', elements='str', default=[]),
        enablerepo=dict(type='list', elements='str', default=[]),
        exclude=dict(type='list', elements='str', default=[]),
        installroot=dict(type='str', default="/"),
        install_repoquery=dict(type='bool', default=True),
        install_weak_deps=dict(type='bool', default=True),
        list=dict(type='str'),
        name=dict(type='list', elements='str', aliases=['pkg'], default=[]),
        releasever=dict(default=None),
        security=dict(type='bool', default=False),
        skip_broken=dict(type='bool', default=False),
        # removed==absent, installed==present, these are accepted as aliases
        state=dict(type='str', default=None, choices=['absent', 'installed', 'latest', 'present', 'removed']),
        update_cache=dict(type='bool', default=False, aliases=['expire-cache']),
        update_only=dict(required=False, default="no", type='bool'),
        validate_certs=dict(type='bool', default=True),
        lock_timeout=dict(type='int', default=30),
    ),
    required_one_of=[['name', 'list', 'update_cache']],
    mutually_exclusive=[['name', 'list']],
    supports_check_mode=True,
)
class YumDnf(with_metaclass(ABCMeta, object)):
    """
    Abstract class that handles the population of instance variables that should
    be identical between both YUM and DNF modules because of the feature parity
    and shared argument spec.

    Concrete subclasses must implement is_lockfile_pid_valid() and run().
    """

    def __init__(self, module):
        self.module = module

        self.allow_downgrade = self.module.params['allow_downgrade']
        self.autoremove = self.module.params['autoremove']
        self.bugfix = self.module.params['bugfix']
        # Operate purely from the local package cache (yum/dnf -C).
        # Read with .get() so this base class also works against argument
        # specs that predate the option (see ansible issue #69397).
        self.cacheonly = self.module.params.get('cacheonly', False)
        self.conf_file = self.module.params['conf_file']
        self.disable_excludes = self.module.params['disable_excludes']
        self.disable_gpg_check = self.module.params['disable_gpg_check']
        self.disable_plugin = self.module.params['disable_plugin']
        self.disablerepo = self.module.params.get('disablerepo', [])
        self.download_only = self.module.params['download_only']
        self.download_dir = self.module.params['download_dir']
        self.enable_plugin = self.module.params['enable_plugin']
        self.enablerepo = self.module.params.get('enablerepo', [])
        self.exclude = self.module.params['exclude']
        self.installroot = self.module.params['installroot']
        self.install_repoquery = self.module.params['install_repoquery']
        self.install_weak_deps = self.module.params['install_weak_deps']
        self.list = self.module.params['list']
        self.names = [p.strip() for p in self.module.params['name']]
        self.releasever = self.module.params['releasever']
        self.security = self.module.params['security']
        self.skip_broken = self.module.params['skip_broken']
        self.state = self.module.params['state']
        self.update_only = self.module.params['update_only']
        self.update_cache = self.module.params['update_cache']
        self.validate_certs = self.module.params['validate_certs']
        self.lock_timeout = self.module.params['lock_timeout']

        # It's possible someone passed a comma separated string since it used
        # to be a string type, so we should handle that
        self.names = self.listify_comma_sep_strings_in_list(self.names)
        self.disablerepo = self.listify_comma_sep_strings_in_list(self.disablerepo)
        self.enablerepo = self.listify_comma_sep_strings_in_list(self.enablerepo)
        self.exclude = self.listify_comma_sep_strings_in_list(self.exclude)

        # Fail if someone passed a space separated string
        # https://github.com/ansible/ansible/issues/46301
        for name in self.names:
            if ' ' in name and not any(spec in name for spec in ['@', '>', '<', '=']):
                module.fail_json(
                    msg='It appears that a space separated string of packages was passed in '
                        'as an argument. To operate on several packages, pass a comma separated '
                        'string of packages or a list of packages.'
                )

        # Sanity checking for autoremove
        if self.state is None:
            if self.autoremove:
                self.state = "absent"
            else:
                self.state = "present"

        if self.autoremove and (self.state != "absent"):
            self.module.fail_json(
                msg="Autoremove should be used alone or with state=absent",
                results=[],
            )

        # This should really be redefined by both the yum and dnf module but a
        # default isn't a bad idea
        self.lockfile = '/var/run/yum.pid'

    @abstractmethod
    def is_lockfile_pid_valid(self):
        """Return True when the PID recorded in the lockfile is still alive."""
        return

    def _is_lockfile_present(self):
        """True when a package-manager lockfile exists (the path may be a
        glob pattern, as with dnf) and belongs to a live process."""
        return (os.path.isfile(self.lockfile) or glob.glob(self.lockfile)) and self.is_lockfile_pid_valid()

    def wait_for_lock(self):
        '''Poll until the lock is removed if timeout is a positive number'''

        if not self._is_lockfile_present():
            return

        if self.lock_timeout > 0:
            # one-second polling granularity; lock_timeout is in seconds
            for iteration in range(0, self.lock_timeout):
                time.sleep(1)
                if not self._is_lockfile_present():
                    return

        self.module.fail_json(msg='{0} lockfile is held by another process'.format(self.pkg_mgr_name))

    def listify_comma_sep_strings_in_list(self, some_list):
        """
        method to accept a list of strings as the parameter, find any strings
        in that list that are comma separated, remove them from the list and add
        their comma separated elements to the original list
        """
        new_list = []
        remove_from_original_list = []
        for element in some_list:
            if ',' in element:
                remove_from_original_list.append(element)
                new_list.extend([e.strip() for e in element.split(',')])

        for element in remove_from_original_list:
            some_list.remove(element)

        some_list.extend(new_list)

        # a single empty string means "nothing was specified"
        if some_list == [""]:
            return []

        return some_list

    @abstractmethod
    def run(self):
        raise NotImplementedError
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 69,397 |
Feature: yum --cacheonly
|
<!--- Verify first that your feature was not already discussed on GitHub -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
<!--- Describe the new feature/improvement briefly below -->
Add a --cacheonly option to the ansible yum module, similar to the --downloadonly option.
##### ISSUE TYPE
- Feature Idea
##### COMPONENT NAME
<!--- Write the short name of the module, plugin, task or feature below, use your best guess if unsure -->
lib/ansible/modules/packaging/os/yum.py
##### ADDITIONAL INFORMATION
<!--- Describe how the feature would be used, why it is needed and what it would solve -->
We have a group of Oracle servers that have no repository management server. They update directly from Oracle's repositories. Our thought is to do a "yum update --downloadonly" on all dev and prod servers. Then we can update the dev servers first using "yum update --cacheonly" to test the updates, and run the same command on the prod at a later time to make them consistent with the dev.
<!--- Paste example playbooks or commands between quotes below -->
```yaml
```
<!--- HINT: You can also paste gist.github.com links for larger files -->
|
https://github.com/ansible/ansible/issues/69397
|
https://github.com/ansible/ansible/pull/73820
|
0c101f3f769b9aaed1a99f8858197f4d28b11034
|
fdee5ca16dd169b83997b6c82130a2d9c7fe3688
| 2020-05-08T18:48:18Z |
python
| 2021-04-16T15:08:52Z |
lib/ansible/modules/dnf.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2015 Cristian van Ee <cristian at cvee.org>
# Copyright 2015 Igor Gnatenko <[email protected]>
# Copyright 2018 Adam Miller <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: dnf
version_added: 1.9
short_description: Manages packages with the I(dnf) package manager
description:
- Installs, upgrade, removes, and lists packages and groups with the I(dnf) package manager.
options:
name:
description:
- "A package name or package specifier with version, like C(name-1.0).
When using state=latest, this can be '*' which means run: dnf -y update.
You can also pass a url or a local path to a rpm file.
To operate on several packages this can accept a comma separated string of packages or a list of packages."
- Comparison operators for package version are valid here C(>), C(<), C(>=), C(<=). Example - C(name>=1.0)
required: true
aliases:
- pkg
type: list
elements: str
list:
description:
- Various (non-idempotent) commands for usage with C(/usr/bin/ansible) and I(not) playbooks. See examples.
type: str
state:
description:
- Whether to install (C(present), C(latest)), or remove (C(absent)) a package.
- Default is C(None), however in effect the default action is C(present) unless the C(autoremove) option is
enabled for this module, then C(absent) is inferred.
choices: ['absent', 'present', 'installed', 'removed', 'latest']
type: str
enablerepo:
description:
- I(Repoid) of repositories to enable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a ",".
type: list
elements: str
disablerepo:
description:
- I(Repoid) of repositories to disable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a ",".
type: list
elements: str
conf_file:
description:
- The remote dnf configuration file to use for the transaction.
type: str
disable_gpg_check:
description:
- Whether to disable the GPG checking of signatures of packages being
installed. Has an effect only if state is I(present) or I(latest).
- This setting affects packages installed from a repository as well as
"local" packages installed from the filesystem or a URL.
type: bool
default: 'no'
installroot:
description:
- Specifies an alternative installroot, relative to which all packages
will be installed.
version_added: "2.3"
default: "/"
type: str
releasever:
description:
- Specifies an alternative release from which all packages will be
installed.
version_added: "2.6"
type: str
autoremove:
description:
- If C(yes), removes all "leaf" packages from the system that were originally
installed as dependencies of user-installed packages but which are no longer
required by any such package. Should be used alone or when state is I(absent)
type: bool
default: "no"
version_added: "2.4"
exclude:
description:
- Package name(s) to exclude when state=present, or latest. This can be a
list or a comma separated string.
version_added: "2.7"
type: list
elements: str
skip_broken:
description:
- Skip packages with broken dependencies(devsolve) and are causing problems.
type: bool
default: "no"
version_added: "2.7"
update_cache:
description:
- Force dnf to check if cache is out of date and redownload if needed.
Has an effect only if state is I(present) or I(latest).
type: bool
default: "no"
aliases: [ expire-cache ]
version_added: "2.7"
update_only:
description:
- When using latest, only update installed packages. Do not install packages.
- Has an effect only if state is I(latest)
default: "no"
type: bool
version_added: "2.7"
security:
description:
- If set to C(yes), and C(state=latest) then only installs updates that have been marked security related.
- Note that, similar to ``dnf upgrade-minimal``, this filter applies to dependencies as well.
type: bool
default: "no"
version_added: "2.7"
bugfix:
description:
- If set to C(yes), and C(state=latest) then only installs updates that have been marked bugfix related.
- Note that, similar to ``dnf upgrade-minimal``, this filter applies to dependencies as well.
default: "no"
type: bool
version_added: "2.7"
enable_plugin:
description:
- I(Plugin) name to enable for the install/update operation.
The enabled plugin will not persist beyond the transaction.
version_added: "2.7"
type: list
elements: str
disable_plugin:
description:
- I(Plugin) name to disable for the install/update operation.
The disabled plugins will not persist beyond the transaction.
version_added: "2.7"
type: list
elements: str
disable_excludes:
description:
- Disable the excludes defined in DNF config files.
- If set to C(all), disables all excludes.
- If set to C(main), disable excludes defined in [main] in dnf.conf.
- If set to C(repoid), disable excludes defined for given repo id.
version_added: "2.7"
type: str
validate_certs:
description:
- This only applies if using a https url as the source of the rpm. e.g. for localinstall. If set to C(no), the SSL certificates will not be validated.
- This should only set to C(no) used on personally controlled sites using self-signed certificates as it avoids verifying the source site.
type: bool
default: "yes"
version_added: "2.7"
allow_downgrade:
description:
- Specify if the named package and version is allowed to downgrade
a maybe already installed higher version of that package.
Note that setting allow_downgrade=True can make this module
behave in a non-idempotent way. The task could end up with a set
of packages that does not match the complete list of specified
packages to install (because dependencies between the downgraded
package and others can cause changes to the packages which were
in the earlier transaction).
type: bool
default: "no"
version_added: "2.7"
install_repoquery:
description:
- This is effectively a no-op in DNF as it is not needed with DNF, but is an accepted parameter for feature
parity/compatibility with the I(yum) module.
type: bool
default: "yes"
version_added: "2.7"
download_only:
description:
- Only download the packages, do not install them.
default: "no"
type: bool
version_added: "2.7"
lock_timeout:
description:
- Amount of time to wait for the dnf lockfile to be freed.
required: false
default: 30
type: int
version_added: "2.8"
install_weak_deps:
description:
- Will also install all packages linked by a weak dependency relation.
type: bool
default: "yes"
version_added: "2.8"
download_dir:
description:
- Specifies an alternate directory to store packages.
- Has an effect only if I(download_only) is specified.
type: str
version_added: "2.8"
allowerasing:
description:
- If C(yes) it allows erasing of installed packages to resolve dependencies.
required: false
type: bool
default: "no"
version_added: "2.10"
nobest:
description:
- Set best option to False, so that transactions are not limited to best candidates only.
required: false
type: bool
default: "no"
version_added: "2.11"
notes:
- When used with a `loop:` each package will be processed individually, it is much more efficient to pass the list directly to the `name` option.
- Group removal doesn't work if the group was installed with Ansible because
upstream dnf's API doesn't properly mark groups as installed, therefore upon
removal the module is unable to detect that the group is installed
(https://bugzilla.redhat.com/show_bug.cgi?id=1620324)
requirements:
- "python >= 2.6"
- python-dnf
- for the autoremove option you need dnf >= 2.0.1"
author:
- Igor Gnatenko (@ignatenkobrain) <[email protected]>
- Cristian van Ee (@DJMuggs) <cristian at cvee.org>
- Berend De Schouwer (@berenddeschouwer)
- Adam Miller (@maxamillion) <[email protected]>
'''
EXAMPLES = '''
- name: Install the latest version of Apache
dnf:
name: httpd
state: latest
- name: Install Apache >= 2.4
dnf:
name: httpd>=2.4
state: present
- name: Install the latest version of Apache and MariaDB
dnf:
name:
- httpd
- mariadb-server
state: latest
- name: Remove the Apache package
dnf:
name: httpd
state: absent
- name: Install the latest version of Apache from the testing repo
dnf:
name: httpd
enablerepo: testing
state: present
- name: Upgrade all packages
dnf:
name: "*"
state: latest
- name: Install the nginx rpm from a remote repo
dnf:
name: 'http://nginx.org/packages/centos/6/noarch/RPMS/nginx-release-centos-6-0.el6.ngx.noarch.rpm'
state: present
- name: Install nginx rpm from a local file
dnf:
name: /usr/local/src/nginx-release-centos-6-0.el6.ngx.noarch.rpm
state: present
- name: Install the 'Development tools' package group
dnf:
name: '@Development tools'
state: present
- name: Autoremove unneeded packages installed as dependencies
dnf:
autoremove: yes
- name: Uninstall httpd but keep its dependencies
dnf:
name: httpd
state: absent
autoremove: no
- name: Install a modularity appstream with defined stream and profile
dnf:
name: '@postgresql:9.6/client'
state: present
- name: Install a modularity appstream with defined stream
dnf:
name: '@postgresql:9.6'
state: present
- name: Install a modularity appstream with defined profile
dnf:
name: '@postgresql/client'
state: present
'''
import os
import re
import sys
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.urls import fetch_file
from ansible.module_utils.six import PY2, text_type
from distutils.version import LooseVersion
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.respawn import has_respawned, probe_interpreters_for_module, respawn_module
from ansible.module_utils.yumdnf import YumDnf, yumdnf_argument_spec
# The dnf python bindings are only importable when the target host has them
# installed; record availability in HAS_DNF so the module can respawn under
# a suitable interpreter later (see DnfModule._ensure_dnf) instead of
# crashing at import time.
try:
    import dnf
    import dnf.cli
    import dnf.const
    import dnf.exceptions
    import dnf.subject
    import dnf.util
    HAS_DNF = True
except ImportError:
    HAS_DNF = False
class DnfModule(YumDnf):
"""
DNF Ansible module back-end implementation
"""
def __init__(self, module):
    """Initialize the DNF back-end from the shared yum/dnf argument spec."""
    # This populates instance vars for all argument spec params
    super(DnfModule, self).__init__(module)

    self._ensure_dnf()
    # dnf uses per-cache lock pidfiles (glob pattern) rather than yum's
    # single /var/run/yum.pid
    self.lockfile = "/var/cache/dnf/*_lock.pid"
    self.pkg_mgr_name = "dnf"

    try:
        # WITH_MODULES only exists on dnf builds with modularity support
        self.with_modules = dnf.base.WITH_MODULES
    except AttributeError:
        self.with_modules = False

    # DNF specific args that are not part of YumDnf
    self.allowerasing = self.module.params['allowerasing']
    self.nobest = self.module.params['nobest']
def is_lockfile_pid_valid(self):
    """Concrete implementation of the YumDnf abstract hook.

    Always reports the lock holder as valid, deferring stale-lock cleanup
    to dnf itself.
    """
    # FIXME? it looks like DNF takes care of invalid lock files itself?
    # https://github.com/ansible/ansible/issues/57189
    return True
def _sanitize_dnf_error_msg_install(self, spec, error):
    """
    Normalize dnf error text for install operations.

    A "nothing matched" failure is rewritten into the consistent
    'No package <spec> available.' message; any other error passes
    through unchanged.
    """
    message = to_text(error)
    not_found_markers = (
        to_text("no package matched"),
        to_text("No match for argument:"),
    )
    if any(marker in message for marker in not_found_markers):
        return "No package {0} available.".format(spec)

    return error
def _sanitize_dnf_error_msg_remove(self, spec, error):
    """
    Classify dnf errors raised during removal.

    Returns a ("Is this actually a failure?", message) pair: a
    "nothing matched" error is benign (the package simply is not
    installed); everything else is a real failure.
    """
    message = to_native(error)
    if 'no package matched' in message or 'No match for argument:' in message:
        return (False, "{0} is not installed".format(spec))

    return (True, error)
def _package_dict(self, package):
    """Return a dictionary of information for the package."""
    # NOTE: This no longer contains the 'dnfstate' field because it is
    # already known based on the query type.
    result = {
        'name': package.name,
        'arch': package.arch,
        'epoch': str(package.epoch),
        'release': package.release,
        'version': package.version,
        'repo': package.repoid,
    }
    result['nevra'] = '{epoch}:{name}-{version}-{release}.{arch}'.format(**result)

    # an installtime of zero means the package is only known from a repo
    result['yumstate'] = 'available' if package.installtime == 0 else 'installed'

    return result
def _packagename_dict(self, packagename):
    """
    Return a dictionary of information for a package name string or None
    if the package name doesn't contain at least all NVR elements
    """

    # a trailing '.rpm' is cosmetic; drop it before parsing
    if packagename[-4:] == '.rpm':
        packagename = packagename[:-4]

    # This list was auto generated on a Fedora 28 system with the following one-liner
    # printf '[ '; for arch in $(ls /usr/lib/rpm/platform); do printf '"%s", ' ${arch%-linux}; done; printf ']\n'
    redhat_rpm_arches = [
        "aarch64", "alphaev56", "alphaev5", "alphaev67", "alphaev6", "alpha",
        "alphapca56", "amd64", "armv3l", "armv4b", "armv4l", "armv5tejl", "armv5tel",
        "armv5tl", "armv6hl", "armv6l", "armv7hl", "armv7hnl", "armv7l", "athlon",
        "geode", "i386", "i486", "i586", "i686", "ia32e", "ia64", "m68k", "mips64el",
        "mips64", "mips64r6el", "mips64r6", "mipsel", "mips", "mipsr6el", "mipsr6",
        "noarch", "pentium3", "pentium4", "ppc32dy4", "ppc64iseries", "ppc64le", "ppc64",
        "ppc64p7", "ppc64pseries", "ppc8260", "ppc8560", "ppciseries", "ppc", "ppcpseries",
        "riscv64", "s390", "s390x", "sh3", "sh4a", "sh4", "sh", "sparc64", "sparc64v",
        "sparc", "sparcv8", "sparcv9", "sparcv9v", "x86_64"
    ]

    # "<nevr>.<arch>" -- strip the trailing token only when it is a known rpm arch
    rpm_arch_re = re.compile(r'(.*)\.(.*)')
    # "<name>-[epoch:]<version>-<release>"
    rpm_nevr_re = re.compile(r'(\S+)-(?:(\d*):)?(.*)-(~?\w+[\w.+]*)')
    try:
        arch = None
        rpm_arch_match = rpm_arch_re.match(packagename)
        if rpm_arch_match:
            nevr, arch = rpm_arch_match.groups()
            if arch in redhat_rpm_arches:
                packagename = nevr
        rpm_nevr_match = rpm_nevr_re.match(packagename)
        if rpm_nevr_match:
            name, epoch, version, release = rpm_nevr_re.match(packagename).groups()
            # a "version" that does not start with digits is almost
            # certainly not a full NVR string -> give up rather than misparse
            if not version or not version.split('.')[0].isdigit():
                return None
        else:
            return None
    except AttributeError as e:
        self.module.fail_json(
            msg='Error attempting to parse package: %s, %s' % (packagename, to_native(e)),
            rc=1,
            results=[]
        )

    # rpm treats a missing epoch as zero
    if not epoch:
        epoch = "0"

    # the epoch can also be embedded in the name portion ("epoch:name")
    if ':' in name:
        epoch_name = name.split(":")

        epoch = epoch_name[0]
        name = ''.join(epoch_name[1:])

    result = {
        'name': name,
        'epoch': epoch,
        'release': release,
        'version': version,
    }

    return result
# Original implementation from yum.rpmUtils.miscutils (GPLv2+)
# http://yum.baseurl.org/gitweb?p=yum.git;a=blob;f=rpmUtils/miscutils.py
def _compare_evr(self, e1, v1, r1, e2, v2, r2):
    """Compare two (epoch, version, release) triples with rpm's labelCompare.

    Returns 1 when the first EVR is newer, 0 when both are the same
    version, and -1 when the second is newer.
    """
    # labelCompare expects strings everywhere; a missing epoch counts as '0'
    left = ('0' if e1 is None else str(e1), str(v1), str(r1))
    right = ('0' if e2 is None else str(e2), str(v2), str(r2))
    return dnf.rpm.rpm.labelCompare(left, right)
def _ensure_dnf(self):
    """Make the dnf python bindings available, respawning the module under
    a system interpreter that has them if necessary; fails the module when
    no suitable interpreter can be found."""
    if HAS_DNF:
        return

    system_interpreters = ['/usr/libexec/platform-python',
                           '/usr/bin/python3',
                           '/usr/bin/python2',
                           '/usr/bin/python']

    if not has_respawned():
        # probe well-known system Python locations for accessible bindings, favoring py3
        interpreter = probe_interpreters_for_module(system_interpreters, 'dnf')

        if interpreter:
            # respawn under the interpreter where the bindings should be found
            respawn_module(interpreter)
            # end of the line for this module, the process will exit here once the respawned module completes

    # done all we can do, something is just broken (auto-install isn't useful anymore with respawn, so it was removed)
    self.module.fail_json(
        msg="Could not import the dnf python module using {0} ({1}). "
            "Please install `python3-dnf` or `python2-dnf` package or ensure you have specified the "
            "correct ansible_python_interpreter. (attempted {2})"
            .format(sys.executable, sys.version.replace('\n', ''), system_interpreters),
        results=[]
    )
    def _configure_base(self, base, conf_file, disable_gpg_check, installroot='/'):
        """Configure the dnf Base object.

        :arg base: dnf.Base instance to configure (mutated in place)
        :arg conf_file: optional path to an alternate dnf configuration file
        :arg disable_gpg_check: when True, disable GPG signature checking
        :kwarg installroot: alternative root that packages are installed
            relative to (defaults to '/')
        """
        conf = base.conf
        # Change the configuration file path if provided, this must be done before conf.read() is called
        if conf_file:
            # Fail if we can't read the configuration file.
            if not os.access(conf_file, os.R_OK):
                self.module.fail_json(
                    msg="cannot read configuration file", conf_file=conf_file,
                    results=[],
                )
            else:
                conf.config_file_path = conf_file
        # Read the configuration file
        conf.read()
        # Turn off debug messages in the output
        conf.debuglevel = 0
        # Set whether to check gpg signatures
        conf.gpgcheck = not disable_gpg_check
        conf.localpkg_gpgcheck = not disable_gpg_check
        # Don't prompt for user confirmations
        conf.assumeyes = True
        # Set installroot
        conf.installroot = installroot
        # Load substitutions from the filesystem
        conf.substitutions.update_from_etc(installroot)
        # Handle different DNF versions immutable mutable datatypes and
        # dnf v1/v2/v3
        #
        # In DNF < 3.0 are lists, and modifying them works
        # In DNF >= 3.0 < 3.6 are lists, but modifying them doesn't work
        # In DNF >= 3.6 have been turned into tuples, to communicate that modifying them doesn't work
        #
        # https://www.happyassassin.net/2018/06/27/adams-debugging-adventures-the-immutable-mutable-object/
        #
        # Set excludes
        if self.exclude:
            # copy-then-reassign so the change sticks on DNF versions where the
            # attribute is immutable (see note above)
            _excludes = list(conf.exclude)
            _excludes.extend(self.exclude)
            conf.exclude = _excludes
        # Set disable_excludes
        if self.disable_excludes:
            _disable_excludes = list(conf.disable_excludes)
            if self.disable_excludes not in _disable_excludes:
                _disable_excludes.append(self.disable_excludes)
            conf.disable_excludes = _disable_excludes
        # Set releasever
        if self.releasever is not None:
            conf.substitutions['releasever'] = self.releasever
        # Set skip_broken (in dnf this is strict=0)
        if self.skip_broken:
            conf.strict = 0
        # Set best
        if self.nobest:
            conf.best = 0
        if self.download_only:
            conf.downloadonly = True
            if self.download_dir:
                conf.destdir = self.download_dir
        # Default in dnf upstream is true
        conf.clean_requirements_on_remove = self.autoremove
        # Default in dnf (and module default) is True
        conf.install_weak_deps = self.install_weak_deps
def _specify_repositories(self, base, disablerepo, enablerepo):
"""Enable and disable repositories matching the provided patterns."""
base.read_all_repos()
repos = base.repos
# Disable repositories
for repo_pattern in disablerepo:
if repo_pattern:
for repo in repos.get_matching(repo_pattern):
repo.disable()
# Enable repositories
for repo_pattern in enablerepo:
if repo_pattern:
for repo in repos.get_matching(repo_pattern):
repo.enable()
    def _base(self, conf_file, disable_gpg_check, disablerepo, enablerepo, installroot):
        """Return a fully configured dnf Base object.

        Configures the Base, initializes plugins (tolerating older dnf
        releases that lack the plugin hooks), applies repo enable/disable
        patterns, optionally refreshes the metadata cache, fills the sack,
        and installs bugfix/security advisory filters when requested.
        """
        base = dnf.Base()
        self._configure_base(base, conf_file, disable_gpg_check, installroot)
        try:
            # this method has been supported in dnf-4.2.17-6 or later
            # https://bugzilla.redhat.com/show_bug.cgi?id=1788212
            base.setup_loggers()
        except AttributeError:
            pass
        try:
            base.init_plugins(set(self.disable_plugin), set(self.enable_plugin))
            base.pre_configure_plugins()
        except AttributeError:
            pass  # older versions of dnf didn't require this and don't have these methods
        self._specify_repositories(base, disablerepo, enablerepo)
        try:
            base.configure_plugins()
        except AttributeError:
            pass  # older versions of dnf didn't require this and don't have these methods
        try:
            if self.update_cache:
                try:
                    base.update_cache()
                except dnf.exceptions.RepoError as e:
                    self.module.fail_json(
                        msg="{0}".format(to_text(e)),
                        results=[],
                        rc=1
                    )
            base.fill_sack(load_system_repo='auto')
        except dnf.exceptions.RepoError as e:
            self.module.fail_json(
                msg="{0}".format(to_text(e)),
                results=[],
                rc=1
            )
        # Restrict upgrade candidates to advisory-tagged packages when the
        # bugfix/security options are set.
        filters = []
        if self.bugfix:
            key = {'advisory_type__eq': 'bugfix'}
            filters.append(base.sack.query().upgrades().filter(**key))
        if self.security:
            key = {'advisory_type__eq': 'security'}
            filters.append(base.sack.query().upgrades().filter(**key))
        if filters:
            base._update_security_filters = filters
        return base
def list_items(self, command):
"""List package info based on the command."""
# Rename updates to upgrades
if command == 'updates':
command = 'upgrades'
# Return the corresponding packages
if command in ['installed', 'upgrades', 'available']:
results = [
self._package_dict(package)
for package in getattr(self.base.sack.query(), command)()]
# Return the enabled repository ids
elif command in ['repos', 'repositories']:
results = [
{'repoid': repo.id, 'state': 'enabled'}
for repo in self.base.repos.iter_enabled()]
# Return any matching packages
else:
packages = dnf.subject.Subject(command).get_best_query(self.base.sack)
results = [self._package_dict(package) for package in packages]
self.module.exit_json(msg="", results=results)
def _is_installed(self, pkg):
installed = self.base.sack.query().installed()
if installed.filter(name=pkg):
return True
else:
return False
def _is_newer_version_installed(self, pkg_name):
candidate_pkg = self._packagename_dict(pkg_name)
if not candidate_pkg:
# The user didn't provide a versioned rpm, so version checking is
# not required
return False
installed = self.base.sack.query().installed()
installed_pkg = installed.filter(name=candidate_pkg['name']).run()
if installed_pkg:
installed_pkg = installed_pkg[0]
# this looks weird but one is a dict and the other is a dnf.Package
evr_cmp = self._compare_evr(
installed_pkg.epoch, installed_pkg.version, installed_pkg.release,
candidate_pkg['epoch'], candidate_pkg['version'], candidate_pkg['release'],
)
if evr_cmp == 1:
return True
else:
return False
else:
return False
    def _mark_package_install(self, pkg_spec, upgrade=False):
        """Mark the package for install.

        :arg pkg_spec: package spec to mark in the transaction
        :kwarg upgrade: when True, prefer upgrading an installed package
            over a plain install
        :returns: a dict with 'failed', 'msg', 'failure' and (on most paths)
            'rc' keys describing the marking outcome; exceptions from dnf are
            converted into failed=True results rather than raised.
        """
        is_newer_version_installed = self._is_newer_version_installed(pkg_spec)
        is_installed = self._is_installed(pkg_spec)
        try:
            if is_newer_version_installed:
                if self.allow_downgrade:
                    # dnf only does allow_downgrade, we have to handle this ourselves
                    # because it allows a possibility for non-idempotent transactions
                    # on a system's package set (pending the yum repo has many old
                    # NVRs indexed)
                    if upgrade:
                        if is_installed:
                            self.base.upgrade(pkg_spec)
                        else:
                            self.base.install(pkg_spec)
                    else:
                        self.base.install(pkg_spec)
                else:  # Nothing to do, report back
                    pass
            elif is_installed:  # A potentially older (or same) version is installed
                if upgrade:
                    self.base.upgrade(pkg_spec)
                else:  # Nothing to do, report back
                    pass
            else:  # The package is not installed, simply install it
                self.base.install(pkg_spec)
            return {'failed': False, 'msg': '', 'failure': '', 'rc': 0}
        except dnf.exceptions.MarkingError as e:
            return {
                'failed': True,
                'msg': "No package {0} available.".format(pkg_spec),
                'failure': " ".join((pkg_spec, to_native(e))),
                'rc': 1,
                "results": []
            }
        except dnf.exceptions.DepsolveError as e:
            return {
                'failed': True,
                'msg': "Depsolve Error occured for package {0}.".format(pkg_spec),
                'failure': " ".join((pkg_spec, to_native(e))),
                'rc': 1,
                "results": []
            }
        except dnf.exceptions.Error as e:
            # "already installed" is success for our purposes, not an error
            if to_text("already installed") in to_text(e):
                return {'failed': False, 'msg': '', 'failure': ''}
            else:
                return {
                    'failed': True,
                    'msg': "Unknown Error occured for package {0}.".format(pkg_spec),
                    'failure': " ".join((pkg_spec, to_native(e))),
                    'rc': 1,
                    "results": []
                }
def _whatprovides(self, filepath):
available = self.base.sack.query().available()
pkg_spec = available.filter(provides=filepath).run()
if pkg_spec:
return pkg_spec[0].name
    def _parse_spec_group_file(self):
        """Classify self.names into package specs, groups, modules and files.

        URLs are fetched to local files first; names ending in ".rpm" are
        files; "@name" or path-like names are resolved against comps (or
        modularity when enabled); everything else is a plain package spec.

        :returns: tuple (pkg_specs, grp_specs, module_specs, filenames)
        """
        pkg_specs, grp_specs, module_specs, filenames = [], [], [], []
        already_loaded_comps = False  # Only load this if necessary, it's slow
        for name in self.names:
            if '://' in name:
                name = fetch_file(self.module, name)
                filenames.append(name)
            elif name.endswith(".rpm"):
                filenames.append(name)
            elif name.startswith("@") or ('/' in name):
                # like "dnf install /usr/bin/vi"
                if '/' in name:
                    pkg_spec = self._whatprovides(name)
                    if pkg_spec:
                        pkg_specs.append(pkg_spec)
                        continue
                if not already_loaded_comps:
                    self.base.read_comps()
                    already_loaded_comps = True
                grp_env_mdl_candidate = name[1:].strip()
                if self.with_modules:
                    mdl = self.module_base._get_modules(grp_env_mdl_candidate)
                    if mdl[0]:
                        module_specs.append(grp_env_mdl_candidate)
                    else:
                        grp_specs.append(grp_env_mdl_candidate)
                else:
                    grp_specs.append(grp_env_mdl_candidate)
            else:
                pkg_specs.append(name)
        return pkg_specs, grp_specs, module_specs, filenames
    def _update_only(self, pkgs):
        """Upgrade only the members of *pkgs* that are already installed.

        :arg pkgs: iterable of package specs (or package objects)
        :returns: the subset of *pkgs* skipped because it is not installed
        """
        not_installed = []
        for pkg in pkgs:
            if self._is_installed(pkg):
                try:
                    # NOTE(review): to_text() always returns text_type, so this
                    # condition looks always-true and the package_upgrade()
                    # branch appears unreachable -- confirm whether
                    # isinstance(pkg, text_type) was intended.
                    if isinstance(to_text(pkg), text_type):
                        self.base.upgrade(pkg)
                    else:
                        self.base.package_upgrade(pkg)
                except Exception as e:
                    self.module.fail_json(
                        msg="Error occured attempting update_only operation: {0}".format(to_native(e)),
                        results=[],
                        rc=1,
                    )
            else:
                not_installed.append(pkg)
        return not_installed
    def _install_remote_rpms(self, filenames):
        """Mark local/remote .rpm files in *filenames* for installation.

        The files are first registered with the dnf sack (API differs
        between dnf 1.x and >= 2), then marked for install, honoring
        self.update_only and self.allow_downgrade.
        """
        if int(dnf.__version__.split(".")[0]) >= 2:
            pkgs = list(sorted(self.base.add_remote_rpms(list(filenames)), reverse=True))
        else:
            pkgs = []
            try:
                for filename in filenames:
                    pkgs.append(self.base.add_remote_rpm(filename))
            except IOError as e:
                if to_text("Can not load RPM file") in to_text(e):
                    self.module.fail_json(
                        msg="Error occured attempting remote rpm install of package: {0}. {1}".format(filename, to_native(e)),
                        results=[],
                        rc=1,
                    )
        if self.update_only:
            self._update_only(pkgs)
        else:
            for pkg in pkgs:
                try:
                    # Only mark a downgrade when allow_downgrade permits it;
                    # packages that are not downgrades are always installed.
                    if self._is_newer_version_installed(self._package_dict(pkg)['nevra']):
                        if self.allow_downgrade:
                            self.base.package_install(pkg)
                    else:
                        self.base.package_install(pkg)
                except Exception as e:
                    self.module.fail_json(
                        msg="Error occured attempting remote rpm operation: {0}".format(to_native(e)),
                        results=[],
                        rc=1,
                    )
def _is_module_installed(self, module_spec):
if self.with_modules:
module_spec = module_spec.strip()
module_list, nsv = self.module_base._get_modules(module_spec)
enabled_streams = self.base._moduleContainer.getEnabledStream(nsv.name)
if enabled_streams:
if nsv.stream:
if nsv.stream in enabled_streams:
return True # The provided stream was found
else:
return False # The provided stream was not found
else:
return True # No stream provided, but module found
return False # seems like a sane default
    def ensure(self):
        """Converge the system to the requested package state.

        Dispatches on self.state to mark installs/upgrades/removals of
        packages, groups, environments, modules and rpm files, resolves the
        transaction (optionally download-only or check-mode), verifies GPG
        signatures, runs the transaction, and exits the module with the
        outcome via exit_json/fail_json.
        """
        response = {
            'msg': "",
            'changed': False,
            'results': [],
            'rc': 0
        }
        # Accumulate failures. Package management modules install what they can
        # and fail with a message about what they can't.
        failure_response = {
            'msg': "",
            'failures': [],
            'results': [],
            'rc': 1
        }
        # Autoremove is called alone
        # Jump to remove path where base.autoremove() is run
        if not self.names and self.autoremove:
            self.names = []
            self.state = 'absent'
        if self.names == ['*'] and self.state == 'latest':
            try:
                self.base.upgrade_all()
            except dnf.exceptions.DepsolveError as e:
                failure_response['msg'] = "Depsolve Error occured attempting to upgrade all packages"
                self.module.fail_json(**failure_response)
        else:
            pkg_specs, group_specs, module_specs, filenames = self._parse_spec_group_file()
            pkg_specs = [p.strip() for p in pkg_specs]
            filenames = [f.strip() for f in filenames]
            groups = []
            environments = []
            # Resolve each "@name" spec to a comps group first, then an
            # environment; anything unresolved is a hard failure.
            for group_spec in (g.strip() for g in group_specs):
                group = self.base.comps.group_by_pattern(group_spec)
                if group:
                    groups.append(group.id)
                else:
                    environment = self.base.comps.environment_by_pattern(group_spec)
                    if environment:
                        environments.append(environment.id)
                    else:
                        self.module.fail_json(
                            msg="No group {0} available.".format(group_spec),
                            results=[],
                        )
            if self.state in ['installed', 'present']:
                # Install files.
                self._install_remote_rpms(filenames)
                for filename in filenames:
                    response['results'].append("Installed {0}".format(filename))
                # Install modules
                if module_specs and self.with_modules:
                    for module in module_specs:
                        try:
                            if not self._is_module_installed(module):
                                response['results'].append("Module {0} installed.".format(module))
                            self.module_base.install([module])
                            self.module_base.enable([module])
                        except dnf.exceptions.MarkingErrors as e:
                            failure_response['failures'].append(' '.join((module, to_native(e))))
                # Install groups.
                for group in groups:
                    try:
                        group_pkg_count_installed = self.base.group_install(group, dnf.const.GROUP_PACKAGE_TYPES)
                        if group_pkg_count_installed == 0:
                            response['results'].append("Group {0} already installed.".format(group))
                        else:
                            response['results'].append("Group {0} installed.".format(group))
                    except dnf.exceptions.DepsolveError as e:
                        failure_response['msg'] = "Depsolve Error occured attempting to install group: {0}".format(group)
                        self.module.fail_json(**failure_response)
                    except dnf.exceptions.Error as e:
                        # In dnf 2.0 if all the mandatory packages in a group do
                        # not install, an error is raised. We want to capture
                        # this but still install as much as possible.
                        failure_response['failures'].append(" ".join((group, to_native(e))))
                for environment in environments:
                    try:
                        self.base.environment_install(environment, dnf.const.GROUP_PACKAGE_TYPES)
                    except dnf.exceptions.DepsolveError as e:
                        failure_response['msg'] = "Depsolve Error occured attempting to install environment: {0}".format(environment)
                        self.module.fail_json(**failure_response)
                    except dnf.exceptions.Error as e:
                        failure_response['failures'].append(" ".join((environment, to_native(e))))
                if module_specs and not self.with_modules:
                    # This means that the group or env wasn't found in comps
                    self.module.fail_json(
                        msg="No group {0} available.".format(module_specs[0]),
                        results=[],
                    )
                # Install packages.
                if self.update_only:
                    not_installed = self._update_only(pkg_specs)
                    for spec in not_installed:
                        response['results'].append("Packages providing %s not installed due to update_only specified" % spec)
                else:
                    for pkg_spec in pkg_specs:
                        install_result = self._mark_package_install(pkg_spec)
                        if install_result['failed']:
                            if install_result['msg']:
                                failure_response['msg'] += install_result['msg']
                            failure_response['failures'].append(self._sanitize_dnf_error_msg_install(pkg_spec, install_result['failure']))
                        else:
                            if install_result['msg']:
                                response['results'].append(install_result['msg'])
            elif self.state == 'latest':
                # "latest" is same as "installed" for filenames.
                self._install_remote_rpms(filenames)
                for filename in filenames:
                    response['results'].append("Installed {0}".format(filename))
                # Upgrade modules
                if module_specs and self.with_modules:
                    for module in module_specs:
                        try:
                            if self._is_module_installed(module):
                                response['results'].append("Module {0} upgraded.".format(module))
                            self.module_base.upgrade([module])
                        except dnf.exceptions.MarkingErrors as e:
                            failure_response['failures'].append(' '.join((module, to_native(e))))
                for group in groups:
                    try:
                        try:
                            self.base.group_upgrade(group)
                            response['results'].append("Group {0} upgraded.".format(group))
                        except dnf.exceptions.CompsError:
                            if not self.update_only:
                                # If not already installed, try to install.
                                group_pkg_count_installed = self.base.group_install(group, dnf.const.GROUP_PACKAGE_TYPES)
                                if group_pkg_count_installed == 0:
                                    response['results'].append("Group {0} already installed.".format(group))
                                else:
                                    response['results'].append("Group {0} installed.".format(group))
                    except dnf.exceptions.Error as e:
                        failure_response['failures'].append(" ".join((group, to_native(e))))
                for environment in environments:
                    try:
                        try:
                            self.base.environment_upgrade(environment)
                        except dnf.exceptions.CompsError:
                            # If not already installed, try to install.
                            self.base.environment_install(environment, dnf.const.GROUP_PACKAGE_TYPES)
                    except dnf.exceptions.DepsolveError as e:
                        failure_response['msg'] = "Depsolve Error occured attempting to install environment: {0}".format(environment)
                    except dnf.exceptions.Error as e:
                        failure_response['failures'].append(" ".join((environment, to_native(e))))
                if self.update_only:
                    not_installed = self._update_only(pkg_specs)
                    for spec in not_installed:
                        response['results'].append("Packages providing %s not installed due to update_only specified" % spec)
                else:
                    for pkg_spec in pkg_specs:
                        # best effort causes to install the latest package
                        # even if not previously installed
                        self.base.conf.best = True
                        install_result = self._mark_package_install(pkg_spec, upgrade=True)
                        if install_result['failed']:
                            if install_result['msg']:
                                failure_response['msg'] += install_result['msg']
                            failure_response['failures'].append(self._sanitize_dnf_error_msg_install(pkg_spec, install_result['failure']))
                        else:
                            if install_result['msg']:
                                response['results'].append(install_result['msg'])
            else:
                # state == absent
                if filenames:
                    self.module.fail_json(
                        msg="Cannot remove paths -- please specify package name.",
                        results=[],
                    )
                # Remove modules
                if module_specs and self.with_modules:
                    for module in module_specs:
                        try:
                            if self._is_module_installed(module):
                                response['results'].append("Module {0} removed.".format(module))
                            self.module_base.remove([module])
                            self.module_base.disable([module])
                            self.module_base.reset([module])
                        except dnf.exceptions.MarkingErrors as e:
                            failure_response['failures'].append(' '.join((module, to_native(e))))
                for group in groups:
                    try:
                        self.base.group_remove(group)
                    except dnf.exceptions.CompsError:
                        # Group is already uninstalled.
                        pass
                    except AttributeError:
                        # Group either isn't installed or wasn't marked installed at install time
                        # because of DNF bug
                        #
                        # This is necessary until the upstream dnf API bug is fixed where installing
                        # a group via the dnf API doesn't actually mark the group as installed
                        # https://bugzilla.redhat.com/show_bug.cgi?id=1620324
                        pass
                for environment in environments:
                    try:
                        self.base.environment_remove(environment)
                    except dnf.exceptions.CompsError:
                        # Environment is already uninstalled.
                        pass
                # NOTE(review): 'installed' is never referenced below; looks
                # like a leftover -- confirm before removing.
                installed = self.base.sack.query().installed()
                for pkg_spec in pkg_specs:
                    # short-circuit installed check for wildcard matching
                    if '*' in pkg_spec:
                        try:
                            self.base.remove(pkg_spec)
                        except dnf.exceptions.MarkingError as e:
                            is_failure, handled_remove_error = self._sanitize_dnf_error_msg_remove(pkg_spec, to_native(e))
                            if is_failure:
                                failure_response['failures'].append('{0} - {1}'.format(pkg_spec, to_native(e)))
                            else:
                                response['results'].append(handled_remove_error)
                        continue
                    installed_pkg = dnf.subject.Subject(pkg_spec).get_best_query(
                        sack=self.base.sack).installed().run()
                    for pkg in installed_pkg:
                        self.base.remove(str(pkg))
                # Like the dnf CLI we want to allow recursive removal of dependent
                # packages
                self.allowerasing = True
                if self.autoremove:
                    self.base.autoremove()
        try:
            if not self.base.resolve(allow_erasing=self.allowerasing):
                if failure_response['failures']:
                    failure_response['msg'] = 'Failed to install some of the specified packages'
                    self.module.fail_json(**failure_response)
                response['msg'] = "Nothing to do"
                self.module.exit_json(**response)
            else:
                response['changed'] = True
                # If packages got installed/removed, add them to the results.
                # We do this early so we can use it for both check_mode and not.
                if self.download_only:
                    install_action = 'Downloaded'
                else:
                    install_action = 'Installed'
                for package in self.base.transaction.install_set:
                    response['results'].append("{0}: {1}".format(install_action, package))
                for package in self.base.transaction.remove_set:
                    response['results'].append("Removed: {0}".format(package))
                if failure_response['failures']:
                    failure_response['msg'] = 'Failed to install some of the specified packages'
                    self.module.fail_json(**failure_response)
                if self.module.check_mode:
                    response['msg'] = "Check mode: No changes made, but would have if not in check mode"
                    self.module.exit_json(**response)
                try:
                    if self.download_only and self.download_dir and self.base.conf.destdir:
                        dnf.util.ensure_dir(self.base.conf.destdir)
                        self.base.repos.all().pkgdir = self.base.conf.destdir
                    self.base.download_packages(self.base.transaction.install_set)
                except dnf.exceptions.DownloadError as e:
                    self.module.fail_json(
                        msg="Failed to download packages: {0}".format(to_text(e)),
                        results=[],
                    )
                # Validate GPG. This is NOT done in dnf.Base (it's done in the
                # upstream CLI subclass of dnf.Base)
                if not self.disable_gpg_check:
                    for package in self.base.transaction.install_set:
                        fail = False
                        gpgres, gpgerr = self.base._sig_check_pkg(package)
                        if gpgres == 0:  # validated successfully
                            continue
                        elif gpgres == 1:  # validation failed, install cert?
                            try:
                                self.base._get_key_for_package(package)
                            except dnf.exceptions.Error as e:
                                fail = True
                        else:  # fatal error
                            fail = True
                        if fail:
                            msg = 'Failed to validate GPG signature for {0}'.format(package)
                            self.module.fail_json(msg)
                if self.download_only:
                    # No further work left to do, and the results were already updated above.
                    # Just return them.
                    self.module.exit_json(**response)
                else:
                    self.base.do_transaction()
                    if failure_response['failures']:
                        failure_response['msg'] = 'Failed to install some of the specified packages'
                        self.module.exit_json(**response)
                    self.module.exit_json(**response)
        except dnf.exceptions.DepsolveError as e:
            failure_response['msg'] = "Depsolve Error occured: {0}".format(to_native(e))
            self.module.fail_json(**failure_response)
        except dnf.exceptions.Error as e:
            if to_text("already installed") in to_text(e):
                response['changed'] = False
                response['results'].append("Package already installed: {0}".format(to_native(e)))
                self.module.exit_json(**response)
            else:
                failure_response['msg'] = "Unknown Error occured: {0}".format(to_native(e))
                self.module.fail_json(**failure_response)
    @staticmethod
    def has_dnf():
        """Return True when the dnf Python bindings were importable at load time."""
        return HAS_DNF
    def run(self):
        """The main function.

        Validates dnf-version-dependent options, handles the cache-only and
        list-only short circuits, builds the dnf Base, and delegates the
        actual state change to ensure().  Exits the module via
        exit_json/fail_json on the short-circuit paths.
        """
        # Check if autoremove is called correctly
        if self.autoremove:
            if LooseVersion(dnf.__version__) < LooseVersion('2.0.1'):
                self.module.fail_json(
                    msg="Autoremove requires dnf>=2.0.1. Current dnf version is %s" % dnf.__version__,
                    results=[],
                )
        # Check if download_dir is called correctly
        if self.download_dir:
            if LooseVersion(dnf.__version__) < LooseVersion('2.6.2'):
                self.module.fail_json(
                    msg="download_dir requires dnf>=2.6.2. Current dnf version is %s" % dnf.__version__,
                    results=[],
                )
        if self.update_cache and not self.names and not self.list:
            self.base = self._base(
                self.conf_file, self.disable_gpg_check, self.disablerepo,
                self.enablerepo, self.installroot
            )
            self.module.exit_json(
                msg="Cache updated",
                changed=False,
                results=[],
                rc=0
            )
        # Set state as installed by default
        # This is not set in AnsibleModule() because the following shouldn't happen
        # - dnf: autoremove=yes state=installed
        if self.state is None:
            self.state = 'installed'
        if self.list:
            self.base = self._base(
                self.conf_file, self.disable_gpg_check, self.disablerepo,
                self.enablerepo, self.installroot
            )
            self.list_items(self.list)
        else:
            # Note: base takes a long time to run so we want to check for failure
            # before running it.
            if not dnf.util.am_i_root():
                self.module.fail_json(
                    msg="This command has to be run under the root user.",
                    results=[],
                )
            self.base = self._base(
                self.conf_file, self.disable_gpg_check, self.disablerepo,
                self.enablerepo, self.installroot
            )
            if self.with_modules:
                self.module_base = dnf.module.module_base.ModuleBase(self.base)
            self.ensure()
def main():
    """Module entry point: build the AnsibleModule and run the dnf logic."""
    # state=installed name=pkgspec
    # state=removed name=pkgspec
    # state=latest name=pkgspec
    #
    # informational commands:
    #   list=installed
    #   list=updates
    #   list=available
    #   list=repos
    #   list=pkgspec
    # Extend yumdnf_argument_spec with dnf-specific features that will never be
    # backported to yum because yum is now in "maintenance mode" upstream
    yumdnf_argument_spec['argument_spec']['allowerasing'] = dict(default=False, type='bool')
    yumdnf_argument_spec['argument_spec']['nobest'] = dict(default=False, type='bool')
    module = AnsibleModule(
        **yumdnf_argument_spec
    )
    module_implementation = DnfModule(module)
    try:
        module_implementation.run()
    except dnf.exceptions.RepoError as de:
        # Repo sync failures are reported cleanly rather than as a traceback
        module.fail_json(
            msg="Failed to synchronize repodata: {0}".format(to_native(de)),
            rc=1,
            results=[],
            changed=False
        )
if __name__ == '__main__':
    main()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 69,397 |
Feature: yum --cacheonly
|
<!--- Verify first that your feature was not already discussed on GitHub -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
<!--- Describe the new feature/improvement briefly below -->
Add a --cacheonly option to the ansible yum module, similar to the --downloadonly option.
##### ISSUE TYPE
- Feature Idea
##### COMPONENT NAME
<!--- Write the short name of the module, plugin, task or feature below, use your best guess if unsure -->
lib/ansible/modules/packaging/os/yum.py
##### ADDITIONAL INFORMATION
<!--- Describe how the feature would be used, why it is needed and what it would solve -->
We have a group of Oracle servers that have no repository management server. They update directly from Oracle's repositories. Our thought is to do a "yum update --downloadonly" on all dev and prod servers. Then we can update the dev servers first using "yum update --cacheonly" to test the updates, and then run the same command on the prod servers at a later time to make them consistent with dev.
<!--- Paste example playbooks or commands between quotes below -->
```yaml
```
<!--- HINT: You can also paste gist.github.com links for larger files -->
|
https://github.com/ansible/ansible/issues/69397
|
https://github.com/ansible/ansible/pull/73820
|
0c101f3f769b9aaed1a99f8858197f4d28b11034
|
fdee5ca16dd169b83997b6c82130a2d9c7fe3688
| 2020-05-08T18:48:18Z |
python
| 2021-04-16T15:08:52Z |
lib/ansible/modules/yum.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Red Hat, Inc
# Written by Seth Vidal <skvidal at fedoraproject.org>
# Copyright: (c) 2014, Epic Games, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: yum
version_added: historical
short_description: Manages packages with the I(yum) package manager
description:
- Installs, upgrade, downgrades, removes, and lists packages and groups with the I(yum) package manager.
- This module only works on Python 2. If you require Python 3 support see the M(ansible.builtin.dnf) module.
options:
use_backend:
description:
- This module supports C(yum) (as it always has), this is known as C(yum3)/C(YUM3)/C(yum-deprecated) by
upstream yum developers. As of Ansible 2.7+, this module also supports C(YUM4), which is the
"new yum" and it has an C(dnf) backend.
- By default, this module will select the backend based on the C(ansible_pkg_mgr) fact.
default: "auto"
choices: [ auto, yum, yum4, dnf ]
type: str
version_added: "2.7"
name:
description:
- A package name or package specifier with version, like C(name-1.0).
- Comparison operators for package version are valid here C(>), C(<), C(>=), C(<=). Example - C(name>=1.0)
- If a previous version is specified, the task also needs to turn C(allow_downgrade) on.
See the C(allow_downgrade) documentation for caveats with downgrading packages.
- When using state=latest, this can be C('*') which means run C(yum -y update).
- You can also pass a url or a local path to a rpm file (using state=present).
To operate on several packages this can accept a comma separated string of packages or (as of 2.0) a list of packages.
aliases: [ pkg ]
type: list
elements: str
exclude:
description:
- Package name(s) to exclude when state=present, or latest
type: list
elements: str
version_added: "2.0"
list:
description:
- "Package name to run the equivalent of yum list --show-duplicates <package> against. In addition to listing packages,
use can also list the following: C(installed), C(updates), C(available) and C(repos)."
- This parameter is mutually exclusive with C(name).
type: str
state:
description:
- Whether to install (C(present) or C(installed), C(latest)), or remove (C(absent) or C(removed)) a package.
- C(present) and C(installed) will simply ensure that a desired package is installed.
- C(latest) will update the specified package if it's not of the latest available version.
- C(absent) and C(removed) will remove the specified package.
- Default is C(None), however in effect the default action is C(present) unless the C(autoremove) option is
enabled for this module, then C(absent) is inferred.
type: str
choices: [ absent, installed, latest, present, removed ]
enablerepo:
description:
- I(Repoid) of repositories to enable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a C(",").
- As of Ansible 2.7, this can alternatively be a list instead of C(",")
separated string
type: list
elements: str
version_added: "0.9"
disablerepo:
description:
- I(Repoid) of repositories to disable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a C(",").
- As of Ansible 2.7, this can alternatively be a list instead of C(",")
separated string
type: list
elements: str
version_added: "0.9"
conf_file:
description:
- The remote yum configuration file to use for the transaction.
type: str
version_added: "0.6"
disable_gpg_check:
description:
- Whether to disable the GPG checking of signatures of packages being
installed. Has an effect only if state is I(present) or I(latest).
type: bool
default: "no"
version_added: "1.2"
skip_broken:
description:
      - Skip packages with broken dependencies (depsolve) that are causing problems.
type: bool
default: "no"
version_added: "2.3"
update_cache:
description:
- Force yum to check if cache is out of date and redownload if needed.
Has an effect only if state is I(present) or I(latest).
type: bool
default: "no"
aliases: [ expire-cache ]
version_added: "1.9"
validate_certs:
description:
- This only applies if using a https url as the source of the rpm. e.g. for localinstall. If set to C(no), the SSL certificates will not be validated.
    - This should only be set to C(no) when used on personally controlled sites using self-signed certificates, as it avoids verifying the source site.
- Prior to 2.1 the code worked as if this was set to C(yes).
type: bool
default: "yes"
version_added: "2.1"
update_only:
description:
- When using latest, only update installed packages. Do not install packages.
- Has an effect only if state is I(latest)
default: "no"
type: bool
version_added: "2.5"
installroot:
description:
- Specifies an alternative installroot, relative to which all packages
will be installed.
default: "/"
type: str
version_added: "2.3"
security:
description:
- If set to C(yes), and C(state=latest) then only installs updates that have been marked security related.
type: bool
default: "no"
version_added: "2.4"
bugfix:
description:
- If set to C(yes), and C(state=latest) then only installs updates that have been marked bugfix related.
default: "no"
type: bool
version_added: "2.6"
allow_downgrade:
description:
- Specify if the named package and version is allowed to downgrade
a maybe already installed higher version of that package.
Note that setting allow_downgrade=True can make this module
behave in a non-idempotent way. The task could end up with a set
of packages that does not match the complete list of specified
packages to install (because dependencies between the downgraded
package and others can cause changes to the packages which were
in the earlier transaction).
type: bool
default: "no"
version_added: "2.4"
enable_plugin:
description:
- I(Plugin) name to enable for the install/update operation.
The enabled plugin will not persist beyond the transaction.
type: list
elements: str
version_added: "2.5"
disable_plugin:
description:
- I(Plugin) name to disable for the install/update operation.
The disabled plugins will not persist beyond the transaction.
type: list
elements: str
version_added: "2.5"
releasever:
description:
- Specifies an alternative release from which all packages will be
installed.
type: str
version_added: "2.7"
autoremove:
description:
- If C(yes), removes all "leaf" packages from the system that were originally
installed as dependencies of user-installed packages but which are no longer
required by any such package. Should be used alone or when state is I(absent)
- "NOTE: This feature requires yum >= 3.4.3 (RHEL/CentOS 7+)"
type: bool
default: "no"
version_added: "2.7"
disable_excludes:
description:
- Disable the excludes defined in YUM config files.
- If set to C(all), disables all excludes.
- If set to C(main), disable excludes defined in [main] in yum.conf.
- If set to C(repoid), disable excludes defined for given repo id.
type: str
version_added: "2.7"
download_only:
description:
- Only download the packages, do not install them.
default: "no"
type: bool
version_added: "2.7"
lock_timeout:
description:
- Amount of time to wait for the yum lockfile to be freed.
required: false
default: 30
type: int
version_added: "2.8"
install_weak_deps:
description:
- Will also install all packages linked by a weak dependency relation.
- "NOTE: This feature requires yum >= 4 (RHEL/CentOS 8+)"
type: bool
default: "yes"
version_added: "2.8"
download_dir:
description:
- Specifies an alternate directory to store packages.
- Has an effect only if I(download_only) is specified.
type: str
version_added: "2.8"
install_repoquery:
description:
- If repoquery is not available, install yum-utils. If the system is
registered to RHN or an RHN Satellite, repoquery allows for querying
all channels assigned to the system. It is also required to use the
'list' parameter.
- "NOTE: This will run and be logged as a separate yum transation which
takes place before any other installation or removal."
- "NOTE: This will use the system's default enabled repositories without
regard for disablerepo/enablerepo given to the module."
required: false
version_added: "1.5"
default: "yes"
type: bool
notes:
- When used with a `loop:` each package will be processed individually,
it is much more efficient to pass the list directly to the `name` option.
- In versions prior to 1.9.2 this module installed and removed each package
given to the yum module separately. This caused problems when packages
specified by filename or url had to be installed or removed together. In
1.9.2 this was fixed so that packages are installed in one yum
transaction. However, if one of the packages adds a new yum repository
that the other packages come from (such as epel-release) then that package
needs to be installed in a separate task. This mimics yum's command line
behaviour.
- 'Yum itself has two types of groups. "Package groups" are specified in the
rpm itself while "environment groups" are specified in a separate file
(usually by the distribution). Unfortunately, this division becomes
apparent to ansible users because ansible needs to operate on the group
of packages in a single transaction and yum requires groups to be specified
in different ways when used in that way. Package groups are specified as
"@development-tools" and environment groups are "@^gnome-desktop-environment".
Use the "yum group list hidden ids" command to see which category of group the group
you want to install falls into.'
- 'The yum module does not support clearing yum cache in an idempotent way, so it
was decided not to implement it, the only method is to use command and call the yum
command directly, namely "command: yum clean all"
https://github.com/ansible/ansible/pull/31450#issuecomment-352889579'
# informational: requirements for nodes
requirements:
- yum
author:
- Ansible Core Team
- Seth Vidal (@skvidal)
- Eduard Snesarev (@verm666)
- Berend De Schouwer (@berenddeschouwer)
- Abhijeet Kasurde (@Akasurde)
- Adam Miller (@maxamillion)
'''
EXAMPLES = '''
- name: Install the latest version of Apache
yum:
name: httpd
state: latest
- name: Install Apache >= 2.4
yum:
name: httpd>=2.4
state: present
- name: Install a list of packages (suitable replacement for 2.11 loop deprecation warning)
yum:
name:
- nginx
- postgresql
- postgresql-server
state: present
- name: Install a list of packages with a list variable
yum:
name: "{{ packages }}"
vars:
packages:
- httpd
- httpd-tools
- name: Remove the Apache package
yum:
name: httpd
state: absent
- name: Install the latest version of Apache from the testing repo
yum:
name: httpd
enablerepo: testing
state: present
- name: Install one specific version of Apache
yum:
name: httpd-2.2.29-1.4.amzn1
state: present
- name: Upgrade all packages
yum:
name: '*'
state: latest
- name: Upgrade all packages, excluding kernel & foo related packages
yum:
name: '*'
state: latest
exclude: kernel*,foo*
- name: Install the nginx rpm from a remote repo
yum:
name: http://nginx.org/packages/centos/6/noarch/RPMS/nginx-release-centos-6-0.el6.ngx.noarch.rpm
state: present
- name: Install nginx rpm from a local file
yum:
name: /usr/local/src/nginx-release-centos-6-0.el6.ngx.noarch.rpm
state: present
- name: Install the 'Development tools' package group
yum:
name: "@Development tools"
state: present
- name: Install the 'Gnome desktop' environment group
yum:
name: "@^gnome-desktop-environment"
state: present
- name: List ansible packages and register result to print with debug later
yum:
list: ansible
register: result
- name: Install package with multiple repos enabled
yum:
name: sos
enablerepo: "epel,ol7_latest"
- name: Install package with multiple repos disabled
yum:
name: sos
disablerepo: "epel,ol7_latest"
- name: Download the nginx package but do not install it
yum:
name:
- nginx
state: latest
download_only: true
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.respawn import has_respawned, respawn_module
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.urls import fetch_url
from ansible.module_utils.yumdnf import YumDnf, yumdnf_argument_spec
import errno
import os
import re
import sys
import tempfile
try:
import rpm
HAS_RPM_PYTHON = True
except ImportError:
HAS_RPM_PYTHON = False
try:
import yum
HAS_YUM_PYTHON = True
except ImportError:
HAS_YUM_PYTHON = False
try:
from yum.misc import find_unfinished_transactions, find_ts_remaining
from rpmUtils.miscutils import splitFilename, compareEVR
transaction_helpers = True
except ImportError:
transaction_helpers = False
from contextlib import contextmanager
from ansible.module_utils.urls import fetch_file
def_qf = "%{epoch}:%{name}-%{version}-%{release}.%{arch}"
rpmbin = None
class YumModule(YumDnf):
"""
Yum Ansible module back-end implementation
"""
def __init__(self, module):
# state=installed name=pkgspec
# state=removed name=pkgspec
# state=latest name=pkgspec
#
# informational commands:
# list=installed
# list=updates
# list=available
# list=repos
# list=pkgspec
# This populates instance vars for all argument spec params
super(YumModule, self).__init__(module)
self.pkg_mgr_name = "yum"
self.lockfile = '/var/run/yum.pid'
self._yum_base = None
def _enablerepos_with_error_checking(self):
# NOTE: This seems unintuitive, but it mirrors yum's CLI behavior
if len(self.enablerepo) == 1:
try:
self.yum_base.repos.enableRepo(self.enablerepo[0])
except yum.Errors.YumBaseError as e:
if u'repository not found' in to_text(e):
self.module.fail_json(msg="Repository %s not found." % self.enablerepo[0])
else:
raise e
else:
for rid in self.enablerepo:
try:
self.yum_base.repos.enableRepo(rid)
except yum.Errors.YumBaseError as e:
if u'repository not found' in to_text(e):
self.module.warn("Repository %s not found." % rid)
else:
raise e
    def is_lockfile_pid_valid(self):
        """Return True if the yum lockfile names a PID that is still running.

        Side effect: a stale lockfile (unparsable contents, our own PID, a
        zombie, or a dead process) is unlinked and False is returned so the
        caller can proceed to take the lock.
        """
        try:
            try:
                with open(self.lockfile, 'r') as f:
                    oldpid = int(f.readline())
            except ValueError:
                # invalid data - corrupt lockfile, safe to remove
                os.unlink(self.lockfile)
                return False

            if oldpid == os.getpid():
                # that's us? - leftover from this very process
                os.unlink(self.lockfile)
                return False

            try:
                with open("/proc/%d/stat" % oldpid, 'r') as f:
                    stat = f.readline()

                if stat.split()[2] == 'Z':
                    # Zombie
                    os.unlink(self.lockfile)
                    return False
            except IOError:
                # either /proc is not mounted or the process is already dead
                try:
                    # check the state of the process (signal 0 = existence test)
                    os.kill(oldpid, 0)
                except OSError as e:
                    if e.errno == errno.ESRCH:
                        # No such process
                        os.unlink(self.lockfile)
                        return False

                    self.module.fail_json(msg="Unable to check PID %s in %s: %s" % (oldpid, self.lockfile, to_native(e)))
        except (IOError, OSError) as e:
            # lockfile disappeared?
            return False

        # another copy seems to be running
        return True
    @property
    def yum_base(self):
        """Lazily construct and cache a configured ``yum.YumBase`` object.

        Plugins, releasever, installroot, conf file, the cache directory
        for non-root users, excludes and repo enable/disable state are all
        applied only on first access; later accesses return the cached
        instance unchanged.
        """
        if self._yum_base:
            return self._yum_base
        else:
            # Only init once
            self._yum_base = yum.YumBase()
            self._yum_base.preconf.debuglevel = 0
            self._yum_base.preconf.errorlevel = 0
            self._yum_base.preconf.plugins = True
            self._yum_base.preconf.enabled_plugins = self.enable_plugin
            self._yum_base.preconf.disabled_plugins = self.disable_plugin
            if self.releasever:
                self._yum_base.preconf.releasever = self.releasever
            if self.installroot != '/':
                # do not setup installroot by default, because of error
                # CRITICAL:yum.cli:Config Error: Error accessing file for config file:////etc/yum.conf
                # in old yum version (like in CentOS 6.6)
                self._yum_base.preconf.root = self.installroot
                self._yum_base.conf.installroot = self.installroot
            if self.conf_file and os.path.exists(self.conf_file):
                self._yum_base.preconf.fn = self.conf_file
            if os.geteuid() != 0:
                # non-root users cannot write the system cache dir
                if hasattr(self._yum_base, 'setCacheDir'):
                    self._yum_base.setCacheDir()
                else:
                    cachedir = yum.misc.getCacheDir()
                    self._yum_base.repos.setCacheDir(cachedir)
                    self._yum_base.conf.cache = 0
            if self.disable_excludes:
                self._yum_base.conf.disable_excludes = self.disable_excludes

            # A sideeffect of accessing conf is that the configuration is
            # loaded and plugins are discovered
            self.yum_base.conf

            try:
                for rid in self.disablerepo:
                    self.yum_base.repos.disableRepo(rid)

                self._enablerepos_with_error_checking()

            except Exception as e:
                self.module.fail_json(msg="Failure talking to yum: %s" % to_native(e))

        return self._yum_base
def po_to_envra(self, po):
if hasattr(po, 'ui_envra'):
return po.ui_envra
return '%s:%s-%s-%s.%s' % (po.epoch, po.name, po.version, po.release, po.arch)
    def is_group_env_installed(self, name):
        """Return True if *name* (an '@group' or '@^env-group' spec) is installed."""
        name_lower = name.lower()

        if yum.__version_info__ >= (3, 4):
            # yum >= 3.4 can also report environment groups
            groups_list = self.yum_base.doGroupLists(return_evgrps=True)
        else:
            groups_list = self.yum_base.doGroupLists()

        # list of the installed groups on the first index
        groups = groups_list[0]
        for group in groups:
            # endswith() lets the comparison ignore the leading '@' marker
            if name_lower.endswith(group.name.lower()) or name_lower.endswith(group.groupid.lower()):
                return True

        if yum.__version_info__ >= (3, 4):
            # list of the installed env_groups on the third index
            envs = groups_list[2]
            for env in envs:
                # likewise ignores the leading '@^' environment marker
                if name_lower.endswith(env.name.lower()) or name_lower.endswith(env.environmentid.lower()):
                    return True

        return False
    def is_installed(self, repoq, pkgspec, qf=None, is_pkg=False):
        """Return a list of installed envra strings matching *pkgspec*.

        When *repoq* is falsy the yum python API is queried directly;
        otherwise ``rpm -q`` is invoked and its output screen-scraped.
        *is_pkg* suppresses the fallback provides/whatprovides lookups.
        """
        if qf is None:
            qf = "%{epoch}:%{name}-%{version}-%{release}.%{arch}\n"

        if not repoq:
            pkgs = []
            try:
                e, m, _ = self.yum_base.rpmdb.matchPackageNames([pkgspec])
                pkgs = e + m
                if not pkgs and not is_pkg:
                    # nothing by name; maybe pkgspec is a capability/provide
                    pkgs.extend(self.yum_base.returnInstalledPackagesByDep(pkgspec))
            except Exception as e:
                self.module.fail_json(msg="Failure talking to yum: %s" % to_native(e))

            return [self.po_to_envra(p) for p in pkgs]

        else:
            global rpmbin
            if not rpmbin:
                rpmbin = self.module.get_bin_path('rpm', required=True)

            cmd = [rpmbin, '-q', '--qf', qf, pkgspec]
            if self.installroot != '/':
                cmd.extend(['--root', self.installroot])
            # rpm localizes messages and we're screen scraping so make sure we use
            # the C locale
            lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
            rc, out, err = self.module.run_command(cmd, environ_update=lang_env)
            if rc != 0 and 'is not installed' not in out:
                self.module.fail_json(msg='Error from rpm: %s: %s' % (cmd, err))
            if 'is not installed' in out:
                out = ''

            # rpm prints '(none)' for a missing epoch; normalize to '0'
            pkgs = [p for p in out.replace('(none)', '0').split('\n') if p.strip()]
            if not pkgs and not is_pkg:
                # fall back to asking what provides the spec
                cmd = [rpmbin, '-q', '--qf', qf, '--whatprovides', pkgspec]
                if self.installroot != '/':
                    cmd.extend(['--root', self.installroot])
                rc2, out2, err2 = self.module.run_command(cmd, environ_update=lang_env)
            else:
                rc2, out2, err2 = (0, '', '')

            if rc2 != 0 and 'no package provides' not in out2:
                self.module.fail_json(msg='Error from rpm: %s: %s' % (cmd, err + err2))
            if 'no package provides' in out2:
                out2 = ''
            pkgs += [p for p in out2.replace('(none)', '0').split('\n') if p.strip()]
            return pkgs

        return []
def is_available(self, repoq, pkgspec, qf=def_qf):
if not repoq:
pkgs = []
try:
e, m, _ = self.yum_base.pkgSack.matchPackageNames([pkgspec])
pkgs = e + m
if not pkgs:
pkgs.extend(self.yum_base.returnPackagesByDep(pkgspec))
except Exception as e:
self.module.fail_json(msg="Failure talking to yum: %s" % to_native(e))
return [self.po_to_envra(p) for p in pkgs]
else:
myrepoq = list(repoq)
r_cmd = ['--disablerepo', ','.join(self.disablerepo)]
myrepoq.extend(r_cmd)
r_cmd = ['--enablerepo', ','.join(self.enablerepo)]
myrepoq.extend(r_cmd)
if self.releasever:
myrepoq.extend('--releasever=%s' % self.releasever)
cmd = myrepoq + ["--qf", qf, pkgspec]
rc, out, err = self.module.run_command(cmd)
if rc == 0:
return [p for p in out.split('\n') if p.strip()]
else:
self.module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err))
return []
def is_update(self, repoq, pkgspec, qf=def_qf):
if not repoq:
pkgs = []
updates = []
try:
pkgs = self.yum_base.returnPackagesByDep(pkgspec) + \
self.yum_base.returnInstalledPackagesByDep(pkgspec)
if not pkgs:
e, m, _ = self.yum_base.pkgSack.matchPackageNames([pkgspec])
pkgs = e + m
updates = self.yum_base.doPackageLists(pkgnarrow='updates').updates
except Exception as e:
self.module.fail_json(msg="Failure talking to yum: %s" % to_native(e))
retpkgs = (pkg for pkg in pkgs if pkg in updates)
return set(self.po_to_envra(p) for p in retpkgs)
else:
myrepoq = list(repoq)
r_cmd = ['--disablerepo', ','.join(self.disablerepo)]
myrepoq.extend(r_cmd)
r_cmd = ['--enablerepo', ','.join(self.enablerepo)]
myrepoq.extend(r_cmd)
if self.releasever:
myrepoq.extend('--releasever=%s' % self.releasever)
cmd = myrepoq + ["--pkgnarrow=updates", "--qf", qf, pkgspec]
rc, out, err = self.module.run_command(cmd)
if rc == 0:
return set(p for p in out.split('\n') if p.strip())
else:
self.module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err))
return set()
def what_provides(self, repoq, req_spec, qf=def_qf):
if not repoq:
pkgs = []
try:
try:
pkgs = self.yum_base.returnPackagesByDep(req_spec) + \
self.yum_base.returnInstalledPackagesByDep(req_spec)
except Exception as e:
# If a repo with `repo_gpgcheck=1` is added and the repo GPG
# key was never accepted, querying this repo will throw an
# error: 'repomd.xml signature could not be verified'. In that
# situation we need to run `yum -y makecache` which will accept
# the key and try again.
if 'repomd.xml signature could not be verified' in to_native(e):
if self.releasever:
self.module.run_command(self.yum_basecmd + ['makecache'] + ['--releasever=%s' % self.releasever])
else:
self.module.run_command(self.yum_basecmd + ['makecache'])
pkgs = self.yum_base.returnPackagesByDep(req_spec) + \
self.yum_base.returnInstalledPackagesByDep(req_spec)
else:
raise
if not pkgs:
e, m, _ = self.yum_base.pkgSack.matchPackageNames([req_spec])
pkgs.extend(e)
pkgs.extend(m)
e, m, _ = self.yum_base.rpmdb.matchPackageNames([req_spec])
pkgs.extend(e)
pkgs.extend(m)
except Exception as e:
self.module.fail_json(msg="Failure talking to yum: %s" % to_native(e))
return set(self.po_to_envra(p) for p in pkgs)
else:
myrepoq = list(repoq)
r_cmd = ['--disablerepo', ','.join(self.disablerepo)]
myrepoq.extend(r_cmd)
r_cmd = ['--enablerepo', ','.join(self.enablerepo)]
myrepoq.extend(r_cmd)
if self.releasever:
myrepoq.extend('--releasever=%s' % self.releasever)
cmd = myrepoq + ["--qf", qf, "--whatprovides", req_spec]
rc, out, err = self.module.run_command(cmd)
cmd = myrepoq + ["--qf", qf, req_spec]
rc2, out2, err2 = self.module.run_command(cmd)
if rc == 0 and rc2 == 0:
out += out2
pkgs = set([p for p in out.split('\n') if p.strip()])
if not pkgs:
pkgs = self.is_installed(repoq, req_spec, qf=qf)
return pkgs
else:
self.module.fail_json(msg='Error from repoquery: %s: %s' % (cmd, err + err2))
return set()
def transaction_exists(self, pkglist):
"""
checks the package list to see if any packages are
involved in an incomplete transaction
"""
conflicts = []
if not transaction_helpers:
return conflicts
# first, we create a list of the package 'nvreas'
# so we can compare the pieces later more easily
pkglist_nvreas = (splitFilename(pkg) for pkg in pkglist)
# next, we build the list of packages that are
# contained within an unfinished transaction
unfinished_transactions = find_unfinished_transactions()
for trans in unfinished_transactions:
steps = find_ts_remaining(trans)
for step in steps:
# the action is install/erase/etc., but we only
# care about the package spec contained in the step
(action, step_spec) = step
(n, v, r, e, a) = splitFilename(step_spec)
# and see if that spec is in the list of packages
# requested for installation/updating
for pkg in pkglist_nvreas:
# if the name and arch match, we're going to assume
# this package is part of a pending transaction
# the label is just for display purposes
label = "%s-%s" % (n, a)
if n == pkg[0] and a == pkg[4]:
if label not in conflicts:
conflicts.append("%s-%s" % (n, a))
break
return conflicts
def local_envra(self, path):
"""return envra of a local rpm passed in"""
ts = rpm.TransactionSet()
ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
fd = os.open(path, os.O_RDONLY)
try:
header = ts.hdrFromFdno(fd)
except rpm.error as e:
return None
finally:
os.close(fd)
return '%s:%s-%s-%s.%s' % (
header[rpm.RPMTAG_EPOCH] or '0',
header[rpm.RPMTAG_NAME],
header[rpm.RPMTAG_VERSION],
header[rpm.RPMTAG_RELEASE],
header[rpm.RPMTAG_ARCH]
)
    @contextmanager
    def set_env_proxy(self):
        """Temporarily export http(s)_proxy from the yum configuration.

        While the context is active, the proxy settings from yum.conf
        (including any embedded username/password) are placed in the
        process environment so URL fetches honour them; the previous
        environment is restored on exit.
        """
        # setting system proxy environment and saving old, if exists
        namepass = ""
        scheme = ["http", "https"]
        old_proxy_env = [os.getenv("http_proxy"), os.getenv("https_proxy")]
        try:
            # "_none_" is a special value to disable proxy in yum.conf/*.repo
            if self.yum_base.conf.proxy and self.yum_base.conf.proxy not in ("_none_",):
                if self.yum_base.conf.proxy_username:
                    namepass = namepass + self.yum_base.conf.proxy_username
                    proxy_url = self.yum_base.conf.proxy
                    if self.yum_base.conf.proxy_password:
                        namepass = namepass + ":" + self.yum_base.conf.proxy_password
                elif '@' in self.yum_base.conf.proxy:
                    # credentials already embedded in the proxy URL itself
                    namepass = self.yum_base.conf.proxy.split('@')[0].split('//')[-1]
                    proxy_url = self.yum_base.conf.proxy.replace("{0}@".format(namepass), "")

                if namepass:
                    namepass = namepass + '@'
                    for item in scheme:
                        # re-insert credentials right after the scheme
                        os.environ[item + "_proxy"] = re.sub(
                            r"(http://)",
                            r"\g<1>" + namepass, proxy_url
                        )
                else:
                    for item in scheme:
                        os.environ[item + "_proxy"] = self.yum_base.conf.proxy
            yield
        except yum.Errors.YumBaseError:
            raise
        finally:
            # revert back to previously system configuration
            for item in scheme:
                if os.getenv("{0}_proxy".format(item)):
                    del os.environ["{0}_proxy".format(item)]
            if old_proxy_env[0]:
                os.environ["http_proxy"] = old_proxy_env[0]
            if old_proxy_env[1]:
                os.environ["https_proxy"] = old_proxy_env[1]
def pkg_to_dict(self, pkgstr):
if pkgstr.strip() and pkgstr.count('|') == 5:
n, e, v, r, a, repo = pkgstr.split('|')
else:
return {'error_parsing': pkgstr}
d = {
'name': n,
'arch': a,
'epoch': e,
'release': r,
'version': v,
'repo': repo,
'envra': '%s:%s-%s-%s.%s' % (e, n, v, r, a)
}
if repo == 'installed':
d['yumstate'] = 'installed'
else:
d['yumstate'] = 'available'
return d
def repolist(self, repoq, qf="%{repoid}"):
cmd = repoq + ["--qf", qf, "-a"]
if self.releasever:
cmd.extend(['--releasever=%s' % self.releasever])
rc, out, _ = self.module.run_command(cmd)
if rc == 0:
return set(p for p in out.split('\n') if p.strip())
else:
return []
    def list_stuff(self, repoquerybin, stuff):
        """Handle the informational ``list=...`` module parameter.

        *stuff* is one of 'installed', 'updates', 'available', 'repos', or
        an arbitrary package spec; returns a list of package/repo dicts.
        """
        qf = "%{name}|%{epoch}|%{version}|%{release}|%{arch}|%{repoid}"
        # is_installed goes through rpm instead of repoquery so it needs a slightly different format
        is_installed_qf = "%{name}|%{epoch}|%{version}|%{release}|%{arch}|installed\n"
        repoq = [repoquerybin, '--show-duplicates', '--plugins', '--quiet']
        if self.disablerepo:
            repoq.extend(['--disablerepo', ','.join(self.disablerepo)])
        if self.enablerepo:
            repoq.extend(['--enablerepo', ','.join(self.enablerepo)])
        if self.installroot != '/':
            repoq.extend(['--installroot', self.installroot])
        if self.conf_file and os.path.exists(self.conf_file):
            repoq += ['-c', self.conf_file]

        if stuff == 'installed':
            return [self.pkg_to_dict(p) for p in sorted(self.is_installed(repoq, '-a', qf=is_installed_qf)) if p.strip()]

        if stuff == 'updates':
            return [self.pkg_to_dict(p) for p in sorted(self.is_update(repoq, '-a', qf=qf)) if p.strip()]

        if stuff == 'available':
            return [self.pkg_to_dict(p) for p in sorted(self.is_available(repoq, '-a', qf=qf)) if p.strip()]

        if stuff == 'repos':
            return [dict(repoid=name, state='enabled') for name in sorted(self.repolist(repoq)) if name.strip()]

        # otherwise treat *stuff* as a package spec and report both the
        # installed and the available matches
        return [
            self.pkg_to_dict(p) for p in
            sorted(self.is_installed(repoq, stuff, qf=is_installed_qf) + self.is_available(repoq, stuff, qf=qf))
            if p.strip()
        ]
def exec_install(self, items, action, pkgs, res):
cmd = self.yum_basecmd + [action] + pkgs
if self.releasever:
cmd.extend(['--releasever=%s' % self.releasever])
if self.module.check_mode:
self.module.exit_json(changed=True, results=res['results'], changes=dict(installed=pkgs))
else:
res['changes'] = dict(installed=pkgs)
lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
rc, out, err = self.module.run_command(cmd, environ_update=lang_env)
if rc == 1:
for spec in items:
# Fail on invalid urls:
if ('://' in spec and ('No package %s available.' % spec in out or 'Cannot open: %s. Skipping.' % spec in err)):
err = 'Package at %s could not be installed' % spec
self.module.fail_json(changed=False, msg=err, rc=rc)
res['rc'] = rc
res['results'].append(out)
res['msg'] += err
res['changed'] = True
if ('Nothing to do' in out and rc == 0) or ('does not have any packages' in err):
res['changed'] = False
if rc != 0:
res['changed'] = False
self.module.fail_json(**res)
# Fail if yum prints 'No space left on device' because that means some
# packages failed executing their post install scripts because of lack of
# free space (e.g. kernel package couldn't generate initramfs). Note that
# yum can still exit with rc=0 even if some post scripts didn't execute
# correctly.
if 'No space left on device' in (out or err):
res['changed'] = False
res['msg'] = 'No space left on device'
self.module.fail_json(**res)
# FIXME - if we did an install - go and check the rpmdb to see if it actually installed
# look for each pkg in rpmdb
# look for each pkg via obsoletes
return res
    def install(self, items, repoq):
        """Install (or, with allow_downgrade, downgrade to) the requested specs.

        *items* may mix local .rpm paths, URLs, '@group' specs and package
        names/ranges. Each spec is skipped when already satisfied
        (idempotence); the rest are batched into 'install' and 'downgrade'
        transactions run via exec_install. Returns the result dict.
        """
        pkgs = []
        downgrade_pkgs = []
        res = {}
        res['results'] = []
        res['msg'] = ''
        res['rc'] = 0
        res['changed'] = False

        for spec in items:
            pkg = None
            downgrade_candidate = False

            # check if pkgspec is installed (if possible for idempotence)
            if spec.endswith('.rpm') or '://' in spec:
                if '://' not in spec and not os.path.exists(spec):
                    res['msg'] += "No RPM file matching '%s' found on system" % spec
                    res['results'].append("No RPM file matching '%s' found on system" % spec)
                    res['rc'] = 127  # Ensure the task fails in with-loop
                    self.module.fail_json(**res)

                if '://' in spec:
                    with self.set_env_proxy():
                        package = fetch_file(self.module, spec)
                    if not package.endswith('.rpm'):
                        # yum requires a local file to have the extension of .rpm and we
                        # can not guarantee that from an URL (redirects, proxies, etc)
                        new_package_path = '%s.rpm' % package
                        os.rename(package, new_package_path)
                        package = new_package_path
                else:
                    package = spec

                # most common case is the pkg is already installed
                envra = self.local_envra(package)
                if envra is None:
                    self.module.fail_json(msg="Failed to get nevra information from RPM package: %s" % spec)
                installed_pkgs = self.is_installed(repoq, envra)
                if installed_pkgs:
                    res['results'].append('%s providing %s is already installed' % (installed_pkgs[0], package))
                    continue

                (name, ver, rel, epoch, arch) = splitFilename(envra)
                installed_pkgs = self.is_installed(repoq, name)

                # case for two same envr but different archs like x86_64 and i686
                if len(installed_pkgs) == 2:
                    (cur_name0, cur_ver0, cur_rel0, cur_epoch0, cur_arch0) = splitFilename(installed_pkgs[0])
                    (cur_name1, cur_ver1, cur_rel1, cur_epoch1, cur_arch1) = splitFilename(installed_pkgs[1])
                    cur_epoch0 = cur_epoch0 or '0'
                    cur_epoch1 = cur_epoch1 or '0'
                    compare = compareEVR((cur_epoch0, cur_ver0, cur_rel0), (cur_epoch1, cur_ver1, cur_rel1))
                    if compare == 0 and cur_arch0 != cur_arch1:
                        # same EVR on both arches: only compare against the
                        # one matching the arch of the requested rpm
                        for installed_pkg in installed_pkgs:
                            if installed_pkg.endswith(arch):
                                installed_pkgs = [installed_pkg]

                if len(installed_pkgs) == 1:
                    installed_pkg = installed_pkgs[0]
                    (cur_name, cur_ver, cur_rel, cur_epoch, cur_arch) = splitFilename(installed_pkg)
                    cur_epoch = cur_epoch or '0'
                    compare = compareEVR((cur_epoch, cur_ver, cur_rel), (epoch, ver, rel))

                    # compare > 0 -> higher version is installed
                    # compare == 0 -> exact version is installed
                    # compare < 0 -> lower version is installed
                    if compare > 0 and self.allow_downgrade:
                        downgrade_candidate = True
                    elif compare >= 0:
                        continue

                # else: if there are more installed packages with the same name, that would mean
                # kernel, gpg-pubkey or like, so just let yum deal with it and try to install it

                pkg = package

            # groups
            elif spec.startswith('@'):
                if self.is_group_env_installed(spec):
                    continue

                pkg = spec

            # range requires or file-requires or pkgname :(
            else:
                # most common case is the pkg is already installed and done
                # short circuit all the bs - and search for it as a pkg in is_installed
                # if you find it then we're done
                if not set(['*', '?']).intersection(set(spec)):
                    installed_pkgs = self.is_installed(repoq, spec, is_pkg=True)
                    if installed_pkgs:
                        res['results'].append('%s providing %s is already installed' % (installed_pkgs[0], spec))
                        continue

                # look up what pkgs provide this
                pkglist = self.what_provides(repoq, spec)
                if not pkglist:
                    res['msg'] += "No package matching '%s' found available, installed or updated" % spec
                    res['results'].append("No package matching '%s' found available, installed or updated" % spec)
                    res['rc'] = 126  # Ensure the task fails in with-loop
                    self.module.fail_json(**res)

                # if any of the packages are involved in a transaction, fail now
                # so that we don't hang on the yum operation later
                conflicts = self.transaction_exists(pkglist)
                if conflicts:
                    res['msg'] += "The following packages have pending transactions: %s" % ", ".join(conflicts)
                    res['rc'] = 125  # Ensure the task fails in with-loop
                    self.module.fail_json(**res)

                # if any of them are installed
                # then nothing to do
                found = False
                for this in pkglist:
                    if self.is_installed(repoq, this, is_pkg=True):
                        found = True
                        res['results'].append('%s providing %s is already installed' % (this, spec))
                        break

                # if the version of the pkg you have installed is not in ANY repo, but there are
                # other versions in the repos (both higher and lower) then the previous checks won't work.
                # so we check one more time. This really only works for pkgname - not for file provides or virt provides
                # but virt provides should be all caught in what_provides on its own.
                # highly irritating
                if not found:
                    if self.is_installed(repoq, spec):
                        found = True
                        res['results'].append('package providing %s is already installed' % (spec))

                if found:
                    continue

                # Downgrade - The yum install command will only install or upgrade to a spec version, it will
                # not install an older version of an RPM even if specified by the install spec. So we need to
                # determine if this is a downgrade, and then use the yum downgrade command to install the RPM.
                if self.allow_downgrade:
                    for package in pkglist:
                        # Get the NEVRA of the requested package using pkglist instead of spec because pkglist
                        # contains consistently-formatted package names returned by yum, rather than user input
                        # that is often not parsed correctly by splitFilename().
                        (name, ver, rel, epoch, arch) = splitFilename(package)

                        # Check if any version of the requested package is installed
                        inst_pkgs = self.is_installed(repoq, name, is_pkg=True)
                        if inst_pkgs:
                            (cur_name, cur_ver, cur_rel, cur_epoch, cur_arch) = splitFilename(inst_pkgs[0])
                            compare = compareEVR((cur_epoch, cur_ver, cur_rel), (epoch, ver, rel))
                            if compare > 0:
                                downgrade_candidate = True
                            else:
                                downgrade_candidate = False
                                break

                # If package needs to be installed/upgraded/downgraded, then pass in the spec
                # we could get here if nothing provides it but that's not
                # the error we're catching here
                pkg = spec

            if downgrade_candidate and self.allow_downgrade:
                downgrade_pkgs.append(pkg)
            else:
                pkgs.append(pkg)

        if downgrade_pkgs:
            res = self.exec_install(items, 'downgrade', downgrade_pkgs, res)

        if pkgs:
            res = self.exec_install(items, 'install', pkgs, res)

        return res
    def remove(self, items, repoq):
        """Remove the requested packages/groups, honouring autoremove.

        Already-absent specs are reported but skipped; after the yum
        transaction each removed spec is re-checked and the module fails if
        anything is still installed.
        """
        pkgs = []
        res = {}
        res['results'] = []
        res['msg'] = ''
        res['changed'] = False
        res['rc'] = 0

        for pkg in items:
            if pkg.startswith('@'):
                installed = self.is_group_env_installed(pkg)
            else:
                installed = self.is_installed(repoq, pkg)

            if installed:
                pkgs.append(pkg)
            else:
                res['results'].append('%s is not installed' % pkg)

        if pkgs:
            if self.module.check_mode:
                self.module.exit_json(changed=True, results=res['results'], changes=dict(removed=pkgs))
            else:
                res['changes'] = dict(removed=pkgs)

            # run an actual yum transaction
            if self.autoremove:
                cmd = self.yum_basecmd + ["autoremove"] + pkgs
            else:
                cmd = self.yum_basecmd + ["remove"] + pkgs
            rc, out, err = self.module.run_command(cmd)

            res['rc'] = rc
            res['results'].append(out)
            res['msg'] = err

            if rc != 0:
                if self.autoremove and 'No such command' in out:
                    self.module.fail_json(msg='Version of YUM too old for autoremove: Requires yum 3.4.3 (RHEL/CentOS 7+)')
                else:
                    self.module.fail_json(**res)

            # compile the results into one batch. If anything is changed
            # then mark changed
            # at the end - if we've end up failed then fail out of the rest
            # of the process

            # at this point we check to see if the pkg is no longer present
            self._yum_base = None  # previous YumBase package index is now invalid
            for pkg in pkgs:
                if pkg.startswith('@'):
                    installed = self.is_group_env_installed(pkg)
                else:
                    installed = self.is_installed(repoq, pkg, is_pkg=True)

                if installed:
                    # Return a message so it's obvious to the user why yum failed
                    # and which package couldn't be removed. More details:
                    # https://github.com/ansible/ansible/issues/35672
                    res['msg'] = "Package '%s' couldn't be removed!" % pkg
                    self.module.fail_json(**res)

            res['changed'] = True

        return res
def run_check_update(self):
# run check-update to see if we have packages pending
if self.releasever:
rc, out, err = self.module.run_command(self.yum_basecmd + ['check-update'] + ['--releasever=%s' % self.releasever])
else:
rc, out, err = self.module.run_command(self.yum_basecmd + ['check-update'])
return rc, out, err
@staticmethod
def parse_check_update(check_update_output):
updates = {}
obsoletes = {}
# remove incorrect new lines in longer columns in output from yum check-update
# yum line wrapping can move the repo to the next line
#
# Meant to filter out sets of lines like:
# some_looooooooooooooooooooooooooooooooooooong_package_name 1:1.2.3-1.el7
# some-repo-label
#
# But it also needs to avoid catching lines like:
# Loading mirror speeds from cached hostfile
#
# ceph.x86_64 1:11.2.0-0.el7 ceph
# preprocess string and filter out empty lines so the regex below works
out = re.sub(r'\n[^\w]\W+(.*)', r' \1', check_update_output)
available_updates = out.split('\n')
# build update dictionary
for line in available_updates:
line = line.split()
# ignore irrelevant lines
# '*' in line matches lines like mirror lists:
# * base: mirror.corbina.net
# len(line) != 3 or 6 could be junk or a continuation
# len(line) = 6 is package obsoletes
#
# FIXME: what is the '.' not in line conditional for?
if '*' in line or len(line) not in [3, 6] or '.' not in line[0]:
continue
pkg, version, repo = line[0], line[1], line[2]
name, dist = pkg.rsplit('.', 1)
if name not in updates:
updates[name] = []
updates[name].append({'version': version, 'dist': dist, 'repo': repo})
if len(line) == 6:
obsolete_pkg, obsolete_version, obsolete_repo = line[3], line[4], line[5]
obsolete_name, obsolete_dist = obsolete_pkg.rsplit('.', 1)
if obsolete_name not in obsoletes:
obsoletes[obsolete_name] = []
obsoletes[obsolete_name].append({'version': obsolete_version, 'dist': obsolete_dist, 'repo': obsolete_repo})
return updates, obsoletes
def latest(self, items, repoq):
    """Bring each requested spec to its latest version (state=latest).

    :arg items: list of package specs; a literal ``'*'`` entry means
        "update everything"
    :arg repoq: base repoquery command (list of argv strings) or a
        false-y value when the yum API is used directly
    :returns: result dict ('changed', 'rc', 'results', 'msg', 'changes',
        plus 'obsoletes' when yum reported any) suitable for
        ``exit_json()``; calls ``fail_json()`` directly on fatal errors
    """
    res = {}
    res['results'] = []
    res['msg'] = ''
    res['changed'] = False
    res['rc'] = 0
    pkgs = {}
    pkgs['update'] = []
    pkgs['install'] = []
    updates = {}
    obsoletes = {}
    update_all = False
    cmd = None

    # determine if we're doing an update all
    if '*' in items:
        update_all = True

    rc, out, err = self.run_check_update()

    if rc == 0 and update_all:
        res['results'].append('Nothing to do here, all packages are up to date')
        return res
    elif rc == 100:
        # rc 100 from check-update means updates are available
        updates, obsoletes = self.parse_check_update(out)
    elif rc == 1:
        res['msg'] = err
        res['rc'] = rc
        self.module.fail_json(**res)

    if update_all:
        cmd = self.yum_basecmd + ['update']
        will_update = set(updates.keys())
        will_update_from_other_package = dict()
    else:
        will_update = set()
        will_update_from_other_package = dict()
        for spec in items:
            # some guess work involved with groups. update @<group> will install the group if missing
            if spec.startswith('@'):
                pkgs['update'].append(spec)
                will_update.add(spec)
                continue

            # check if pkgspec is installed (if possible for idempotence)
            # localpkg
            if spec.endswith('.rpm') and '://' not in spec:
                if not os.path.exists(spec):
                    res['msg'] += "No RPM file matching '%s' found on system" % spec
                    res['results'].append("No RPM file matching '%s' found on system" % spec)
                    res['rc'] = 127  # Ensure the task fails in with-loop
                    self.module.fail_json(**res)

                # get the pkg e:name-v-r.arch
                envra = self.local_envra(spec)

                if envra is None:
                    self.module.fail_json(msg="Failed to get nevra information from RPM package: %s" % spec)

                # local rpm files can't be updated
                if self.is_installed(repoq, envra):
                    pkgs['update'].append(spec)
                else:
                    pkgs['install'].append(spec)
                continue

            # URL
            if '://' in spec:
                # download package so that we can check if it's already installed
                with self.set_env_proxy():
                    package = fetch_file(self.module, spec)
                envra = self.local_envra(package)

                if envra is None:
                    self.module.fail_json(msg="Failed to get nevra information from RPM package: %s" % spec)

                # local rpm files can't be updated
                if self.is_installed(repoq, envra):
                    pkgs['update'].append(spec)
                else:
                    pkgs['install'].append(spec)
                continue

            # dep/pkgname - find it
            if self.is_installed(repoq, spec):
                pkgs['update'].append(spec)
            else:
                pkgs['install'].append(spec)
            pkglist = self.what_provides(repoq, spec)
            # FIXME..? may not be desirable to throw an exception here if a single package is missing
            if not pkglist:
                res['msg'] += "No package matching '%s' found available, installed or updated" % spec
                res['results'].append("No package matching '%s' found available, installed or updated" % spec)
                res['rc'] = 126  # Ensure the task fails in with-loop
                self.module.fail_json(**res)

            nothing_to_do = True
            for pkg in pkglist:
                if spec in pkgs['install'] and self.is_available(repoq, pkg):
                    nothing_to_do = False
                    break

                # this contains the full NVR and spec could contain wildcards
                # or virtual provides (like "python-*" or "smtp-daemon") while
                # updates contains name only.
                pkgname, _, _, _, _ = splitFilename(pkg)
                if spec in pkgs['update'] and pkgname in updates:
                    nothing_to_do = False
                    will_update.add(spec)
                    # Massage the updates list
                    if spec != pkgname:
                        # For reporting what packages would be updated more
                        # succinctly
                        will_update_from_other_package[spec] = pkgname
                    break

            if not self.is_installed(repoq, spec) and self.update_only:
                res['results'].append("Packages providing %s not installed due to update_only specified" % spec)
                continue
            if nothing_to_do:
                res['results'].append("All packages providing %s are up to date" % spec)
                continue

            # if any of the packages are involved in a transaction, fail now
            # so that we don't hang on the yum operation later
            conflicts = self.transaction_exists(pkglist)
            if conflicts:
                res['msg'] += "The following packages have pending transactions: %s" % ", ".join(conflicts)
                res['results'].append("The following packages have pending transactions: %s" % ", ".join(conflicts))
                res['rc'] = 128  # Ensure the task fails in with-loop
                self.module.fail_json(**res)

    # check_mode output
    to_update = []
    for w in will_update:
        if w.startswith('@'):
            # yum groups
            to_update.append((w, None))
        elif w not in updates:
            # There are (at least, probably more) 2 ways we can get here:
            #
            # * A virtual provides (our user specifies "webserver", but
            #   "httpd" is the key in 'updates').
            #
            # * A wildcard. emac* will get us here if there's a package
            #   called 'emacs' in the pending updates list. 'updates' will
            #   of course key on 'emacs' in that case.
            other_pkg = will_update_from_other_package[w]

            # We are guaranteed that: other_pkg in updates
            # ...based on the logic above. But we only want to show one
            # update in this case (given the wording of "at least") below.
            # As an example, consider a package installed twice:
            #     foobar.x86_64, foobar.i686
            # We want to avoid having both:
            #     ('foo*', 'because of (at least) foobar-1.x86_64 from repo')
            #     ('foo*', 'because of (at least) foobar-1.i686 from repo')
            # We just pick the first one.
            #
            # TODO: This is something that might be nice to change, but it
            #       would be a module UI change. But without it, we're
            #       dropping potentially important information about what
            #       was updated. Instead of (given_spec, random_matching_package)
            #       it'd be nice if we appended (given_spec, [all_matching_packages])
            #
            #       ... But then, we also drop information if multiple
            #       different (distinct) packages match the given spec and
            #       we should probably fix that too.
            pkg = updates[other_pkg][0]
            to_update.append(
                (
                    w,
                    'because of (at least) %s-%s.%s from %s' % (
                        other_pkg,
                        pkg['version'],
                        pkg['dist'],
                        pkg['repo']
                    )
                )
            )
        else:
            # Otherwise the spec is an exact match
            for pkg in updates[w]:
                to_update.append(
                    (
                        w,
                        '%s.%s from %s' % (
                            pkg['version'],
                            pkg['dist'],
                            pkg['repo']
                        )
                    )
                )

    if self.update_only:
        res['changes'] = dict(installed=[], updated=to_update)
    else:
        res['changes'] = dict(installed=pkgs['install'], updated=to_update)

    if obsoletes:
        res['obsoletes'] = obsoletes

    # return results before we actually execute stuff
    if self.module.check_mode:
        if will_update or pkgs['install']:
            res['changed'] = True
        return res

    # BUGFIX: only the "update all" command built above needs
    # --releasever appended here.  When specific packages were
    # requested, cmd is still None at this point (the per-package
    # commands below are built from self.yum_basecmd, which ensure()
    # already extended with --releasever), and the unguarded
    # cmd.extend() raised AttributeError:
    # 'NoneType' object has no attribute 'extend'.
    if self.releasever and cmd is not None:
        cmd.extend(['--releasever=%s' % self.releasever])

    # run commands
    if cmd:     # update all
        rc, out, err = self.module.run_command(cmd)
        res['changed'] = True
    elif self.update_only:
        if pkgs['update']:
            cmd = self.yum_basecmd + ['update'] + pkgs['update']
            lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
            rc, out, err = self.module.run_command(cmd, environ_update=lang_env)
            out_lower = out.strip().lower()
            if not out_lower.endswith("no packages marked for update") and \
                    not out_lower.endswith("nothing to do"):
                res['changed'] = True
        else:
            rc, out, err = [0, '', '']
    elif pkgs['install'] or will_update and not self.update_only:
        cmd = self.yum_basecmd + ['install'] + pkgs['install'] + pkgs['update']
        lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
        rc, out, err = self.module.run_command(cmd, environ_update=lang_env)
        out_lower = out.strip().lower()
        if not out_lower.endswith("no packages marked for update") and \
                not out_lower.endswith("nothing to do"):
            res['changed'] = True
    else:
        rc, out, err = [0, '', '']

    res['rc'] = rc
    res['msg'] += err
    res['results'].append(out)

    if rc:
        res['failed'] = True

    return res
def ensure(self, repoq):
    """Apply the requested package state.

    Builds up ``self.yum_basecmd`` from the module options, then
    dispatches on ``self.state`` to :meth:`latest`, :meth:`install`, or
    :meth:`remove`.

    :arg repoq: base repoquery command as a list of argv strings, or a
        false-y value when repoquery should not be used
    :returns: the result dict produced by the dispatched worker method
    """
    pkgs = self.names

    # autoremove was provided without `name`
    if not self.names and self.autoremove:
        pkgs = []
        self.state = 'absent'

    if self.conf_file and os.path.exists(self.conf_file):
        self.yum_basecmd += ['-c', self.conf_file]

        if repoq:
            repoq += ['-c', self.conf_file]

    # Translate the module options into yum command-line flags, in a
    # fixed order, on the shared base command used by all actions below.
    if self.skip_broken:
        self.yum_basecmd.extend(['--skip-broken'])

    if self.disablerepo:
        self.yum_basecmd.extend(['--disablerepo=%s' % ','.join(self.disablerepo)])

    if self.enablerepo:
        self.yum_basecmd.extend(['--enablerepo=%s' % ','.join(self.enablerepo)])

    if self.enable_plugin:
        self.yum_basecmd.extend(['--enableplugin', ','.join(self.enable_plugin)])

    if self.disable_plugin:
        self.yum_basecmd.extend(['--disableplugin', ','.join(self.disable_plugin)])

    if self.exclude:
        e_cmd = ['--exclude=%s' % ','.join(self.exclude)]
        self.yum_basecmd.extend(e_cmd)

    if self.disable_excludes:
        self.yum_basecmd.extend(['--disableexcludes=%s' % self.disable_excludes])

    if self.download_only:
        self.yum_basecmd.extend(['--downloadonly'])

        # --downloaddir is only meaningful together with --downloadonly
        if self.download_dir:
            self.yum_basecmd.extend(['--downloaddir=%s' % self.download_dir])

    if self.releasever:
        self.yum_basecmd.extend(['--releasever=%s' % self.releasever])

    if self.installroot != '/':
        # do not setup installroot by default, because of error
        # CRITICAL:yum.cli:Config Error: Error accessing file for config file:////etc/yum.conf
        # in old yum version (like in CentOS 6.6)
        e_cmd = ['--installroot=%s' % self.installroot]
        self.yum_basecmd.extend(e_cmd)

    if self.state in ('installed', 'present', 'latest'):
        """ The need of this entire if conditional has to be changed
            this function is the ensure function that is called
            in the main section.

            This conditional tends to disable/enable repo for
            install present latest action, same actually
            can be done for remove and absent action

            As solution I would advice to cal
            try: self.yum_base.repos.disableRepo(disablerepo)
            and
            try: self.yum_base.repos.enableRepo(enablerepo)
            right before any yum_cmd is actually called regardless
            of yum action.

            Please note that enable/disablerepo options are general
            options, this means that we can call those with any action
            option.  https://linux.die.net/man/8/yum

            This docstring will be removed together when issue: #21619
            will be solved.

            This has been triggered by: #19587
        """

        if self.update_cache:
            self.module.run_command(self.yum_basecmd + ['clean', 'expire-cache'])

        # Touch the yum API repo configuration so errors surface here
        # rather than mid-transaction.
        try:
            current_repos = self.yum_base.repos.repos.keys()
            if self.enablerepo:
                try:
                    new_repos = self.yum_base.repos.repos.keys()
                    for i in new_repos:
                        if i not in current_repos:
                            rid = self.yum_base.repos.getRepo(i)
                            # accessing repoXML forces the repo metadata to load
                            a = rid.repoXML.repoid  # nopep8 - https://github.com/ansible/ansible/pull/21475#pullrequestreview-22404868
                    current_repos = new_repos
                except yum.Errors.YumBaseError as e:
                    self.module.fail_json(msg="Error setting/accessing repos: %s" % to_native(e))
        except yum.Errors.YumBaseError as e:
            self.module.fail_json(msg="Error accessing repos: %s" % to_native(e))
    if self.state == 'latest' or self.update_only:
        if self.disable_gpg_check:
            self.yum_basecmd.append('--nogpgcheck')
        if self.security:
            self.yum_basecmd.append('--security')
        if self.bugfix:
            self.yum_basecmd.append('--bugfix')
        res = self.latest(pkgs, repoq)
    elif self.state in ('installed', 'present'):
        if self.disable_gpg_check:
            self.yum_basecmd.append('--nogpgcheck')
        res = self.install(pkgs, repoq)
    elif self.state in ('removed', 'absent'):
        res = self.remove(pkgs, repoq)
    else:
        # should be caught by AnsibleModule argument_spec
        self.module.fail_json(
            msg="we should never get here unless this all failed",
            changed=False,
            results='',
            errors='unexpected state'
        )
    return res
@staticmethod
def has_yum():
    """Report whether the Python ``yum`` bindings were importable."""
    return bool(HAS_YUM_PYTHON)
def run(self):
    """
    actually execute the module code backend

    Handles Python 2 respawning, cache-only refreshes, ``list=``
    queries, and otherwise delegates state management to
    :meth:`ensure`.  Always terminates the module via
    ``exit_json``/``fail_json``.
    """

    # The yum/rpm Python bindings only exist for the system Python 2;
    # respawn under /usr/bin/python if we were started under something
    # else and the bindings are missing.
    if (not HAS_RPM_PYTHON or not HAS_YUM_PYTHON) and sys.executable != '/usr/bin/python' and not has_respawned():
        respawn_module('/usr/bin/python')
        # end of the line for this process; we'll exit here once the respawned module has completed

    error_msgs = []
    if not HAS_RPM_PYTHON:
        error_msgs.append('The Python 2 bindings for rpm are needed for this module. If you require Python 3 support use the `dnf` Ansible module instead.')
    if not HAS_YUM_PYTHON:
        error_msgs.append('The Python 2 yum module is needed for this module. If you require Python 3 support use the `dnf` Ansible module instead.')

    # presumably blocks until any concurrent yum transaction releases
    # the yum lock — TODO confirm against wait_for_lock's definition
    self.wait_for_lock()

    if error_msgs:
        self.module.fail_json(msg='. '.join(error_msgs))

    # fedora will redirect yum to dnf, which has incompatibilities
    # with how this module expects yum to operate. If yum-deprecated
    # is available, use that instead to emulate the old behaviors.
    if self.module.get_bin_path('yum-deprecated'):
        yumbin = self.module.get_bin_path('yum-deprecated')
    else:
        yumbin = self.module.get_bin_path('yum')

    # need debug level 2 to get 'Nothing to do' for groupinstall.
    self.yum_basecmd = [yumbin, '-d', '2', '-y']

    # update_cache with neither names nor list requested: refresh the
    # cache and exit immediately.
    if self.update_cache and not self.names and not self.list:
        rc, stdout, stderr = self.module.run_command(self.yum_basecmd + ['clean', 'expire-cache'])
        if rc == 0:
            self.module.exit_json(
                changed=False,
                msg="Cache updated",
                rc=rc,
                results=[]
            )
        else:
            # NOTE(review): exits successfully (exit_json, changed=False)
            # even though the cache refresh failed; rc/stderr are passed
            # through for the caller to inspect — confirm this is intended
            self.module.exit_json(
                changed=False,
                msg="Failed to update cache",
                rc=rc,
                results=[stderr],
            )

    repoquerybin = self.module.get_bin_path('repoquery', required=False)

    # Optionally bootstrap repoquery (yum-utils) when it is missing.
    if self.install_repoquery and not repoquerybin and not self.module.check_mode:
        yum_path = self.module.get_bin_path('yum')
        if yum_path:
            if self.releasever:
                self.module.run_command('%s -y install yum-utils --releasever %s' % (yum_path, self.releasever))
            else:
                self.module.run_command('%s -y install yum-utils' % yum_path)
        repoquerybin = self.module.get_bin_path('repoquery', required=False)

    if self.list:
        if not repoquerybin:
            self.module.fail_json(msg="repoquery is required to use list= with this module. Please install the yum-utils package.")
        results = {'results': self.list_stuff(repoquerybin, self.list)}
    else:
        # If rhn-plugin is installed and no rhn-certificate is available on
        # the system then users will see an error message using the yum API.
        # Use repoquery in those cases.
        repoquery = None
        try:
            yum_plugins = self.yum_base.plugins._plugins
        except AttributeError:
            pass
        else:
            if 'rhnplugin' in yum_plugins:
                if repoquerybin:
                    repoquery = [repoquerybin, '--show-duplicates', '--plugins', '--quiet']
                    if self.installroot != '/':
                        repoquery.extend(['--installroot', self.installroot])

                    if self.disable_excludes:
                        # repoquery does not support --disableexcludes,
                        # so make a temp copy of yum.conf and get rid of the 'exclude=' line there
                        try:
                            with open('/etc/yum.conf', 'r') as f:
                                content = f.readlines()

                            tmp_conf_file = tempfile.NamedTemporaryFile(dir=self.module.tmpdir, delete=False)
                            self.module.add_cleanup_file(tmp_conf_file.name)
                            tmp_conf_file.writelines([c for c in content if not c.startswith("exclude=")])
                            tmp_conf_file.close()
                        except Exception as e:
                            self.module.fail_json(msg="Failure setting up repoquery: %s" % to_native(e))

                        repoquery.extend(['-c', tmp_conf_file.name])

        results = self.ensure(repoquery)
        if repoquery:
            results['msg'] = '%s %s' % (
                results.get('msg', ''),
                'Warning: Due to potential bad behaviour with rhnplugin and certificates, used slower repoquery calls instead of Yum API.'
            )

    self.module.exit_json(**results)
def main():
    """Module entry point: build the AnsibleModule and run the backend.

    Supported forms:
        state=installed|removed|latest name=pkgspec
    Informational queries:
        list=installed|updates|available|repos|pkgspec
    """
    # The shared yum/dnf argument spec gains a yum-specific backend
    # selector before the module is instantiated.
    yumdnf_argument_spec['argument_spec']['use_backend'] = dict(
        default='auto', choices=['auto', 'yum', 'yum4', 'dnf'])

    module = AnsibleModule(**yumdnf_argument_spec)
    YumModule(module).run()


if __name__ == '__main__':
    main()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 69,397 |
Feature: yum --cacheonly
|
<!--- Verify first that your feature was not already discussed on GitHub -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
<!--- Describe the new feature/improvement briefly below -->
Add a --cacheonly option to the ansible yum module, similar to the --downloadonly option.
##### ISSUE TYPE
- Feature Idea
##### COMPONENT NAME
<!--- Write the short name of the module, plugin, task or feature below, use your best guess if unsure -->
lib/ansible/modules/packaging/os/yum.py
##### ADDITIONAL INFORMATION
<!--- Describe how the feature would be used, why it is needed and what it would solve -->
We have a group of Oracle servers that have no repository management server. They update directly from Oracle's repositories. Our thought is to do a "yum update --downloadonly" on all dev and prod servers. Then we can update the dev servers first using "yum update --cacheonly" to test the updates, and then run the same command on the prod servers at a later time to make them consistent with the dev servers.
<!--- Paste example playbooks or commands between quotes below -->
```yaml
```
<!--- HINT: You can also paste gist.github.com links for larger files -->
|
https://github.com/ansible/ansible/issues/69397
|
https://github.com/ansible/ansible/pull/73820
|
0c101f3f769b9aaed1a99f8858197f4d28b11034
|
fdee5ca16dd169b83997b6c82130a2d9c7fe3688
| 2020-05-08T18:48:18Z |
python
| 2021-04-16T15:08:52Z |
test/integration/targets/dnf/tasks/cacheonly.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 69,397 |
Feature: yum --cacheonly
|
<!--- Verify first that your feature was not already discussed on GitHub -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
<!--- Describe the new feature/improvement briefly below -->
Add a --cacheonly option to the ansible yum module, similar to the --downloadonly option.
##### ISSUE TYPE
- Feature Idea
##### COMPONENT NAME
<!--- Write the short name of the module, plugin, task or feature below, use your best guess if unsure -->
lib/ansible/modules/packaging/os/yum.py
##### ADDITIONAL INFORMATION
<!--- Describe how the feature would be used, why it is needed and what it would solve -->
We have a group of Oracle servers that have no repository management server. They update directly from Oracle's repositories. Our thought is to do a "yum update --downloadonly" on all dev and prod servers. Then we can update the dev servers first using "yum update --cacheonly" to test the updates, and then run the same command on the prod servers at a later time to make them consistent with the dev servers.
<!--- Paste example playbooks or commands between quotes below -->
```yaml
```
<!--- HINT: You can also paste gist.github.com links for larger files -->
|
https://github.com/ansible/ansible/issues/69397
|
https://github.com/ansible/ansible/pull/73820
|
0c101f3f769b9aaed1a99f8858197f4d28b11034
|
fdee5ca16dd169b83997b6c82130a2d9c7fe3688
| 2020-05-08T18:48:18Z |
python
| 2021-04-16T15:08:52Z |
test/integration/targets/dnf/tasks/main.yml
|
# test code for the dnf module
# (c) 2014, James Tanner <[email protected]>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Note: We install the yum package onto Fedora so that this will work on dnf systems
# We want to test that for people who don't want to upgrade their systems.
- include_tasks: dnf.yml
when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('23', '>=')) or
(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
- include_tasks: filters_check_mode.yml
when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('23', '>=')) or
(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
tags:
- filters
- include_tasks: filters.yml
when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('23', '>=')) or
(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
tags:
- filters
- include_tasks: gpg.yml
when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('23', '>=')) or
(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
- include_tasks: repo.yml
when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('23', '>=')) or
(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
- include_tasks: dnfinstallroot.yml
when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('23', '>=')) or
(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
# Attempting to install a different RHEL release in a tmpdir doesn't work (rhel8 beta)
- include_tasks: dnfreleasever.yml
when:
- ansible_distribution == 'Fedora'
- ansible_distribution_major_version is version('23', '>=')
- include_tasks: modularity.yml
when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('29', '>=')) or
(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
- include_tasks: logging.yml
when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('31', '>=')) or
(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
# TODO: Construct our own instance where 'nobest' applies, so we can stop using
# a third-party repo to test this behavior.
- include_tasks: nobest.yml
when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('24', '>=')) or
(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 69,397 |
Feature: yum --cacheonly
|
<!--- Verify first that your feature was not already discussed on GitHub -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
<!--- Describe the new feature/improvement briefly below -->
Add a --cacheonly option to the ansible yum module, similar to the --downloadonly option.
##### ISSUE TYPE
- Feature Idea
##### COMPONENT NAME
<!--- Write the short name of the module, plugin, task or feature below, use your best guess if unsure -->
lib/ansible/modules/packaging/os/yum.py
##### ADDITIONAL INFORMATION
<!--- Describe how the feature would be used, why it is needed and what it would solve -->
We have a group of Oracle servers that have no repository management server. They update directly from Oracle's repositories. Our thought is to do a "yum update --downloadonly" on all dev and prod servers. Then we can update the dev servers first using "yum update --cacheonly" to test the updates, and then run the same command on the prod servers at a later time to make them consistent with the dev servers.
<!--- Paste example playbooks or commands between quotes below -->
```yaml
```
<!--- HINT: You can also paste gist.github.com links for larger files -->
|
https://github.com/ansible/ansible/issues/69397
|
https://github.com/ansible/ansible/pull/73820
|
0c101f3f769b9aaed1a99f8858197f4d28b11034
|
fdee5ca16dd169b83997b6c82130a2d9c7fe3688
| 2020-05-08T18:48:18Z |
python
| 2021-04-16T15:08:52Z |
test/integration/targets/yum/tasks/cacheonly.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 69,397 |
Feature: yum --cacheonly
|
<!--- Verify first that your feature was not already discussed on GitHub -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
<!--- Describe the new feature/improvement briefly below -->
Add a --cacheonly option to the ansible yum module, similar to the --downloadonly option.
##### ISSUE TYPE
- Feature Idea
##### COMPONENT NAME
<!--- Write the short name of the module, plugin, task or feature below, use your best guess if unsure -->
lib/ansible/modules/packaging/os/yum.py
##### ADDITIONAL INFORMATION
<!--- Describe how the feature would be used, why it is needed and what it would solve -->
We have a group of Oracle servers that have no repository management server. They update directly from Oracle's repositories. Our thought is to do a "yum update --downloadonly" on all dev and prod servers. Then we can update the dev servers first using "yum update --cacheonly" to test the updates, and then run the same command on the prod servers at a later time to make them consistent with the dev servers.
<!--- Paste example playbooks or commands between quotes below -->
```yaml
```
<!--- HINT: You can also paste gist.github.com links for larger files -->
|
https://github.com/ansible/ansible/issues/69397
|
https://github.com/ansible/ansible/pull/73820
|
0c101f3f769b9aaed1a99f8858197f4d28b11034
|
fdee5ca16dd169b83997b6c82130a2d9c7fe3688
| 2020-05-08T18:48:18Z |
python
| 2021-04-16T15:08:52Z |
test/integration/targets/yum/tasks/main.yml
|
# (c) 2014, James Tanner <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Note: We install the yum package onto Fedora so that this will work on dnf systems
# We want to test that for people who don't want to upgrade their systems.
- block:
- name: ensure test packages are removed before starting
yum:
name:
- sos
state: absent
- import_tasks: yum.yml
always:
- name: remove installed packages
yum:
name:
- sos
state: absent
- name: remove installed group
yum:
name: "@Custom Group"
state: absent
- name: On Fedora 28 the above won't remove the group which results in a failure in repo.yml below
yum:
name: dinginessentail
state: absent
when:
- ansible_distribution in ['Fedora']
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux', 'Fedora']
- block:
- import_tasks: repo.yml
- import_tasks: yum_group_remove.yml
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux']
always:
- yum_repository:
name: "{{ item }}"
state: absent
loop: "{{ repos }}"
- command: yum clean metadata
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux', 'Fedora']
- import_tasks: yuminstallroot.yml
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux', 'Fedora']
- import_tasks: proxy.yml
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux', 'Fedora']
- import_tasks: check_mode_consistency.yml
when:
- (ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux'] and ansible_distribution_major_version|int == 7)
- import_tasks: lock.yml
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux']
- import_tasks: multiarch.yml
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux']
- ansible_architecture == 'x86_64'
# Our output parsing expects us to be on yum, not dnf
- ansible_distribution_major_version is version('7', '<=')
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,276 |
Can't use an integer value in body when using uri plugin with body_format: form-multipart
|
### Summary
When i try to use the URI plugin with body_format: form-multipart, it's not possible to put an integer value in body
```
- name: Upload a file via multipart/form-multipart
uri:
url: https://httpbin.org/post
method: POST
body_format: form-multipart
body:
text_form_field: 1
```
### Issue Type
Bug Report
### Component Name
uri.py
### Ansible Version
```console
$ ansible --version
ansible 2.10.8
config file = None
configured module search path = ['/home/jes/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/jes/ansible/lib/python3.8/site-packages/ansible
executable location = /home/jes/ansible/bin/ansible
python version = 3.8.5 (default, Jan 27 2021, 15:41:15) [GCC 9.3.0]
```
### Configuration
```console
$ ansible-config dump --only-changed
HOST_KEY_CHECKING(env: ANSIBLE_HOST_KEY_CHECKING) = False
```
### OS / Environment
Ubuntu 20.04.2 LTS
### Steps to Reproduce
<!--- - name: Upload a file via multipart/form-multipart
uri:
url: https://httpbin.org/post
method: POST
body_format: form-multipart
body:
text_form_field: 1 -->
### Expected Results
I expected no error, since the same request works with a string value
### Actual Results
```console
The full traceback is:
Traceback (most recent call last):
File "/home/jes/ansible/lib/python3.8/site-packages/ansible/executor/task_executor.py", line 158, in run
res = self._execute()
File "/home/jes/ansible/lib/python3.8/site-packages/ansible/executor/task_executor.py", line 663, in _execute
result = self._handler.run(task_vars=variables)
File "/home/jes/ansible/lib/python3.8/site-packages/ansible/plugins/action/uri.py", line 66, in run
content = value.get('content')
AttributeError: 'int' object has no attribute 'get'
fatal: [portainer_ce]: FAILED! => {
"msg": "Unexpected failure during module execution.",
"stdout": ""
}
```
### Code of Conduct
I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/74276
|
https://github.com/ansible/ansible/pull/74302
|
fdee5ca16dd169b83997b6c82130a2d9c7fe3688
|
019452dda7ffa08b2f78b65844469c475368a3f8
| 2021-04-14T12:26:26Z |
python
| 2021-04-16T15:09:31Z |
changelogs/fragments/uri-multipart-int-value.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,276 |
Can't use an integer value in body when using uri plugin with body_format: form-multipart
|
### Summary
When i try to use the URI plugin with body_format: form-multipart, it's not possible to put an integer value in body
```
- name: Upload a file via multipart/form-multipart
uri:
url: https://httpbin.org/post
method: POST
body_format: form-multipart
body:
text_form_field: 1
```
### Issue Type
Bug Report
### Component Name
uri.py
### Ansible Version
```console
$ ansible --version
ansible 2.10.8
config file = None
configured module search path = ['/home/jes/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/jes/ansible/lib/python3.8/site-packages/ansible
executable location = /home/jes/ansible/bin/ansible
python version = 3.8.5 (default, Jan 27 2021, 15:41:15) [GCC 9.3.0]
```
### Configuration
```console
$ ansible-config dump --only-changed
HOST_KEY_CHECKING(env: ANSIBLE_HOST_KEY_CHECKING) = False
```
### OS / Environment
Ubuntu 20.04.2 LTS
### Steps to Reproduce
<!--- - name: Upload a file via multipart/form-multipart
uri:
url: https://httpbin.org/post
method: POST
body_format: form-multipart
body:
text_form_field: 1 -->
### Expected Results
I expected no error, since the same request works with a string value
### Actual Results
```console
The full traceback is:
Traceback (most recent call last):
File "/home/jes/ansible/lib/python3.8/site-packages/ansible/executor/task_executor.py", line 158, in run
res = self._execute()
File "/home/jes/ansible/lib/python3.8/site-packages/ansible/executor/task_executor.py", line 663, in _execute
result = self._handler.run(task_vars=variables)
File "/home/jes/ansible/lib/python3.8/site-packages/ansible/plugins/action/uri.py", line 66, in run
content = value.get('content')
AttributeError: 'int' object has no attribute 'get'
fatal: [portainer_ce]: FAILED! => {
"msg": "Unexpected failure during module execution.",
"stdout": ""
}
```
### Code of Conduct
I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/74276
|
https://github.com/ansible/ansible/pull/74302
|
fdee5ca16dd169b83997b6c82130a2d9c7fe3688
|
019452dda7ffa08b2f78b65844469c475368a3f8
| 2021-04-14T12:26:26Z |
python
| 2021-04-16T15:09:31Z |
lib/ansible/module_utils/common/collections.py
|
# Copyright: (c) 2018, Sviatoslav Sydorenko <[email protected]>
# Copyright: (c) 2018, Ansible Project
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
"""Collection of low-level utility functions."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.six import binary_type, text_type
from ansible.module_utils.common._collections_compat import Hashable, Mapping, Sequence
class ImmutableDict(Hashable, Mapping):
    """Dictionary that cannot be updated"""

    def __init__(self, *args, **kwargs):
        # Snapshot the inputs into a private plain dict; nothing in this
        # class ever writes to it afterwards, which is what makes the
        # mapping immutable.
        self._store = dict(*args, **kwargs)

    def __getitem__(self, key):
        return self._store[key]

    def __iter__(self):
        return iter(self._store)

    def __len__(self):
        return len(self._store)

    def __hash__(self):
        # Hash the unordered set of items so that two ImmutableDicts
        # with the same contents hash identically regardless of
        # insertion order.
        return hash(frozenset(self.items()))

    def __eq__(self, other):
        # Equality is defined purely as "hashes match"; anything
        # unhashable simply compares unequal.
        try:
            return hash(self) == hash(other)
        except TypeError:
            return False

    def __repr__(self):
        return 'ImmutableDict({0})'.format(repr(self._store))

    def union(self, overriding_mapping):
        """
        Create an ImmutableDict as a combination of the original and overriding_mapping

        :arg overriding_mapping: A Mapping of replacement and additional items
        :return: A copy of the ImmutableDict with key-value pairs from the overriding_mapping added

        If any of the keys in overriding_mapping are already present in the original ImmutableDict,
        the overriding_mapping item replaces the one in the original ImmutableDict.
        """
        return ImmutableDict(self._store, **overriding_mapping)

    def difference(self, subtractive_iterable):
        """
        Create an ImmutableDict as a combination of the original minus keys in subtractive_iterable

        :arg subtractive_iterable: Any iterable containing keys that should not be present in the
            new ImmutableDict
        :return: A copy of the ImmutableDict with keys from the subtractive_iterable removed
        """
        dropped = frozenset(subtractive_iterable)
        return ImmutableDict(
            (key, value) for key, value in self._store.items() if key not in dropped
        )
def is_string(seq):
    """Identify whether the input has a string-like type (including bytes)."""
    if isinstance(seq, (text_type, binary_type)):
        return True
    # AnsibleVaultEncryptedUnicode inherits from Sequence but must still be
    # treated as a string-like object; it advertises itself via __ENCRYPTED__.
    return getattr(seq, '__ENCRYPTED__', False)
def is_iterable(seq, include_strings=False):
    """Identify whether the input is an iterable."""
    # Strings are iterable, but callers usually do not want to treat them as
    # collections, so they are excluded unless explicitly requested.
    if is_string(seq) and not include_strings:
        return False

    try:
        iter(seq)
    except TypeError:
        return False
    return True
def is_sequence(seq, include_strings=False):
    """Identify whether the input is a sequence.

    Strings and bytes are not sequences here,
    unless ``include_string`` is ``True``.

    Non-indexable things are never of a sequence type.
    """
    # Exclude string-like inputs first unless the caller opted in.
    if is_string(seq) and not include_strings:
        return False

    return isinstance(seq, Sequence)
def count(seq):
    """Returns a dictionary with the number of appearances of each element of the iterable.

    Resembles the collections.Counter class functionality. It is meant to be used when the
    code is run on Python 2.6.* where collections.Counter is not available. It should be
    deprecated and replaced when support for Python < 2.7 is dropped.
    """
    if not is_iterable(seq):
        raise Exception('Argument provided  is not an iterable')

    tallies = dict()
    for item in seq:
        if item in tallies:
            tallies[item] += 1
        else:
            tallies[item] = 1
    return tallies
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,276 |
Can't use an integer value in body when using uri plugin with body_format: form-multipart
|
### Summary
When I try to use the URI plugin with body_format: form-multipart, it is not possible to put an integer value in the body
```
- name: Upload a file via multipart/form-multipart
uri:
url: https://httpbin.org/post
method: POST
body_format: form-multipart
body:
text_form_field: 1
```
### Issue Type
Bug Report
### Component Name
uri.py
### Ansible Version
```console
$ ansible --version
ansible 2.10.8
config file = None
configured module search path = ['/home/jes/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/jes/ansible/lib/python3.8/site-packages/ansible
executable location = /home/jes/ansible/bin/ansible
python version = 3.8.5 (default, Jan 27 2021, 15:41:15) [GCC 9.3.0]
```
### Configuration
```console
$ ansible-config dump --only-changed
HOST_KEY_CHECKING(env: ANSIBLE_HOST_KEY_CHECKING) = False
```
### OS / Environment
Ubuntu 20.04.2 LTS
### Steps to Reproduce
<!--- - name: Upload a file via multipart/form-multipart
uri:
url: https://httpbin.org/post
method: POST
body_format: form-multipart
body:
text_form_field: 1 -->
### Expected Results
I expected to have no error since it works with string value
### Actual Results
```console
The full traceback is:
Traceback (most recent call last):
File "/home/jes/ansible/lib/python3.8/site-packages/ansible/executor/task_executor.py", line 158, in run
res = self._execute()
File "/home/jes/ansible/lib/python3.8/site-packages/ansible/executor/task_executor.py", line 663, in _execute
result = self._handler.run(task_vars=variables)
File "/home/jes/ansible/lib/python3.8/site-packages/ansible/plugins/action/uri.py", line 66, in run
content = value.get('content')
AttributeError: 'int' object has no attribute 'get'
fatal: [portainer_ce]: FAILED! => {
"msg": "Unexpected failure during module execution.",
"stdout": ""
}
```
### Code of Conduct
I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/74276
|
https://github.com/ansible/ansible/pull/74302
|
fdee5ca16dd169b83997b6c82130a2d9c7fe3688
|
019452dda7ffa08b2f78b65844469c475368a3f8
| 2021-04-14T12:26:26Z |
python
| 2021-04-16T15:09:31Z |
lib/ansible/plugins/action/uri.py
|
# -*- coding: utf-8 -*-
# (c) 2015, Brian Coca <[email protected]>
# (c) 2018, Matt Martz <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.errors import AnsibleError, AnsibleAction, _AnsibleActionDone, AnsibleActionFail
from ansible.module_utils._text import to_native
from ansible.module_utils.common.collections import Mapping
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.six import text_type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
    TRANSFERS_FILES = True

    def run(self, tmp=None, task_vars=None):
        """Local-side preparation for the ``uri`` module.

        Transfers ``src`` (or any ``filename``-based entries of a
        ``form-multipart`` body) to the remote temporary directory and then
        delegates to the ``uri`` module with updated arguments.

        :arg tmp: Deprecated/unused; kept for interface compatibility.
        :arg task_vars: Variables for the current task.
        :returns: The module result dict.
        """
        self._supports_async = True

        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect

        body_format = self._task.args.get('body_format', 'raw')
        body = self._task.args.get('body')
        src = self._task.args.get('src', None)
        remote_src = boolean(self._task.args.get('remote_src', 'no'), strict=False)

        try:
            if remote_src:
                # everything is remote, so we just execute the module
                # without changing any of the module arguments
                # call with ansible.legacy prefix to prevent collections collisions while allowing local override
                raise _AnsibleActionDone(result=self._execute_module(module_name='ansible.legacy.uri',
                                                                     task_vars=task_vars, wrap_async=self._task.async_val))

            kwargs = {}

            if src:
                try:
                    src = self._find_needle('files', src)
                except AnsibleError as e:
                    raise AnsibleActionFail(to_native(e))

                tmp_src = self._connection._shell.join_path(self._connection._shell.tmpdir, os.path.basename(src))
                kwargs['src'] = tmp_src
                self._transfer_file(src, tmp_src)
                self._fixup_perms2((self._connection._shell.tmpdir, tmp_src))
            elif body_format == 'form-multipart':
                if not isinstance(body, Mapping):
                    raise AnsibleActionFail(
                        'body must be mapping, cannot be type %s' % body.__class__.__name__
                    )
                for field, value in body.items():
                    # Plain scalar values (str, int, float, ...) are simple
                    # form fields and cannot reference a local file; only
                    # mapping entries may carry a 'filename' to transfer.
                    # Checking for Mapping (instead of only skipping text)
                    # fixes an AttributeError for non-string scalars.
                    if not isinstance(value, Mapping):
                        continue
                    content = value.get('content')
                    filename = value.get('filename')
                    if not filename or content:
                        # Inline content provided, or no filename given:
                        # there is nothing to transfer for this field.
                        continue

                    try:
                        filename = self._find_needle('files', filename)
                    except AnsibleError as e:
                        raise AnsibleActionFail(to_native(e))

                    tmp_src = self._connection._shell.join_path(
                        self._connection._shell.tmpdir,
                        os.path.basename(filename)
                    )
                    value['filename'] = tmp_src
                    self._transfer_file(filename, tmp_src)
                    self._fixup_perms2((self._connection._shell.tmpdir, tmp_src))
                kwargs['body'] = body

            new_module_args = self._task.args.copy()
            new_module_args.update(kwargs)

            # call with ansible.legacy prefix to prevent collections collisions while allowing local override
            result.update(self._execute_module('ansible.legacy.uri', module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async_val))
        except AnsibleAction as e:
            result.update(e.result)
        finally:
            if not self._task.async_val:
                self._remove_tmp_path(self._connection._shell.tmpdir)
        return result
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 74,276 |
Can't use an integer value in body when using uri plugin with body_format: form-multipart
|
### Summary
When I try to use the URI plugin with body_format: form-multipart, it is not possible to put an integer value in the body
```
- name: Upload a file via multipart/form-multipart
uri:
url: https://httpbin.org/post
method: POST
body_format: form-multipart
body:
text_form_field: 1
```
### Issue Type
Bug Report
### Component Name
uri.py
### Ansible Version
```console
$ ansible --version
ansible 2.10.8
config file = None
configured module search path = ['/home/jes/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/jes/ansible/lib/python3.8/site-packages/ansible
executable location = /home/jes/ansible/bin/ansible
python version = 3.8.5 (default, Jan 27 2021, 15:41:15) [GCC 9.3.0]
```
### Configuration
```console
$ ansible-config dump --only-changed
HOST_KEY_CHECKING(env: ANSIBLE_HOST_KEY_CHECKING) = False
```
### OS / Environment
Ubuntu 20.04.2 LTS
### Steps to Reproduce
<!--- - name: Upload a file via multipart/form-multipart
uri:
url: https://httpbin.org/post
method: POST
body_format: form-multipart
body:
text_form_field: 1 -->
### Expected Results
I expected to have no error since it works with string value
### Actual Results
```console
The full traceback is:
Traceback (most recent call last):
File "/home/jes/ansible/lib/python3.8/site-packages/ansible/executor/task_executor.py", line 158, in run
res = self._execute()
File "/home/jes/ansible/lib/python3.8/site-packages/ansible/executor/task_executor.py", line 663, in _execute
result = self._handler.run(task_vars=variables)
File "/home/jes/ansible/lib/python3.8/site-packages/ansible/plugins/action/uri.py", line 66, in run
content = value.get('content')
AttributeError: 'int' object has no attribute 'get'
fatal: [portainer_ce]: FAILED! => {
"msg": "Unexpected failure during module execution.",
"stdout": ""
}
```
### Code of Conduct
I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/74276
|
https://github.com/ansible/ansible/pull/74302
|
fdee5ca16dd169b83997b6c82130a2d9c7fe3688
|
019452dda7ffa08b2f78b65844469c475368a3f8
| 2021-04-14T12:26:26Z |
python
| 2021-04-16T15:09:31Z |
test/integration/targets/uri/tasks/main.yml
|
# test code for the uri module
# (c) 2014, Leonid Evdokimov <[email protected]>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
- name: set role facts
set_fact:
http_port: 15260
files_dir: '{{ output_dir|expanduser }}/files'
checkout_dir: '{{ output_dir }}/git'
- name: create a directory to serve files from
file:
dest: "{{ files_dir }}"
state: directory
- copy:
src: "{{ item }}"
dest: "{{files_dir}}/{{ item }}"
with_sequence: start=0 end=4 format=pass%d.json
- copy:
src: "{{ item }}"
dest: "{{files_dir}}/{{ item }}"
with_sequence: start=0 end=30 format=fail%d.json
- copy:
src: "testserver.py"
dest: "{{ output_dir }}/testserver.py"
- name: start SimpleHTTPServer
shell: cd {{ files_dir }} && {{ ansible_python.executable }} {{ output_dir}}/testserver.py {{ http_port }}
async: 120 # this test set can take ~1m to run on FreeBSD (via Shippable)
poll: 0
- wait_for: port={{ http_port }}
- name: checksum pass_json
stat: path={{ files_dir }}/{{ item }}.json get_checksum=yes
register: pass_checksum
with_sequence: start=0 end=4 format=pass%d
- name: fetch pass_json
uri: return_content=yes url=http://localhost:{{ http_port }}/{{ item }}.json
register: fetch_pass_json
with_sequence: start=0 end=4 format=pass%d
- name: check pass_json
assert:
that:
- '"json" in item.1'
- item.0.stat.checksum == item.1.content | checksum
with_together:
- "{{pass_checksum.results}}"
- "{{fetch_pass_json.results}}"
- name: checksum fail_json
stat: path={{ files_dir }}/{{ item }}.json get_checksum=yes
register: fail_checksum
with_sequence: start=0 end=30 format=fail%d
- name: fetch fail_json
uri: return_content=yes url=http://localhost:{{ http_port }}/{{ item }}.json
register: fail
with_sequence: start=0 end=30 format=fail%d
- name: check fail_json
assert:
that:
- item.0.stat.checksum == item.1.content | checksum
- '"json" not in item.1'
with_together:
- "{{fail_checksum.results}}"
- "{{fail.results}}"
- name: test https fetch to a site with mismatched hostname and certificate
uri:
url: "https://{{ badssl_host }}/"
dest: "{{ output_dir }}/shouldnotexist.html"
ignore_errors: True
register: result
- stat:
path: "{{ output_dir }}/shouldnotexist.html"
register: stat_result
- name: Assert that the file was not downloaded
assert:
that:
- result.failed == true
- "'Failed to validate the SSL certificate' in result.msg or 'Hostname mismatch' in result.msg or (result.msg is match('hostname .* doesn.t match .*'))"
- stat_result.stat.exists == false
- result.status is defined
- result.status == -1
- result.url == 'https://' ~ badssl_host ~ '/'
- name: Clean up any cruft from the results directory
file:
name: "{{ output_dir }}/kreitz.html"
state: absent
- name: test https fetch to a site with mismatched hostname and certificate and validate_certs=no
uri:
url: "https://{{ badssl_host }}/"
dest: "{{ output_dir }}/kreitz.html"
validate_certs: no
register: result
- stat:
path: "{{ output_dir }}/kreitz.html"
register: stat_result
- name: Assert that the file was downloaded
assert:
that:
- "stat_result.stat.exists == true"
- "result.changed == true"
- name: "get ca certificate {{ self_signed_host }}"
get_url:
url: "http://{{ httpbin_host }}/ca2cert.pem"
dest: "{{ remote_tmp_dir }}/ca2cert.pem"
- name: test https fetch to a site with self signed certificate using ca_path
uri:
url: "https://{{ self_signed_host }}:444/"
dest: "{{ output_dir }}/self-signed_using_ca_path.html"
ca_path: "{{ remote_tmp_dir }}/ca2cert.pem"
validate_certs: yes
register: result
- stat:
path: "{{ output_dir }}/self-signed_using_ca_path.html"
register: stat_result
- name: Assert that the file was downloaded
assert:
that:
- "stat_result.stat.exists == true"
- "result.changed == true"
- name: test https fetch to a site with self signed certificate without using ca_path
uri:
url: "https://{{ self_signed_host }}:444/"
dest: "{{ output_dir }}/self-signed-without_using_ca_path.html"
validate_certs: yes
register: result
ignore_errors: true
- stat:
path: "{{ output_dir }}/self-signed-without_using_ca_path.html"
register: stat_result
- name: Assure that https access to a host with self-signed certificate without providing ca_path fails
assert:
that:
- "stat_result.stat.exists == false"
- result is failed
- "'certificate verify failed' in result.msg"
- name: test redirect without follow_redirects
uri:
url: 'https://{{ httpbin_host }}/redirect/2'
follow_redirects: 'none'
status_code: 302
register: result
- name: Assert location header
assert:
that:
- 'result.location|default("") == "https://{{ httpbin_host }}/relative-redirect/1"'
- name: Check SSL with redirect
uri:
url: 'https://{{ httpbin_host }}/redirect/2'
register: result
- name: Assert SSL with redirect
assert:
that:
- 'result.url|default("") == "https://{{ httpbin_host }}/get"'
- name: redirect to bad SSL site
uri:
url: 'http://{{ badssl_host }}'
register: result
ignore_errors: true
- name: Ensure bad SSL site redirect fails
assert:
that:
- result is failed
- 'badssl_host in result.msg'
- name: test basic auth
uri:
url: 'https://{{ httpbin_host }}/basic-auth/user/passwd'
user: user
password: passwd
- name: test basic forced auth
uri:
url: 'https://{{ httpbin_host }}/hidden-basic-auth/user/passwd'
force_basic_auth: true
user: user
password: passwd
- name: test digest auth
uri:
url: 'https://{{ httpbin_host }}/digest-auth/auth/user/passwd'
user: user
password: passwd
headers:
Cookie: "fake=fake_value"
- name: test PUT
uri:
url: 'https://{{ httpbin_host }}/put'
method: PUT
body: 'foo=bar'
- name: test OPTIONS
uri:
url: 'https://{{ httpbin_host }}/'
method: OPTIONS
register: result
- name: Assert we got an allow header
assert:
that:
- 'result.allow.split(", ")|sort == ["GET", "HEAD", "OPTIONS"]'
# Ubuntu12.04 doesn't have python-urllib3, this makes handling required dependencies a pain across all variations
# We'll use this to just skip 12.04 on those tests. We should be sufficiently covered with other OSes and versions
- name: Set fact if running on Ubuntu 12.04
set_fact:
is_ubuntu_precise: "{{ ansible_distribution == 'Ubuntu' and ansible_distribution_release == 'precise' }}"
- name: Test that SNI succeeds on python versions that have SNI
uri:
url: 'https://{{ sni_host }}/'
return_content: true
when: ansible_python.has_sslcontext
register: result
- name: Assert SNI verification succeeds on new python
assert:
that:
- result is successful
- 'sni_host in result.content'
when: ansible_python.has_sslcontext
- name: Verify SNI verification fails on old python without urllib3 contrib
uri:
url: 'https://{{ sni_host }}'
ignore_errors: true
when: not ansible_python.has_sslcontext
register: result
- name: Assert SNI verification fails on old python
assert:
that:
- result is failed
when: result is not skipped
- name: check if urllib3 is installed as an OS package
package:
name: "{{ uri_os_packages[ansible_os_family].urllib3 }}"
check_mode: yes
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool and uri_os_packages[ansible_os_family].urllib3|default
register: urllib3
- name: uninstall conflicting urllib3 pip package
pip:
name: urllib3
state: absent
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool and uri_os_packages[ansible_os_family].urllib3|default and urllib3.changed
- name: install OS packages that are needed for SNI on old python
package:
name: "{{ item }}"
with_items: "{{ uri_os_packages[ansible_os_family].step1 | default([]) }}"
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
- name: install python modules for Older Python SNI verification
pip:
name: "{{ item }}"
with_items:
- ndg-httpsclient
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
- name: Verify SNI verification succeeds on old python with urllib3 contrib
uri:
url: 'https://{{ sni_host }}'
return_content: true
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
register: result
- name: Assert SNI verification succeeds on old python
assert:
that:
- result is successful
- 'sni_host in result.content'
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
- name: Uninstall ndg-httpsclient
pip:
name: "{{ item }}"
state: absent
with_items:
- ndg-httpsclient
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
- name: uninstall OS packages that are needed for SNI on old python
package:
name: "{{ item }}"
state: absent
with_items: "{{ uri_os_packages[ansible_os_family].step1 | default([]) }}"
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
- name: install OS packages that are needed for building cryptography
package:
name: "{{ item }}"
with_items: "{{ uri_os_packages[ansible_os_family].step2 | default([]) }}"
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
- name: install urllib3 and pyopenssl via pip
pip:
name: "{{ item }}"
state: latest
extra_args: "-c {{ remote_constraints }}"
with_items:
- urllib3
- PyOpenSSL
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
- name: Verify SNI verification succeeds on old python with pip urllib3 contrib
uri:
url: 'https://{{ sni_host }}'
return_content: true
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
register: result
- name: Assert SNI verification succeeds on old python with pip urllib3 contrib
assert:
that:
- result is successful
- 'sni_host in result.content'
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
- name: Uninstall urllib3 and PyOpenSSL
pip:
name: "{{ item }}"
state: absent
with_items:
- urllib3
- PyOpenSSL
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
- name: validate the status_codes are correct
uri:
url: "https://{{ httpbin_host }}/status/202"
status_code: 202
method: POST
body: foo
- name: Validate body_format json does not override content-type in 2.3 or newer
uri:
url: "https://{{ httpbin_host }}/post"
method: POST
body:
foo: bar
body_format: json
headers:
'Content-Type': 'text/json'
return_content: true
register: result
failed_when: result.json.headers['Content-Type'] != 'text/json'
- name: Validate body_format form-urlencoded using dicts works
uri:
url: https://{{ httpbin_host }}/post
method: POST
body:
user: foo
password: bar!#@ |&82$M
submit: Sign in
body_format: form-urlencoded
return_content: yes
register: result
- name: Assert form-urlencoded dict input
assert:
that:
- result is successful
- result.json.headers['Content-Type'] == 'application/x-www-form-urlencoded'
- result.json.form.password == 'bar!#@ |&82$M'
- name: Validate body_format form-urlencoded using lists works
uri:
url: https://{{ httpbin_host }}/post
method: POST
body:
- [ user, foo ]
- [ password, bar!#@ |&82$M ]
- [ submit, Sign in ]
body_format: form-urlencoded
return_content: yes
register: result
- name: Assert form-urlencoded list input
assert:
that:
- result is successful
- result.json.headers['Content-Type'] == 'application/x-www-form-urlencoded'
- result.json.form.password == 'bar!#@ |&82$M'
- name: Validate body_format form-urlencoded of invalid input fails
uri:
url: https://{{ httpbin_host }}/post
method: POST
body:
- foo
- bar: baz
body_format: form-urlencoded
return_content: yes
register: result
ignore_errors: yes
- name: Assert invalid input fails
assert:
that:
- result is failure
- "'failed to parse body as form_urlencoded: too many values to unpack' in result.msg"
- name: multipart/form-data
uri:
url: https://{{ httpbin_host }}/post
method: POST
body_format: form-multipart
body:
file1:
filename: formdata.txt
file2:
content: text based file content
filename: fake.txt
mime_type: text/plain
text_form_field1: value1
text_form_field2:
content: value2
mime_type: text/plain
register: multipart
- name: Assert multipart/form-data
assert:
that:
- multipart.json.files.file1 == '_multipart/form-data_\n'
- multipart.json.files.file2 == 'text based file content'
- multipart.json.form.text_form_field1 == 'value1'
- multipart.json.form.text_form_field2 == 'value2'
- name: Validate invalid method
uri:
url: https://{{ httpbin_host }}/anything
method: UNKNOWN
register: result
ignore_errors: yes
- name: Assert invalid method fails
assert:
that:
- result is failure
- result.status == 405
- "'METHOD NOT ALLOWED' in result.msg"
- name: Test client cert auth, no certs
uri:
url: "https://ansible.http.tests/ssl_client_verify"
status_code: 200
return_content: true
register: result
failed_when: result.content != "ansible.http.tests:NONE"
when: has_httptester
- name: Test client cert auth, with certs
uri:
url: "https://ansible.http.tests/ssl_client_verify"
client_cert: "{{ remote_tmp_dir }}/client.pem"
client_key: "{{ remote_tmp_dir }}/client.key"
return_content: true
register: result
failed_when: result.content != "ansible.http.tests:SUCCESS"
when: has_httptester
- name: Test client cert auth, with no validation
uri:
url: "https://fail.ansible.http.tests/ssl_client_verify"
client_cert: "{{ remote_tmp_dir }}/client.pem"
client_key: "{{ remote_tmp_dir }}/client.key"
return_content: true
validate_certs: no
register: result
failed_when: result.content != "ansible.http.tests:SUCCESS"
when: has_httptester
- name: Test client cert auth, with validation and ssl mismatch
uri:
url: "https://fail.ansible.http.tests/ssl_client_verify"
client_cert: "{{ remote_tmp_dir }}/client.pem"
client_key: "{{ remote_tmp_dir }}/client.key"
return_content: true
validate_certs: yes
register: result
failed_when: result is not failed
when: has_httptester
- uri:
url: https://{{ httpbin_host }}/response-headers?Set-Cookie=Foo%3Dbar&Set-Cookie=Baz%3Dqux
register: result
- assert:
that:
- result['set_cookie'] == 'Foo=bar, Baz=qux'
# Python sorts cookies in order of most specific (ie. longest) path first
# items with the same path are reversed from response order
- result['cookies_string'] == 'Baz=qux; Foo=bar'
- name: Write out netrc template
template:
src: netrc.j2
dest: "{{ remote_tmp_dir }}/netrc"
- name: Test netrc with port
uri:
url: "https://{{ httpbin_host }}:443/basic-auth/user/passwd"
environment:
NETRC: "{{ remote_tmp_dir }}/netrc"
- name: Test JSON POST with src
uri:
url: "https://{{ httpbin_host}}/post"
src: pass0.json
method: POST
return_content: true
body_format: json
register: result
- name: Validate POST with src works
assert:
that:
- result.json.json[0] == 'JSON Test Pattern pass1'
- name: Copy file pass0.json to remote
copy:
src: "{{ role_path }}/files/pass0.json"
dest: "{{ remote_tmp_dir }}/pass0.json"
- name: Test JSON POST with src and remote_src=True
uri:
url: "https://{{ httpbin_host}}/post"
src: "{{ remote_tmp_dir }}/pass0.json"
remote_src: true
method: POST
return_content: true
body_format: json
register: result
- name: Validate POST with src and remote_src=True works
assert:
that:
- result.json.json[0] == 'JSON Test Pattern pass1'
- name: Make request that includes password in JSON keys
uri:
url: "https://{{ httpbin_host}}/get?key-password=value-password"
user: admin
password: password
register: sanitize_keys
- name: assert that keys were sanitized
assert:
that:
- sanitize_keys.json.args['key-********'] == 'value-********'
- name: Create a testing file
copy:
content: "content"
dest: "{{ output_dir }}/output"
- name: Download a file from non existing location
uri:
url: http://does/not/exist
dest: "{{ output_dir }}/output"
ignore_errors: yes
- name: Save testing file's output
command: "cat {{ output_dir }}/output"
register: file_out
- name: Test the testing file was not overwritten
assert:
that:
- "'content' in file_out.stdout"
- name: Clean up
file:
dest: "{{ output_dir }}/output"
state: absent
- name: Test follow_redirects=none
import_tasks: redirect-none.yml
- name: Test follow_redirects=safe
import_tasks: redirect-safe.yml
- name: Test follow_redirects=urllib2
import_tasks: redirect-urllib2.yml
- name: Test follow_redirects=all
import_tasks: redirect-all.yml
- name: Check unexpected failures
import_tasks: unexpected-failures.yml
- name: Check return-content
import_tasks: return-content.yml
- name: Test use_gssapi=True
include_tasks:
file: use_gssapi.yml
apply:
environment:
KRB5_CONFIG: '{{ krb5_config }}'
KRB5CCNAME: FILE:{{ remote_tmp_dir }}/krb5.cc
when: krb5_config is defined
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,841 |
Redirect in documentation is broken for aws_s3_module from latest when going to 2.9 in the dropdown
|
### Summary
When finding documentation on the aws_s3_module:
https://docs.ansible.com/ansible/latest/collections/amazon/aws/aws_s3_module.html
Clicking the dropdown and changing the value to 2.9 redirects the documentation here:
https://docs.ansible.com/ansible/latest/collections/amazon/aws/aws_s2.9_module.html
which results in the 404 error.
The correct documentation is located here:
https://docs.ansible.com/ansible/2.9/collections/amazon/aws/aws_s3_module.html
It appears as though the redirect notices the 3 in the aws_s3_module part of the URL and tries to swap in 2.9 instead.
### Issue Type
Documentation Report
### Component Name
docs
### Ansible Version
```console (paste below)
$ ansible --version
The issue is in the online documentation.
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
The issue is in the online documentation hyperlink
```
### OS / Environment
all
|
https://github.com/ansible/ansible/issues/73841
|
https://github.com/ansible/ansible/pull/74089
|
9ba6cf9a72504c3cc863716cc77317ef9ade4728
|
325ccf22fee9a56eb6bd0ed148bd2cffc7449922
| 2021-03-09T21:13:18Z |
python
| 2021-04-19T16:33:35Z |
docs/docsite/_themes/sphinx_rtd_theme/ansible_versions.html
|
<!--- Based on https://github.com/rtfd/sphinx_rtd_theme/pull/438/files -->
{# Creates dropdown version selection in the top-left navigation. #}
<div class="version">
  {% if (not READTHEDOCS) and (available_versions is defined) %}
  <div class="version-dropdown">
    <select class="version-list" id="version-list" onchange="javascript:location.href = this.value;">
      <script> x = document.getElementById("version-list"); </script>
      {% for slug in available_versions %}
      <script>
        current_url = window.location.href;
        option = document.createElement("option");
        option.text = "{{ slug }}";
        if ( "{{ slug }}" == "{{ current_version }}" ) {
          option.selected = true;
        }
        // Anchor the version token with surrounding slashes so that only the
        // version path segment (e.g. "/3/") is replaced, never a substring of
        // a page name such as "aws_s3_module" becoming "aws_s2.9_module".
        // indexOf is used instead of search() to avoid regex interpretation
        // of dots in version numbers like "2.10".
        if (current_url.indexOf("/{{ current_version }}/") > -1) {
          option.value = current_url.replace("/{{ current_version }}/", "/{{ slug }}/");
        } else {
          option.value = current_url.replace("/latest/", "/{{ slug }}/");
        }
        x.add(option);
      </script>
      {% endfor %}
    </select>
  </div>
  {% else %}
  {{ nav_version }}
  {% endif %}
</div>
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,926 |
Massive overhead of hosts templating for every host of every task
|
### Summary
Executing a playbook with many hosts, there is massive overhead in checking whether the hosts list is a pattern, see for yourself on this callgrind graph:

https://github.com/ansible/ansible/blob/da60525610a384bb04833b1c6429d9db6a87ef64/lib/ansible/vars/manager.py#L490
That check has quadratic runtime with the number of hosts - with a lot of hosts, commenting that check out reduces runtime manyfold.
### Issue Type
Bug Report
### Component Name
variable manager
### Ansible Version
2.10.5, devel
### OS / Environment
Ubuntu
### Steps to Reproduce
```yaml
- hosts: localhost
tasks:
- set_fact:
extra_tasks: true # compute it
hosts: "{{ [an, array, with, 5100, computed, hosts] }}"
- hosts: "{{ hostvars.localhost.hosts }}"
vars:
extra_tasks: "{{ hostvars.localhost.do_extra_tasks }}"
tasks:
# some more tasks here
- name: Execute extra tasks
command: /bin/true # example
when: extra_tasks
```
### Expected Results
Execute in tens of seconds.
### Actual Results
Take over 5 minutes to execute.
|
https://github.com/ansible/ansible/issues/73926
|
https://github.com/ansible/ansible/pull/73941
|
d8bf4206e446c45ba057e85819278cef5fbeff2c
|
3740d7b028b23a1630f544e01cabf1fa3a50257c
| 2021-03-16T18:09:49Z |
python
| 2021-04-19T17:03:40Z |
changelogs/fragments/73926-is-template-play-hosts.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,926 |
Massive overhead of hosts templating for every host of every task
|
### Summary
Executing a playbook with many hosts, there is massive overhead in checking whether the hosts list is a pattern, see for yourself on this callgrind graph:

https://github.com/ansible/ansible/blob/da60525610a384bb04833b1c6429d9db6a87ef64/lib/ansible/vars/manager.py#L490
That check has quadratic runtime with the number of hosts - with a lot of hosts, commenting that check out reduces runtime manyfold.
### Issue Type
Bug Report
### Component Name
variable manager
### Ansible Version
2.10.5, devel
### OS / Environment
Ubuntu
### Steps to Reproduce
```yaml
- hosts: localhost
tasks:
- set_fact:
extra_tasks: true # compute it
hosts: "{{ [an, array, with, 5100, computed, hosts] }}"
- hosts: "{{ hostvars.localhost.hosts }}"
vars:
extra_tasks: "{{ hostvars.localhost.do_extra_tasks }}"
tasks:
# some more tasks here
- name: Execute extra tasks
command: /bin/true # example
when: extra_tasks
```
### Expected Results
Execute in tens of seconds.
### Actual Results
Take over 5 minutes to execute.
|
https://github.com/ansible/ansible/issues/73926
|
https://github.com/ansible/ansible/pull/73941
|
d8bf4206e446c45ba057e85819278cef5fbeff2c
|
3740d7b028b23a1630f544e01cabf1fa3a50257c
| 2021-03-16T18:09:49Z |
python
| 2021-04-19T17:03:40Z |
lib/ansible/playbook/base.py
|
# Copyright: (c) 2012-2014, Michael DeHaan <[email protected]>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import itertools
import operator
from copy import copy as shallowcopy
from functools import partial
from jinja2.exceptions import UndefinedError
from ansible import constants as C
from ansible import context
from ansible.errors import AnsibleError
from ansible.module_utils.six import iteritems, string_types, with_metaclass
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.errors import AnsibleParserError, AnsibleUndefinedVariable, AnsibleAssertionError
from ansible.module_utils._text import to_text, to_native
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.parsing.dataloader import DataLoader
from ansible.utils.display import Display
from ansible.utils.sentinel import Sentinel
from ansible.utils.vars import combine_vars, isidentifier, get_unique_id
display = Display()
def _generic_g(prop_name, self):
    """Generic property getter generated by BaseMeta.

    Returns the stored value for *prop_name*, substituting the per-instance
    default when the stored value is the Sentinel placeholder.
    """
    try:
        stored = self._attributes[prop_name]
    except KeyError:
        raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, prop_name))
    return self._attr_defaults[prop_name] if stored is Sentinel else stored
def _generic_g_method(prop_name, self):
try:
if self._squashed:
return self._attributes[prop_name]
method = "_get_attr_%s" % prop_name
return getattr(self, method)()
except KeyError:
raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, prop_name))
def _generic_g_parent(prop_name, self):
    """Getter for inheriting attributes: prefer the parent chain's value.

    Squashed or finalized objects read their own flattened value; otherwise
    the parent's value is consulted first, falling back to the local one.
    Sentinel placeholders resolve to the per-instance default.
    """
    try:
        if self._squashed or self._finalized:
            stored = self._attributes[prop_name]
        else:
            try:
                stored = self._get_parent_attribute(prop_name)
            except AttributeError:
                stored = self._attributes[prop_name]
    except KeyError:
        raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, prop_name))
    return self._attr_defaults[prop_name] if stored is Sentinel else stored
def _generic_s(prop_name, self, value):
self._attributes[prop_name] = value
def _generic_d(prop_name, self):
del self._attributes[prop_name]
class BaseMeta(type):

    """
    Metaclass for the Base object, which is used to construct the class
    attributes based on the FieldAttributes available.
    """

    def __new__(cls, name, parents, dct):
        def _create_attrs(src_dict, dst_dict):
            '''
            Helper method which creates the attributes based on those in the
            source dictionary of attributes. This also populates the other
            attributes used to keep track of these attributes and via the
            getter/setter/deleter methods.
            '''
            keys = list(src_dict.keys())
            for attr_name in keys:
                value = src_dict[attr_name]
                if isinstance(value, Attribute):
                    # FieldAttributes are declared with a leading underscore
                    # (e.g. ``_name``); the generated property drops it
                    if attr_name.startswith('_'):
                        attr_name = attr_name[1:]

                    # here we selectively assign the getter based on a few
                    # things, such as whether we have a _get_attr_<name>
                    # method, or if the attribute is marked as not inheriting
                    # its value from a parent object
                    method = "_get_attr_%s" % attr_name
                    if method in src_dict or method in dst_dict:
                        getter = partial(_generic_g_method, attr_name)
                    elif ('_get_parent_attribute' in dst_dict or '_get_parent_attribute' in src_dict) and value.inherit:
                        getter = partial(_generic_g_parent, attr_name)
                    else:
                        getter = partial(_generic_g, attr_name)

                    setter = partial(_generic_s, attr_name)
                    deleter = partial(_generic_d, attr_name)

                    # install the property plus the bookkeeping entries that
                    # the generic getters/setters rely on at runtime
                    dst_dict[attr_name] = property(getter, setter, deleter)
                    dst_dict['_valid_attrs'][attr_name] = value
                    dst_dict['_attributes'][attr_name] = Sentinel
                    dst_dict['_attr_defaults'][attr_name] = value.default

                    # an aliased attribute shares the same property under
                    # its alternate name
                    if value.alias is not None:
                        dst_dict[value.alias] = property(getter, setter, deleter)
                        dst_dict['_valid_attrs'][value.alias] = value
                        dst_dict['_alias_attrs'][value.alias] = attr_name

        def _process_parents(parents, dst_dict):
            '''
            Helper method which creates attributes from all parent objects
            recursively on through grandparent objects
            '''
            for parent in parents:
                if hasattr(parent, '__dict__'):
                    _create_attrs(parent.__dict__, dst_dict)
                    new_dst_dict = parent.__dict__.copy()
                    new_dst_dict.update(dst_dict)
                    _process_parents(parent.__bases__, new_dst_dict)

        # create some additional class attributes
        dct['_attributes'] = {}
        dct['_attr_defaults'] = {}
        dct['_valid_attrs'] = {}
        dct['_alias_attrs'] = {}

        # now create the attributes based on the FieldAttributes
        # available, including from parent (and grandparent) objects
        _create_attrs(dct, dct)
        _process_parents(parents, dct)

        return super(BaseMeta, cls).__new__(cls, name, parents, dct)
class FieldAttributeBase(with_metaclass(BaseMeta, object)):
    """
    Base class providing FieldAttribute storage, loading, validation and
    (de)serialization for all playbook objects.
    """

    def __init__(self):

        # initialize the data loader and variable manager, which will be provided
        # later when the object is actually loaded
        self._loader = None
        self._variable_manager = None

        # other internal params
        self._validated = False   # parse-time validation has run
        self._squashed = False    # attribute values flattened in place
        self._finalized = False   # post_validate() has completed

        # every object gets a random uuid:
        self._uuid = get_unique_id()

        # we create a copy of the attributes here due to the fact that
        # it was initialized as a class param in the meta class, so we
        # need a unique object here (all members contained within are
        # unique already).
        self._attributes = self.__class__._attributes.copy()
        self._attr_defaults = self.__class__._attr_defaults.copy()
        for key, value in self._attr_defaults.items():
            # resolve factory callables per instance so instances never
            # share a mutable default value
            if callable(value):
                self._attr_defaults[key] = value()

        # and init vars, avoid using defaults in field declaration as it lives across plays
        self.vars = dict()
def dump_me(self, depth=0):
    ''' this is never called from production code, it is here to be used when debugging as a 'complex print' '''
    if depth == 0:
        display.debug("DUMPING OBJECT ------------------------------------------------------")
    display.debug("%s- %s (%s, id=%s)" % (" " * depth, self.__class__.__name__, self, id(self)))
    # recursively dump the parent chain and its dependency chain, if present
    if hasattr(self, '_parent') and self._parent:
        self._parent.dump_me(depth + 2)
        dep_chain = self._parent.get_dep_chain()
        if dep_chain:
            for dep in dep_chain:
                dep.dump_me(depth + 2)
    # and the play this object belongs to, if any
    if hasattr(self, '_play') and self._play:
        self._play.dump_me(depth + 2)
def preprocess_data(self, ds):
    ''' infrequently used method to do some pre-processing of legacy terms '''
    # base implementation is a no-op; subclasses override this to rewrite
    # legacy/shorthand keys before attribute validation runs
    return ds
def load_data(self, ds, variable_manager=None, loader=None):
    ''' walk the input datastructure and assign any values '''

    if ds is None:
        raise AnsibleAssertionError('ds (%s) should not be None but it is.' % ds)

    # cache the datastructure internally
    setattr(self, '_ds', ds)

    # the variable manager class is used to manage and merge variables
    # down to a single dictionary for reference in templating, etc.
    self._variable_manager = variable_manager

    # the data loader class is used to parse data from strings and files
    if loader is not None:
        self._loader = loader
    else:
        self._loader = DataLoader()

    # call the preprocess_data() function to massage the data into
    # something we can more easily parse, and then call the validation
    # function on it to ensure there are no incorrect key values
    ds = self.preprocess_data(ds)
    self._validate_attributes(ds)

    # Walk all attributes in the class. We sort them based on their priority
    # so that certain fields can be loaded before others, if they are dependent.
    for name, attr in sorted(iteritems(self._valid_attrs), key=operator.itemgetter(1)):
        # copy the value over unless a _load_field method is defined
        target_name = name
        if name in self._alias_attrs:
            # aliases store their value under the canonical attribute name
            target_name = self._alias_attrs[name]
        if name in ds:
            method = getattr(self, '_load_%s' % name, None)
            if method:
                self._attributes[target_name] = method(name, ds[name])
            else:
                self._attributes[target_name] = ds[name]

    # run early, non-critical validation
    self.validate()

    # return the constructed object
    return self
def get_ds(self):
    """Return the raw datastructure this object was loaded from, or None."""
    return getattr(self, '_ds', None)


def get_loader(self):
    """Return the DataLoader associated with this object."""
    return self._loader


def get_variable_manager(self):
    """Return the variable manager associated with this object."""
    return self._variable_manager
def _post_validate_debugger(self, attr, value, templar):
    """Validate the 'debugger' field after templating; return the templated value."""
    # template first so the mode may be selected via a variable
    value = templar.template(value)
    valid_values = frozenset(('always', 'on_failed', 'on_unreachable', 'on_skipped', 'never'))
    if value and isinstance(value, string_types) and value not in valid_values:
        raise AnsibleParserError("'%s' is not a valid value for debugger. Must be one of %s" % (value, ', '.join(valid_values)), obj=self.get_ds())
    return value
def _validate_attributes(self, ds):
    '''
    Ensures that there are no keys in the datastructure which do
    not map to attributes for this object.
    '''
    known_attrs = frozenset(self._valid_attrs)
    unknown = [key for key in ds if key not in known_attrs]
    if unknown:
        # report the first unknown key, matching ds iteration order
        raise AnsibleParserError("'%s' is not a valid attribute for a %s" % (unknown[0], self.__class__.__name__), obj=ds)
def validate(self, all_vars=None):
    ''' validation that is done at parse time, not load time '''
    all_vars = {} if all_vars is None else all_vars

    if not self._validated:
        # walk all fields in the object
        for (name, attribute) in iteritems(self._valid_attrs):
            # aliases validate against their canonical attribute name
            if name in self._alias_attrs:
                name = self._alias_attrs[name]

            # run validator only if present
            method = getattr(self, '_validate_%s' % name, None)
            if method:
                method(attribute, name, getattr(self, name))
            else:
                # and make sure the attribute is of the type it should be
                value = self._attributes[name]
                if value is not None:
                    if attribute.isa == 'string' and isinstance(value, (list, dict)):
                        raise AnsibleParserError(
                            "The field '%s' is supposed to be a string type,"
                            " however the incoming data structure is a %s" % (name, type(value)), obj=self.get_ds()
                        )

    self._validated = True
def squash(self):
    '''
    Evaluates all attributes and sets them to the evaluated version,
    so that all future accesses of attributes do not need to evaluate
    parent attributes.
    '''
    if self._squashed:
        return
    for name in self._valid_attrs:
        self._attributes[name] = getattr(self, name)
    self._squashed = True
def copy(self):
    '''
    Create a copy of this object and return it.
    '''

    try:
        new_me = self.__class__()
    except RuntimeError as e:
        raise AnsibleError("Exceeded maximum object depth. This may have been caused by excessive role recursion", orig_exc=e)

    for name in self._valid_attrs.keys():
        # aliases point at the canonical attribute, which is copied itself
        if name in self._alias_attrs:
            continue
        new_me._attributes[name] = shallowcopy(self._attributes[name])
        new_me._attr_defaults[name] = shallowcopy(self._attr_defaults[name])

    # carry over the loader/variable manager references and internal state
    new_me._loader = self._loader
    new_me._variable_manager = self._variable_manager
    new_me._validated = self._validated
    new_me._finalized = self._finalized
    new_me._uuid = self._uuid

    # if the ds value was set on the object, copy it to the new copy too
    if hasattr(self, '_ds'):
        new_me._ds = self._ds

    return new_me
def get_validated_value(self, name, attribute, value, templar):
    """
    Coerce *value* to the type declared by ``attribute.isa`` and return it.

    Raises TypeError/ValueError for values that cannot be converted, and
    AnsibleParserError for invalid list contents.
    """
    if attribute.isa == 'string':
        value = to_text(value)
    elif attribute.isa == 'int':
        value = int(value)
    elif attribute.isa == 'float':
        value = float(value)
    elif attribute.isa == 'bool':
        value = boolean(value, strict=True)
    elif attribute.isa == 'percent':
        # special value, which may be an integer or float
        # with an optional '%' at the end
        if isinstance(value, string_types) and '%' in value:
            value = value.replace('%', '')
        value = float(value)
    elif attribute.isa == 'list':
        # coerce scalars to single-element lists; None becomes empty
        if value is None:
            value = []
        elif not isinstance(value, list):
            value = [value]
        if attribute.listof is not None:
            for item in value:
                if not isinstance(item, attribute.listof):
                    raise AnsibleParserError("the field '%s' should be a list of %s, "
                                             "but the item '%s' is a %s" % (name, attribute.listof, item, type(item)), obj=self.get_ds())
                elif attribute.required and attribute.listof == string_types:
                    if item is None or item.strip() == "":
                        raise AnsibleParserError("the field '%s' is required, and cannot have empty values" % (name,), obj=self.get_ds())
    elif attribute.isa == 'set':
        if value is None:
            value = set()
        elif not isinstance(value, (list, set)):
            if isinstance(value, string_types):
                # comma-separated string becomes the set of its parts
                value = value.split(',')
            else:
                # Making a list like this handles strings of
                # text and bytes properly
                value = [value]
        if not isinstance(value, set):
            value = set(value)
    elif attribute.isa == 'dict':
        if value is None:
            value = dict()
        elif not isinstance(value, dict):
            raise TypeError("%s is not a dictionary" % value)
    elif attribute.isa == 'class':
        if not isinstance(value, attribute.class_type):
            raise TypeError("%s is not a valid %s (got a %s instead)" % (name, attribute.class_type, type(value)))
        # nested objects are post-validated in place
        value.post_validate(templar=templar)
    return value
def post_validate(self, templar):
    '''
    we can't tell that everything is of the right type until we have
    all the variables. Run basic types (from isa) as well as
    any _post_validate_<foo> functions.
    '''

    # save the omit value for later checking
    omit_value = templar.available_variables.get('omit')

    for (name, attribute) in iteritems(self._valid_attrs):

        if attribute.static:
            value = getattr(self, name)

            # we don't template 'vars' but allow template as values for later use
            if name not in ('vars',) and templar.is_template(value):
                display.warning('"%s" is not templatable, but we found: %s, '
                                'it will not be templated and will be used "as is".' % (name, value))
            continue

        if getattr(self, name) is None:
            if not attribute.required:
                continue
            else:
                raise AnsibleParserError("the field '%s' is required but was not set" % name)
        elif not attribute.always_post_validate and self.__class__.__name__ not in ('Task', 'Handler', 'PlayContext'):
            # Intermediate objects like Play() won't have their fields validated by
            # default, as their values are often inherited by other objects and validated
            # later, so we don't want them to fail out early
            continue

        try:
            # Run the post-validator if present. These methods are responsible for
            # using the given templar to template the values, if required.
            method = getattr(self, '_post_validate_%s' % name, None)
            if method:
                value = method(attribute, getattr(self, name), templar)
            elif attribute.isa == 'class':
                value = getattr(self, name)
            else:
                # if the attribute contains a variable, template it now
                value = templar.template(getattr(self, name))

            # if this evaluated to the omit value, set the value back to
            # the default specified in the FieldAttribute and move on
            if omit_value is not None and value == omit_value:
                if callable(attribute.default):
                    setattr(self, name, attribute.default())
                else:
                    setattr(self, name, attribute.default)
                continue

            # and make sure the attribute is of the type it should be
            if value is not None:
                value = self.get_validated_value(name, attribute, value, templar)

            # and assign the massaged value back to the attribute field
            setattr(self, name, value)
        except (TypeError, ValueError) as e:
            value = getattr(self, name)
            raise AnsibleParserError("the field '%s' has an invalid value (%s), and could not be converted to an %s."
                                     "The error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds(), orig_exc=e)
        except (AnsibleUndefinedVariable, UndefinedError) as e:
            # 'name' is allowed to stay untemplated even with undefined vars
            if templar._fail_on_undefined_errors and name != 'name':
                if name == 'args':
                    msg = "The task includes an option with an undefined variable. The error was: %s" % (to_native(e))
                else:
                    msg = "The field '%s' has an invalid value, which includes an undefined variable. The error was: %s" % (name, to_native(e))
                raise AnsibleParserError(msg, obj=self.get_ds(), orig_exc=e)

    self._finalized = True
def _load_vars(self, attr, ds):
    '''
    Vars in a play can be specified either as a dictionary directly, or
    as a list of dictionaries. If the latter, this method will turn the
    list into a single dictionary.
    '''

    def _validate_variable_keys(ds):
        # every key must be a legal variable identifier
        for key in ds:
            if not isidentifier(key):
                raise TypeError("'%s' is not a valid variable name" % key)

    try:
        if isinstance(ds, dict):
            _validate_variable_keys(ds)
            return combine_vars(self.vars, ds)
        elif isinstance(ds, list):
            # merge each dict in the list over the previously accumulated vars
            all_vars = self.vars
            for item in ds:
                if not isinstance(item, dict):
                    raise ValueError
                _validate_variable_keys(item)
                all_vars = combine_vars(all_vars, item)
            return all_vars
        elif ds is None:
            return {}
        else:
            raise ValueError
    except ValueError as e:
        raise AnsibleParserError("Vars in a %s must be specified as a dictionary, or a list of dictionaries" % self.__class__.__name__,
                                 obj=ds, orig_exc=e)
    except TypeError as e:
        raise AnsibleParserError("Invalid variable name in vars specified for %s: %s" % (self.__class__.__name__, e), obj=ds, orig_exc=e)
def _extend_value(self, value, new_value, prepend=False):
'''
Will extend the value given with new_value (and will turn both
into lists if they are not so already). The values are run through
a set to remove duplicate values.
'''
if not isinstance(value, list):
value = [value]
if not isinstance(new_value, list):
new_value = [new_value]
# Due to where _extend_value may run for some attributes
# it is possible to end up with Sentinel in the list of values
# ensure we strip them
value = [v for v in value if v is not Sentinel]
new_value = [v for v in new_value if v is not Sentinel]
if prepend:
combined = new_value + value
else:
combined = value + new_value
return [i for i, _ in itertools.groupby(combined) if i is not None]
def dump_attrs(self):
    '''
    Dumps all attributes to a dictionary
    '''
    dumped = {}
    for (name, attribute) in iteritems(self._valid_attrs):
        current = getattr(self, name)
        # nested playbook objects are serialized; everything else as-is
        serializable = attribute.isa == 'class' and hasattr(current, 'serialize')
        dumped[name] = current.serialize() if serializable else current
    return dumped
def from_attrs(self, attrs):
    '''
    Loads attributes from a dictionary
    '''
    for (attr, value) in iteritems(attrs):
        if attr in self._valid_attrs:
            attribute = self._valid_attrs[attr]
            # 'class' attributes arrive as serialized dicts; rebuild the object
            if attribute.isa == 'class' and isinstance(value, dict):
                obj = attribute.class_type()
                obj.deserialize(value)
                setattr(self, attr, obj)
            else:
                setattr(self, attr, value)

    # from_attrs is only used to create a finalized task
    # from attrs from the Worker/TaskExecutor
    # Those attrs are finalized and squashed in the TE
    # and controller side use needs to reflect that
    self._finalized = True
    self._squashed = True
def serialize(self):
    '''
    Serializes the object derived from the base object into
    a dictionary of values. This only serializes the field
    attributes for the object, so this may need to be overridden
    for any classes which wish to add additional items not stored
    as field attributes.
    '''
    # renamed the local from 'repr' to 'data' so the builtin repr()
    # is not shadowed inside this method
    data = self.dump_attrs()

    # serialize the uuid field and internal state flags
    data['uuid'] = self._uuid
    data['finalized'] = self._finalized
    data['squashed'] = self._squashed

    return data
def deserialize(self, data):
    '''
    Given a dictionary of values, load up the field attributes for
    this object. As with serialize(), if there are any non-field
    attribute data members, this method will need to be overridden
    and extended.
    '''

    if not isinstance(data, dict):
        raise AnsibleAssertionError('data (%s) should be a dict but is a %s' % (data, type(data)))

    for (name, attribute) in iteritems(self._valid_attrs):
        if name in data:
            setattr(self, name, data[name])
        else:
            # fall back to the declared default (resolving factory callables)
            if callable(attribute.default):
                setattr(self, name, attribute.default())
            else:
                setattr(self, name, attribute.default)

    # restore the UUID field
    setattr(self, '_uuid', data.get('uuid'))
    self._finalized = data.get('finalized', False)
    self._squashed = data.get('squashed', False)
class Base(FieldAttributeBase):
    """
    Declares the FieldAttributes common to all playbook objects
    (plays, blocks, roles, tasks, ...). Subclasses layer their own
    fields on top of these.
    """

    _name = FieldAttribute(isa='string', default='', always_post_validate=True, inherit=False)

    # connection/transport
    _connection = FieldAttribute(isa='string', default=context.cliargs_deferred_get('connection'))
    _port = FieldAttribute(isa='int')
    _remote_user = FieldAttribute(isa='string', default=context.cliargs_deferred_get('remote_user'))

    # variables
    _vars = FieldAttribute(isa='dict', priority=100, inherit=False, static=True)

    # module default params
    _module_defaults = FieldAttribute(isa='list', extend=True, prepend=True)

    # flags and misc. settings
    _environment = FieldAttribute(isa='list', extend=True, prepend=True)
    _no_log = FieldAttribute(isa='bool')
    _run_once = FieldAttribute(isa='bool')
    _ignore_errors = FieldAttribute(isa='bool')
    _ignore_unreachable = FieldAttribute(isa='bool')
    _check_mode = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('check'))
    _diff = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('diff'))
    _any_errors_fatal = FieldAttribute(isa='bool', default=C.ANY_ERRORS_FATAL)
    _throttle = FieldAttribute(isa='int', default=0)
    _timeout = FieldAttribute(isa='int', default=C.TASK_TIMEOUT)

    # explicitly invoke a debugger on tasks
    _debugger = FieldAttribute(isa='string')

    # Privilege escalation
    _become = FieldAttribute(isa='bool', default=context.cliargs_deferred_get('become'))
    _become_method = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_method'))
    _become_user = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_user'))
    _become_flags = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_flags'))
    _become_exe = FieldAttribute(isa='string', default=context.cliargs_deferred_get('become_exe'))

    # used to hold sudo/su stuff
    DEPRECATED_ATTRIBUTES = []
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,926 |
Massive overhead of hosts templating for every host of every task
|
### Summary
Executing a playbook with many hosts, there is massive overhead in checking whether the hosts list is a pattern, see for yourself on this callgrind graph:

https://github.com/ansible/ansible/blob/da60525610a384bb04833b1c6429d9db6a87ef64/lib/ansible/vars/manager.py#L490
That check has quadratic runtime with the number of hosts - with a lot of hosts, commenting that check out reduces runtime manyfold.
### Issue Type
Bug Report
### Component Name
variable manager
### Ansible Version
2.10.5, devel
### OS / Environment
Ubuntu
### Steps to Reproduce
```yaml
- hosts: localhost
tasks:
- set_fact:
extra_tasks: true # compute it
hosts: "{{ [an, array, with, 5100, computed, hosts] }}"
- hosts: "{{ hostvars.localhost.hosts }}"
vars:
extra_tasks: "{{ hostvars.localhost.do_extra_tasks }}"
tasks:
# some more tasks here
- name: Execute extra tasks
command: /bin/true # example
when: extra_tasks
```
### Expected Results
Execute in tens of seconds.
### Actual Results
Take over 5 minutes to execute.
|
https://github.com/ansible/ansible/issues/73926
|
https://github.com/ansible/ansible/pull/73941
|
d8bf4206e446c45ba057e85819278cef5fbeff2c
|
3740d7b028b23a1630f544e01cabf1fa3a50257c
| 2021-03-16T18:09:49Z |
python
| 2021-04-19T17:03:40Z |
lib/ansible/plugins/strategy/__init__.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import cmd
import functools
import os
import pprint
import sys
import threading
import time
from collections import deque
from multiprocessing import Lock
from jinja2.exceptions import UndefinedError
from ansible import constants as C
from ansible import context
from ansible.errors import AnsibleError, AnsibleFileNotFound, AnsibleParserError, AnsibleUndefinedVariable
from ansible.executor import action_write_locks
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.task_result import TaskResult
from ansible.executor.task_queue_manager import CallbackSend
from ansible.module_utils.six.moves import queue as Queue
from ansible.module_utils.six import iteritems, itervalues, string_types
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection, ConnectionError
from ansible.playbook.conditional import Conditional
from ansible.playbook.handler import Handler
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.included_file import IncludedFile
from ansible.playbook.task_include import TaskInclude
from ansible.plugins import loader as plugin_loader
from ansible.template import Templar
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import wrap_var
from ansible.utils.vars import combine_vars
from ansible.vars.clean import strip_internal_keys, module_response_deepcopy
display = Display()
__all__ = ['StrategyBase']
# This list can be an exact match, or start of string bound
# does not accept regex
ALWAYS_DELEGATE_FACT_PREFIXES = frozenset((
'discovered_interpreter_',
))
class StrategySentinel:
    """Marker type; an instance is pushed onto the final results queue to
    tell the background results thread to shut down."""
    pass


_sentinel = StrategySentinel()
def post_process_whens(result, task, templar):
    """Apply a task's changed_when/failed_when conditionals to its result.

    Mutates ``result`` in place: sets 'changed' when changed_when is set,
    and both 'failed_when_result' and 'failed' when failed_when is set.
    """
    cond = None
    if task.changed_when:
        cond = Conditional(loader=templar._loader)
        cond.when = task.changed_when
        result['changed'] = cond.evaluate_conditional(templar, templar.available_variables)

    if task.failed_when:
        if cond is None:
            cond = Conditional(loader=templar._loader)
        # reuse the same Conditional object, only swapping out the 'when'
        cond.when = task.failed_when
        failed_when_result = cond.evaluate_conditional(templar, templar.available_variables)
        result['failed_when_result'] = result['failed'] = failed_when_result
def results_thread_main(strategy):
    """Body of the background results thread.

    Drains the strategy's final queue, dispatching CallbackSend objects to
    the TQM callbacks and filing TaskResults into the handler or regular
    results deque, until the shutdown sentinel is received.
    """
    while True:
        try:
            result = strategy._final_q.get()
            if isinstance(result, StrategySentinel):
                # shutdown marker pushed by StrategyBase.cleanup()
                break
            elif isinstance(result, CallbackSend):
                for arg in result.args:
                    # normalize the first TaskResult found among the args
                    if isinstance(arg, TaskResult):
                        strategy.normalize_task_result(arg)
                        break
                strategy._tqm.send_callback(result.method_name, *result.args, **result.kwargs)
            elif isinstance(result, TaskResult):
                strategy.normalize_task_result(result)
                with strategy._results_lock:
                    # only handlers have the listen attr, so this must be a handler
                    # we split up the results into two queues here to make sure
                    # handler and regular result processing don't cross wires
                    if 'listen' in result._task_fields:
                        strategy._handler_results.append(result)
                    else:
                        strategy._results.append(result)
            else:
                display.warning('Received an invalid object (%s) in the result queue: %r' % (type(result), result))
        except (IOError, EOFError):
            # the queue's pipe went away; nothing more to read
            break
        except Queue.Empty:
            pass
def debug_closure(func):
    """Closure to wrap ``StrategyBase._process_pending_results`` and invoke the task debugger"""
    @functools.wraps(func)
    def inner(self, iterator, one_pass=False, max_passes=None, do_handlers=False):
        # maps TaskResult predicate methods to the stats bucket they affect,
        # used to roll back stats when a task is redone from the debugger
        status_to_stats_map = (
            ('is_failed', 'failures'),
            ('is_unreachable', 'dark'),
            ('is_changed', 'changed'),
            ('is_skipped', 'skipped'),
        )

        # We don't know the host yet, copy the previous states, for lookup after we process new results
        prev_host_states = iterator._host_states.copy()

        results = func(self, iterator, one_pass=one_pass, max_passes=max_passes, do_handlers=do_handlers)
        _processed_results = []

        for result in results:
            task = result._task
            host = result._host
            _queued_task_args = self._queued_task_cache.pop((host.name, task._uuid), None)
            task_vars = _queued_task_args['task_vars']
            play_context = _queued_task_args['play_context']
            # Try to grab the previous host state, if it doesn't exist use get_host_state to generate an empty state
            try:
                prev_host_state = prev_host_states[host.name]
            except KeyError:
                prev_host_state = iterator.get_host_state(host)

            while result.needs_debugger(globally_enabled=self.debugger_active):
                next_action = NextAction()
                dbg = Debugger(task, host, task_vars, play_context, result, next_action)
                dbg.cmdloop()

                if next_action.result == NextAction.REDO:
                    # rollback host state
                    self._tqm.clear_failed_hosts()
                    iterator._host_states[host.name] = prev_host_state
                    for method, what in status_to_stats_map:
                        if getattr(result, method)():
                            self._tqm._stats.decrement(what, host.name)
                    self._tqm._stats.decrement('ok', host.name)

                    # redo
                    self._queue_task(host, task, task_vars, play_context)

                    # recurse to process the redone task's result as well
                    _processed_results.extend(debug_closure(func)(self, iterator, one_pass))
                    break
                elif next_action.result == NextAction.CONTINUE:
                    _processed_results.append(result)
                    break
                elif next_action.result == NextAction.EXIT:
                    # Matches KeyboardInterrupt from bin/ansible
                    sys.exit(99)
            else:
                # while/else: no debugger requested, keep the result as-is
                _processed_results.append(result)

        return _processed_results

    return inner
class StrategyBase:

    '''
    This is the base class for strategy plugins, which contains some common
    code useful to all strategies like running handlers, cleanup actions, etc.
    '''

    # by default, strategies should support throttling but we allow individual
    # strategies to disable this and either forego supporting it or managing
    # the throttling internally (as `free` does)
    ALLOW_BASE_THROTTLING = True

    def __init__(self, tqm):
        # references into the TaskQueueManager and its helpers
        self._tqm = tqm
        self._inventory = tqm.get_inventory()
        self._workers = tqm._workers
        self._variable_manager = tqm.get_variable_manager()
        self._loader = tqm.get_loader()
        self._final_q = tqm._final_q
        self._step = context.CLIARGS.get('step', False)
        self._diff = context.CLIARGS.get('diff', False)

        # the task cache is a dictionary of tuples of (host.name, task._uuid)
        # used to find the original task object of in-flight tasks and to store
        # the task args/vars and play context info used to queue the task.
        self._queued_task_cache = {}

        # Backwards compat: self._display isn't really needed, just import the global display and use that.
        self._display = display

        # internal counters
        self._pending_results = 0
        self._pending_handler_results = 0
        self._cur_worker = 0

        # this dictionary is used to keep track of hosts that have
        # outstanding tasks still in queue
        self._blocked_hosts = dict()

        # this dictionary is used to keep track of hosts that have
        # flushed handlers
        self._flushed_hosts = dict()

        self._results = deque()
        self._handler_results = deque()
        self._results_lock = threading.Condition(threading.Lock())

        # create the result processing thread for reading results in the background
        self._results_thread = threading.Thread(target=results_thread_main, args=(self,))
        self._results_thread.daemon = True
        self._results_thread.start()

        # holds the list of active (persistent) connections to be shutdown at
        # play completion
        self._active_connections = dict()

        # Caches for get_host calls, to avoid calling excessively
        # These values should be set at the top of the ``run`` method of each
        # strategy plugin. Use ``_set_hosts_cache`` to set these values
        self._hosts_cache = []
        self._hosts_cache_all = []

        self.debugger_active = C.ENABLE_TASK_DEBUGGER
def _set_hosts_cache(self, play, refresh=True):
    """Responsible for setting _hosts_cache and _hosts_cache_all

    See comment in ``__init__`` for the purpose of these caches
    """
    if not refresh and all((self._hosts_cache, self._hosts_cache_all)):
        return

    # a still-templated hosts value cannot be used as an inventory
    # pattern, so fall back to 'all' for the unrestricted cache
    if Templar(None).is_template(play.hosts):
        _pattern = 'all'
    else:
        _pattern = play.hosts or 'all'
    self._hosts_cache_all = [h.name for h in self._inventory.get_hosts(pattern=_pattern, ignore_restrictions=True)]
    self._hosts_cache = [h.name for h in self._inventory.get_hosts(play.hosts, order=play.order)]
def cleanup(self):
    """Tear down: reset persistent connections and stop the results thread."""
    # close active persistent connections
    for sock in itervalues(self._active_connections):
        try:
            Connection(sock).reset()
        except ConnectionError as e:
            # most likely socket is already closed
            display.debug("got an error while closing persistent connection: %s" % e)

    # unblock the background results thread and wait for it to exit
    self._final_q.put(_sentinel)
    self._results_thread.join()
def run(self, iterator, play_context, result=0):
    """Finalize a play run: advance all hosts, fire handlers, and translate
    the failed/unreachable state into a TQM RUN_* return code."""

    # execute one more pass through the iterator without peeking, to
    # make sure that all of the hosts are advanced to their final task.
    # This should be safe, as everything should be ITERATING_COMPLETE by
    # this point, though the strategy may not advance the hosts itself.
    for host in self._hosts_cache:
        if host not in self._tqm._unreachable_hosts:
            try:
                iterator.get_next_task_for_host(self._inventory.hosts[host])
            except KeyError:
                iterator.get_next_task_for_host(self._inventory.get_host(host))

    # save the failed/unreachable hosts, as the run_handlers()
    # method will clear that information during its execution
    failed_hosts = iterator.get_failed_hosts()
    unreachable_hosts = self._tqm._unreachable_hosts.keys()

    display.debug("running handlers")
    handler_result = self.run_handlers(iterator, play_context)
    if isinstance(handler_result, bool) and not handler_result:
        result |= self._tqm.RUN_ERROR
    elif not handler_result:
        result |= handler_result

    # now update with the hosts (if any) that failed or were
    # unreachable during the handler execution phase
    failed_hosts = set(failed_hosts).union(iterator.get_failed_hosts())
    unreachable_hosts = set(unreachable_hosts).union(self._tqm._unreachable_hosts.keys())

    # return the appropriate code, depending on the status hosts after the run
    if not isinstance(result, bool) and result != self._tqm.RUN_OK:
        return result
    elif len(unreachable_hosts) > 0:
        return self._tqm.RUN_UNREACHABLE_HOSTS
    elif len(failed_hosts) > 0:
        return self._tqm.RUN_FAILED_HOSTS
    else:
        return self._tqm.RUN_OK
def get_hosts_remaining(self, play):
    """Return names of hosts in the play that have neither failed nor gone unreachable."""
    self._set_hosts_cache(play, refresh=False)
    excluded = set(self._tqm._failed_hosts).union(self._tqm._unreachable_hosts)
    return [name for name in self._hosts_cache if name not in excluded]
def get_failed_hosts(self, play):
    """Return names of hosts in the play that are currently marked as failed."""
    self._set_hosts_cache(play, refresh=False)
    failed = self._tqm._failed_hosts
    return [name for name in self._hosts_cache if name in failed]
def add_tqm_variables(self, vars, play):
    """Inject task queue manager state (remaining/failed hosts) into task vars.

    Mutates *vars* in place with the ``ansible_current_hosts`` and
    ``ansible_failed_hosts`` magic variables for the given play.
    """
    vars.update({
        'ansible_current_hosts': self.get_hosts_remaining(play),
        'ansible_failed_hosts': self.get_failed_hosts(play),
    })
def _queue_task(self, host, task, task_vars, play_context):
    ''' handles queueing the task up to be sent to a worker

    Picks the next free (or dead) worker slot, respecting the task's
    'throttle' limit, and starts a WorkerProcess for the (host, task) pair.
    Busy-waits with a tiny sleep when every eligible slot is occupied.
    '''
    display.debug("entering _queue_task() for %s/%s" % (host.name, task.action))

    # Add a write lock for tasks.
    # Maybe this should be added somewhere further up the call stack but
    # this is the earliest in the code where we have task (1) extracted
    # into its own variable and (2) there's only a single code path
    # leading to the module being run. This is called by three
    # functions: __init__.py::_do_handler_run(), linear.py::run(), and
    # free.py::run() so we'd have to add to all three to do it there.
    # The next common higher level is __init__.py::run() and that has
    # tasks inside of play_iterator so we'd have to extract them to do it
    # there.
    if task.action not in action_write_locks.action_write_locks:
        display.debug('Creating lock for %s' % task.action)
        action_write_locks.action_write_locks[task.action] = Lock()

    # create a templar and template things we need later for the queuing process
    templar = Templar(loader=self._loader, variables=task_vars)

    try:
        throttle = int(templar.template(task.throttle))
    except Exception as e:
        raise AnsibleError("Failed to convert the throttle value to an integer.", obj=task._ds, orig_exc=e)

    # and then queue the new task
    try:
        # Determine the "rewind point" of the worker list. This means we start
        # iterating over the list of workers until the end of the list is found.
        # Normally, that is simply the length of the workers list (as determined
        # by the forks or serial setting), however a task/block/play may "throttle"
        # that limit down.
        rewind_point = len(self._workers)
        if throttle > 0 and self.ALLOW_BASE_THROTTLING:
            if task.run_once:
                display.debug("Ignoring 'throttle' as 'run_once' is also set for '%s'" % task.get_name())
            else:
                if throttle <= rewind_point:
                    display.debug("task: %s, throttle: %d" % (task.get_name(), throttle))
                    rewind_point = throttle

        queued = False
        starting_worker = self._cur_worker
        while True:
            if self._cur_worker >= rewind_point:
                self._cur_worker = 0

            worker_prc = self._workers[self._cur_worker]
            # a slot is free when it was never used or its process has exited
            if worker_prc is None or not worker_prc.is_alive():
                # cache the inputs so results referencing only the task uuid
                # can be rehydrated later (see normalize_task_result)
                self._queued_task_cache[(host.name, task._uuid)] = {
                    'host': host,
                    'task': task,
                    'task_vars': task_vars,
                    'play_context': play_context
                }

                worker_prc = WorkerProcess(self._final_q, task_vars, host, task, play_context, self._loader, self._variable_manager, plugin_loader)
                self._workers[self._cur_worker] = worker_prc
                self._tqm.send_callback('v2_runner_on_start', host, task)
                worker_prc.start()
                display.debug("worker is %d (out of %d available)" % (self._cur_worker + 1, len(self._workers)))
                queued = True

            self._cur_worker += 1

            if self._cur_worker >= rewind_point:
                self._cur_worker = 0

            if queued:
                break
            elif self._cur_worker == starting_worker:
                # wrapped all the way around with no free slot; yield briefly
                time.sleep(0.0001)

        if isinstance(task, Handler):
            self._pending_handler_results += 1
        else:
            self._pending_results += 1
    except (EOFError, IOError, AssertionError) as e:
        # most likely an abort
        display.debug("got an error while queuing: %s" % e)
        return
    display.debug("exiting _queue_task() for %s/%s" % (host.name, task.action))
def get_task_hosts(self, iterator, task_host, task):
    """Return the list of host names a task's result should apply to."""
    if task.run_once:
        # a run_once result applies to every still-reachable host in the play
        return [name for name in self._hosts_cache if name not in self._tqm._unreachable_hosts]
    # otherwise the result applies only to the host that ran the task
    return [task_host.name]
def get_delegated_hosts(self, result, task):
    """Return a one-element list naming the host the task was delegated to."""
    delegated_vars = result.get('_ansible_delegated_vars', {})
    name = delegated_vars.get('ansible_delegated_host', None)
    # fall back to the task's delegate_to when the result carries no usable name
    return [name or task.delegate_to]
def _set_always_delegated_facts(self, result, task):
    """Sets host facts for ``delegate_to`` hosts for facts that should
    always be delegated

    This operation mutates ``result`` to remove the always delegated facts

    See ``ALWAYS_DELEGATE_FACT_PREFIXES``

    :arg result: task result dict whose ``ansible_facts`` entry is filtered in place.
    :arg task: the task object; a no-op unless ``task.delegate_to`` is set.
    """
    if task.delegate_to is None:
        return

    facts = result['ansible_facts']
    always_keys = set()
    _add = always_keys.add
    # collect every fact key that matches an always-delegate prefix
    for fact_key in facts:
        for always_key in ALWAYS_DELEGATE_FACT_PREFIXES:
            if fact_key.startswith(always_key):
                _add(fact_key)

    if always_keys:
        _pop = facts.pop
        # pop() removes the matched facts from the result as we build the
        # payload destined for the delegated host(s)
        always_facts = {
            'ansible_facts': dict((k, _pop(k)) for k in list(facts) if k in always_keys)
        }
        host_list = self.get_delegated_hosts(result, task)
        _set_host_facts = self._variable_manager.set_host_facts
        for target_host in host_list:
            _set_host_facts(target_host, always_facts)
def normalize_task_result(self, task_result):
    """Normalize a TaskResult to reference actual Host and Task objects
    when only given the ``Host.name``, or the ``Task._uuid``

    Only the ``Host.name`` and ``Task._uuid`` are commonly sent back from
    the ``TaskExecutor`` or ``WorkerProcess`` due to performance concerns

    Mutates the original object

    :returns: the same ``task_result`` with ``_host``/``_task`` rehydrated.
    """
    if isinstance(task_result._host, string_types):
        # If the value is a string, it is ``Host.name``
        task_result._host = self._inventory.get_host(to_text(task_result._host))

    if isinstance(task_result._task, string_types):
        # If the value is a string, it is ``Task._uuid``
        queue_cache_entry = (task_result._host.name, task_result._task)
        # the cache entry was stored by _queue_task() when the task was queued
        found_task = self._queued_task_cache.get(queue_cache_entry)['task']
        # rebuild a standalone copy and overlay the fields the worker reported
        original_task = found_task.copy(exclude_parent=True, exclude_tasks=True)
        original_task._parent = found_task._parent
        original_task.from_attrs(task_result._task_fields)
        task_result._task = original_task

    return task_result
@debug_closure
def _process_pending_results(self, iterator, one_pass=False, max_passes=None, do_handlers=False):
    '''
    Reads results off the final queue and takes appropriate action
    based on the result (executing callbacks, updating state, etc.).

    :kwarg one_pass: process at most one result before returning.
    :kwarg max_passes: optional cap on how many results are processed.
    :kwarg do_handlers: read from the handler results queue instead of the
        regular results queue.
    :returns: the list of TaskResult objects processed in this call.
    '''
    ret_results = []
    handler_templar = Templar(self._loader)

    def search_handler_blocks_by_name(handler_name, handler_blocks):
        # iterate in reversed order since last handler loaded with the same name wins
        for handler_block in reversed(handler_blocks):
            for handler_task in handler_block.block:
                if handler_task.name:
                    if not handler_task.cached_name:
                        if handler_templar.is_template(handler_task.name):
                            handler_templar.available_variables = self._variable_manager.get_vars(play=iterator._play,
                                                                                                  task=handler_task,
                                                                                                  _hosts=self._hosts_cache,
                                                                                                  _hosts_all=self._hosts_cache_all)
                            handler_task.name = handler_templar.template(handler_task.name)
                        handler_task.cached_name = True

                    try:
                        # first we check with the full result of get_name(), which may
                        # include the role name (if the handler is from a role). If that
                        # is not found, we resort to the simple name field, which doesn't
                        # have anything extra added to it.
                        candidates = (
                            handler_task.name,
                            handler_task.get_name(include_role_fqcn=False),
                            handler_task.get_name(include_role_fqcn=True),
                        )

                        if handler_name in candidates:
                            return handler_task
                    except (UndefinedError, AnsibleUndefinedVariable):
                        # We skip this handler due to the fact that it may be using
                        # a variable in the name that was conditionally included via
                        # set_fact or some other method, and we don't want to error
                        # out unnecessarily
                        continue
        return None

    cur_pass = 0
    while True:
        # pop one queued result; IndexError means the queue is empty for now
        try:
            self._results_lock.acquire()
            if do_handlers:
                task_result = self._handler_results.popleft()
            else:
                task_result = self._results.popleft()
        except IndexError:
            break
        finally:
            self._results_lock.release()

        original_host = task_result._host
        original_task = task_result._task

        # all host status messages contain 2 entries: (msg, task_result)
        role_ran = False
        if task_result.is_failed():
            role_ran = True
            ignore_errors = original_task.ignore_errors
            if not ignore_errors:
                display.debug("marking %s as failed" % original_host.name)
                if original_task.run_once:
                    # if we're using run_once, we have to fail every host here
                    for h in self._inventory.get_hosts(iterator._play.hosts):
                        if h.name not in self._tqm._unreachable_hosts:
                            state, _ = iterator.get_next_task_for_host(h, peek=True)
                            iterator.mark_host_failed(h)
                            state, new_task = iterator.get_next_task_for_host(h, peek=True)
                else:
                    iterator.mark_host_failed(original_host)

                # grab the current state and if we're iterating on the rescue portion
                # of a block then we save the failed task in a special var for use
                # within the rescue/always
                state, _ = iterator.get_next_task_for_host(original_host, peek=True)

                if iterator.is_failed(original_host) and state and state.run_state == iterator.ITERATING_COMPLETE:
                    self._tqm._failed_hosts[original_host.name] = True

                # Use of get_active_state() here helps detect proper state if, say, we are in a rescue
                # block from an included file (include_tasks). In a non-included rescue case, a rescue
                # that starts with a new 'block' will have an active state of ITERATING_TASKS, so we also
                # check the current state block tree to see if any blocks are rescuing.
                if state and (iterator.get_active_state(state).run_state == iterator.ITERATING_RESCUE or
                              iterator.is_any_block_rescuing(state)):
                    self._tqm._stats.increment('rescued', original_host.name)
                    self._variable_manager.set_nonpersistent_facts(
                        original_host.name,
                        dict(
                            ansible_failed_task=wrap_var(original_task.serialize()),
                            ansible_failed_result=task_result._result,
                        ),
                    )
                else:
                    self._tqm._stats.increment('failures', original_host.name)
            else:
                # failure is being ignored; count it as ok/ignored instead
                self._tqm._stats.increment('ok', original_host.name)
                self._tqm._stats.increment('ignored', original_host.name)
                if 'changed' in task_result._result and task_result._result['changed']:
                    self._tqm._stats.increment('changed', original_host.name)
            self._tqm.send_callback('v2_runner_on_failed', task_result, ignore_errors=ignore_errors)
        elif task_result.is_unreachable():
            ignore_unreachable = original_task.ignore_unreachable
            if not ignore_unreachable:
                self._tqm._unreachable_hosts[original_host.name] = True
                iterator._play._removed_hosts.append(original_host.name)
            else:
                self._tqm._stats.increment('skipped', original_host.name)
                task_result._result['skip_reason'] = 'Host %s is unreachable' % original_host.name
            self._tqm._stats.increment('dark', original_host.name)
            self._tqm.send_callback('v2_runner_on_unreachable', task_result)
        elif task_result.is_skipped():
            self._tqm._stats.increment('skipped', original_host.name)
            self._tqm.send_callback('v2_runner_on_skipped', task_result)
        else:
            role_ran = True

            if original_task.loop:
                # this task had a loop, and has more than one result, so
                # loop over all of them instead of a single result
                result_items = task_result._result.get('results', [])
            else:
                result_items = [task_result._result]

            for result_item in result_items:
                if '_ansible_notify' in result_item:
                    if task_result.is_changed():
                        # The shared dictionary for notified handlers is a proxy, which
                        # does not detect when sub-objects within the proxy are modified.
                        # So, per the docs, we reassign the list so the proxy picks up and
                        # notifies all other threads
                        for handler_name in result_item['_ansible_notify']:
                            found = False
                            # Find the handler using the above helper. First we look up the
                            # dependency chain of the current task (if it's from a role), otherwise
                            # we just look through the list of handlers in the current play/all
                            # roles and use the first one that matches the notify name
                            target_handler = search_handler_blocks_by_name(handler_name, iterator._play.handlers)
                            if target_handler is not None:
                                found = True
                                if target_handler.notify_host(original_host):
                                    self._tqm.send_callback('v2_playbook_on_notify', target_handler, original_host)

                            # also notify any handlers 'listen'ing for this name
                            for listening_handler_block in iterator._play.handlers:
                                for listening_handler in listening_handler_block.block:
                                    listeners = getattr(listening_handler, 'listen', []) or []
                                    if not listeners:
                                        continue

                                    listeners = listening_handler.get_validated_value(
                                        'listen', listening_handler._valid_attrs['listen'], listeners, handler_templar
                                    )
                                    if handler_name not in listeners:
                                        continue
                                    else:
                                        found = True

                                    if listening_handler.notify_host(original_host):
                                        self._tqm.send_callback('v2_playbook_on_notify', listening_handler, original_host)

                            # and if none were found, then we raise an error
                            if not found:
                                msg = ("The requested handler '%s' was not found in either the main handlers list nor in the listening "
                                       "handlers list" % handler_name)
                                if C.ERROR_ON_MISSING_HANDLER:
                                    raise AnsibleError(msg)
                                else:
                                    display.warning(msg)

                if 'add_host' in result_item:
                    # this task added a new host (add_host module)
                    new_host_info = result_item.get('add_host', dict())
                    self._add_host(new_host_info, result_item)
                    post_process_whens(result_item, original_task, handler_templar)

                elif 'add_group' in result_item:
                    # this task added a new group (group_by module)
                    self._add_group(original_host, result_item)
                    post_process_whens(result_item, original_task, handler_templar)

                if 'ansible_facts' in result_item and original_task.action not in C._ACTION_DEBUG:
                    # if delegated fact and we are delegating facts, we need to change target host for them
                    if original_task.delegate_to is not None and original_task.delegate_facts:
                        host_list = self.get_delegated_hosts(result_item, original_task)
                    else:
                        # Set facts that should always be on the delegated hosts
                        self._set_always_delegated_facts(result_item, original_task)
                        host_list = self.get_task_hosts(iterator, original_host, original_task)

                    if original_task.action in C._ACTION_INCLUDE_VARS:
                        for (var_name, var_value) in iteritems(result_item['ansible_facts']):
                            # find the host we're actually referring too here, which may
                            # be a host that is not really in inventory at all
                            for target_host in host_list:
                                self._variable_manager.set_host_variable(target_host, var_name, var_value)
                    else:
                        cacheable = result_item.pop('_ansible_facts_cacheable', False)
                        for target_host in host_list:
                            # so set_fact is a misnomer but 'cacheable = true' was meant to create an 'actual fact'
                            # to avoid issues with precedence and confusion with set_fact normal operation,
                            # we set BOTH fact and nonpersistent_facts (aka hostvar)
                            # when fact is retrieved from cache in subsequent operations it will have the lower precedence,
                            # but for playbook setting it the 'higher' precedence is kept
                            is_set_fact = original_task.action in C._ACTION_SET_FACT
                            if not is_set_fact or cacheable:
                                self._variable_manager.set_host_facts(target_host, result_item['ansible_facts'].copy())
                            if is_set_fact:
                                self._variable_manager.set_nonpersistent_facts(target_host, result_item['ansible_facts'].copy())

                if 'ansible_stats' in result_item and 'data' in result_item['ansible_stats'] and result_item['ansible_stats']['data']:
                    # custom stats from the set_stats module (per-host or global)
                    if 'per_host' not in result_item['ansible_stats'] or result_item['ansible_stats']['per_host']:
                        host_list = self.get_task_hosts(iterator, original_host, original_task)
                    else:
                        host_list = [None]

                    data = result_item['ansible_stats']['data']
                    aggregate = 'aggregate' in result_item['ansible_stats'] and result_item['ansible_stats']['aggregate']
                    for myhost in host_list:
                        for k in data.keys():
                            if aggregate:
                                self._tqm._stats.update_custom_stats(k, data[k], myhost)
                            else:
                                self._tqm._stats.set_custom_stats(k, data[k], myhost)

            if 'diff' in task_result._result:
                if self._diff or getattr(original_task, 'diff', False):
                    self._tqm.send_callback('v2_on_file_diff', task_result)

            if not isinstance(original_task, TaskInclude):
                self._tqm._stats.increment('ok', original_host.name)
                if 'changed' in task_result._result and task_result._result['changed']:
                    self._tqm._stats.increment('changed', original_host.name)

            # finally, send the ok for this task
            self._tqm.send_callback('v2_runner_on_ok', task_result)

        # register final results
        if original_task.register:
            host_list = self.get_task_hosts(iterator, original_host, original_task)

            clean_copy = strip_internal_keys(module_response_deepcopy(task_result._result))
            if 'invocation' in clean_copy:
                del clean_copy['invocation']

            for target_host in host_list:
                self._variable_manager.set_nonpersistent_facts(target_host, {original_task.register: clean_copy})

        if do_handlers:
            self._pending_handler_results -= 1
        else:
            self._pending_results -= 1
        if original_host.name in self._blocked_hosts:
            del self._blocked_hosts[original_host.name]

        # If this is a role task, mark the parent role as being run (if
        # the task was ok or failed, but not skipped or unreachable)
        if original_task._role is not None and role_ran:  # TODO: and original_task.action not in C._ACTION_INCLUDE_ROLE:?
            # lookup the role in the ROLE_CACHE to make sure we're dealing
            # with the correct object and mark it as executed
            for (entry, role_obj) in iteritems(iterator._play.ROLE_CACHE[original_task._role.get_name()]):
                if role_obj._uuid == original_task._role._uuid:
                    role_obj._had_task_run[original_host.name] = True

        ret_results.append(task_result)

        if one_pass or max_passes is not None and (cur_pass + 1) >= max_passes:
            break

        cur_pass += 1

    return ret_results
def _wait_on_handler_results(self, iterator, handler, notified_hosts):
    """Poll for handler task results until every notified host has reported.

    Sleeps briefly between polls so the loop does not spin-lock.
    """
    collected = []
    matched_count = 0

    display.debug("waiting for handler results...")
    while (self._pending_handler_results > 0 and
           matched_count < len(notified_hosts) and
           not self._tqm._terminated):

        if self._tqm.has_dead_workers():
            raise AnsibleError("A worker was found in a dead state")

        batch = self._process_pending_results(iterator, do_handlers=True)
        collected.extend(batch)
        # count only results for this handler on hosts we actually notified
        matched_count += sum(
            1 for r in batch
            if r._host in notified_hosts and r.task_name == handler.name
        )
        if self._pending_handler_results > 0:
            time.sleep(C.DEFAULT_INTERNAL_POLL_INTERVAL)

    display.debug("no more pending handlers, returning what we have")
    return collected
def _wait_on_pending_results(self, iterator):
    """Poll for task results until none remain outstanding.

    Sleeps briefly between polls so the loop does not spin-lock.
    """
    collected = []

    display.debug("waiting for pending results...")
    while self._pending_results > 0 and not self._tqm._terminated:

        if self._tqm.has_dead_workers():
            raise AnsibleError("A worker was found in a dead state")

        collected.extend(self._process_pending_results(iterator))

        if self._pending_results > 0:
            time.sleep(C.DEFAULT_INTERNAL_POLL_INTERVAL)

    display.debug("no more pending results, returning what we have")
    return collected
def _add_host(self, host_info, result_item):
    '''
    Helper function to add a new host to inventory based on a task result.

    :arg host_info: the 'add_host' payload from the task result; expected to
        carry 'host_name' and optionally 'host_vars' and 'groups'.
    :arg result_item: the result dict, mutated to record whether the
        inventory actually changed.
    '''
    changed = False
    if host_info:
        host_name = host_info.get('host_name')

        # Check if host in inventory, add if not
        if host_name not in self._inventory.hosts:
            self._inventory.add_host(host_name, 'all')
            self._hosts_cache_all.append(host_name)
            changed = True
        new_host = self._inventory.hosts.get(host_name)

        # Set/update the vars for this host
        new_host_vars = new_host.get_vars()
        new_host_combined_vars = combine_vars(new_host_vars, host_info.get('host_vars', dict()))
        if new_host_vars != new_host_combined_vars:
            new_host.vars = new_host_combined_vars
            changed = True

        # create any requested groups and place the host in them
        new_groups = host_info.get('groups', [])
        for group_name in new_groups:
            if group_name not in self._inventory.groups:
                group_name = self._inventory.add_group(group_name)
                changed = True
            new_group = self._inventory.groups[group_name]
            if new_group.add_host(self._inventory.hosts[host_name]):
                changed = True

        # reconcile inventory, ensures inventory rules are followed
        if changed:
            self._inventory.reconcile_inventory()

    result_item['changed'] = changed
def _add_group(self, host, result_item):
'''
Helper function to add a group (if it does not exist), and to assign the
specified host to that group.
'''
changed = False
# the host here is from the executor side, which means it was a
# serialized/cloned copy and we'll need to look up the proper
# host object from the master inventory
real_host = self._inventory.hosts.get(host.name)
if real_host is None:
if host.name == self._inventory.localhost.name:
real_host = self._inventory.localhost
else:
raise AnsibleError('%s cannot be matched in inventory' % host.name)
group_name = result_item.get('add_group')
parent_group_names = result_item.get('parent_groups', [])
if group_name not in self._inventory.groups:
group_name = self._inventory.add_group(group_name)
for name in parent_group_names:
if name not in self._inventory.groups:
# create the new group and add it to inventory
self._inventory.add_group(name)
changed = True
group = self._inventory.groups[group_name]
for parent_group_name in parent_group_names:
parent_group = self._inventory.groups[parent_group_name]
new = parent_group.add_child_group(group)
if new and not changed:
changed = True
if real_host not in group.get_hosts():
changed = group.add_host(real_host)
if group not in real_host.get_groups():
changed = real_host.add_group(group)
if changed:
self._inventory.reconcile_inventory()
result_item['changed'] = changed
def _copy_included_file(self, included_file):
'''
A proven safe and performant way to create a copy of an included file
'''
ti_copy = included_file._task.copy(exclude_parent=True)
ti_copy._parent = included_file._task._parent
temp_vars = ti_copy.vars.copy()
temp_vars.update(included_file._vars)
ti_copy.vars = temp_vars
return ti_copy
def _load_included_file(self, included_file, iterator, is_handler=False):
    '''
    Loads an included YAML file of tasks, applying the optional set of variables.

    :arg included_file: IncludedFile describing the file and the hosts that
        included it.
    :kwarg is_handler: load the blocks as handlers instead of regular tasks.
    :returns: the list of loaded blocks, or [] on empty data / load failure
        (in the failure case every including host is marked failed).
    '''
    display.debug("loading included file: %s" % included_file._filename)
    try:
        data = self._loader.load_from_file(included_file._filename)
        if data is None:
            return []
        elif not isinstance(data, list):
            raise AnsibleError("included task files must contain a list of tasks")

        ti_copy = self._copy_included_file(included_file)

        # pop tags out of the include args, if they were specified there, and assign
        # them to the include. If the include already had tags specified, we raise an
        # error so that users know not to specify them both ways
        tags = included_file._task.vars.pop('tags', [])
        if isinstance(tags, string_types):
            tags = tags.split(',')
        if len(tags) > 0:
            if len(included_file._task.tags) > 0:
                raise AnsibleParserError("Include tasks should not specify tags in more than one way (both via args and directly on the task). "
                                         "Mixing tag specify styles is prohibited for whole import hierarchy, not only for single import statement",
                                         obj=included_file._task._ds)
            display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option",
                               version='2.12', collection_name='ansible.builtin')
            included_file._task.tags = tags

        block_list = load_list_of_blocks(
            data,
            play=iterator._play,
            parent_block=ti_copy.build_parent_block(),
            role=included_file._task._role,
            use_handlers=is_handler,
            loader=self._loader,
            variable_manager=self._variable_manager,
        )

        # since we skip incrementing the stats when the task result is
        # first processed, we do so now for each host in the list
        for host in included_file._hosts:
            self._tqm._stats.increment('ok', host.name)
    except AnsibleError as e:
        if isinstance(e, AnsibleFileNotFound):
            reason = "Could not find or access '%s' on the Ansible Controller." % to_text(e.file_name)
        else:
            reason = to_text(e)

        # mark all of the hosts including this file as failed, send callbacks,
        # and increment the stats for this host
        for host in included_file._hosts:
            tr = TaskResult(host=host, task=included_file._task, return_data=dict(failed=True, reason=reason))
            iterator.mark_host_failed(host)
            self._tqm._failed_hosts[host.name] = True
            self._tqm._stats.increment('failures', host.name)
            self._tqm.send_callback('v2_runner_on_failed', tr)
        return []

    # finally, send the callback and return the list of blocks loaded
    self._tqm.send_callback('v2_playbook_on_include', included_file)
    display.debug("done processing included file")
    return block_list
def run_handlers(self, iterator, play_context):
    """Run every handler that has at least one notified host.

    Stops the current handler block early when a handler run fails and
    returns that failing result.
    """
    result = self._tqm.RUN_OK
    # FIXME: handlers need to support the rescue/always portions of blocks too,
    # but this may take some work in the iterator and gets tricky when
    # we consider the ability of meta tasks to flush handlers
    for handler_block in iterator._play.handlers:
        for handler in handler_block.block:
            if not handler.notified_hosts:
                continue
            result = self._do_handler_run(handler, handler.get_name(), iterator=iterator, play_context=play_context)
            if not result:
                break
    return result
def _do_handler_run(self, handler, handler_name, iterator, play_context, notified_hosts=None):
    """Run a single handler on its notified hosts, recursing into any
    handler tasks brought in by includes.

    :kwarg notified_hosts: explicit host list to run against; defaults to a
        copy of ``handler.notified_hosts``.
    :returns: True on success, or the failing result from a nested run.
    """
    # FIXME: need to use iterator.get_failed_hosts() instead?
    # if not len(self.get_hosts_remaining(iterator._play)):
    #     self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
    #     result = False
    #     break
    if notified_hosts is None:
        notified_hosts = handler.notified_hosts[:]

    # strategy plugins that filter hosts need access to the iterator to identify failed hosts
    failed_hosts = self._filter_notified_failed_hosts(iterator, notified_hosts)
    notified_hosts = self._filter_notified_hosts(notified_hosts)
    notified_hosts += failed_hosts

    if len(notified_hosts) > 0:
        self._tqm.send_callback('v2_playbook_on_handler_task_start', handler)

    bypass_host_loop = False
    try:
        action = plugin_loader.action_loader.get(handler.action, class_only=True, collection_list=handler.collections)
        if getattr(action, 'BYPASS_HOST_LOOP', False):
            bypass_host_loop = True
    except KeyError:
        # we don't care here, because the action may simply not have a
        # corresponding action plugin
        pass

    host_results = []
    for host in notified_hosts:
        if not iterator.is_failed(host) or iterator._play.force_handlers:
            task_vars = self._variable_manager.get_vars(play=iterator._play, host=host, task=handler,
                                                        _hosts=self._hosts_cache, _hosts_all=self._hosts_cache_all)
            self.add_tqm_variables(task_vars, play=iterator._play)
            templar = Templar(loader=self._loader, variables=task_vars)
            if not handler.cached_name:
                handler.name = templar.template(handler.name)
                handler.cached_name = True

            self._queue_task(host, handler, task_vars, play_context)

            # run_once (or a BYPASS_HOST_LOOP action) means one execution only
            if templar.template(handler.run_once) or bypass_host_loop:
                break

    # collect the results from the handler run
    host_results = self._wait_on_handler_results(iterator, handler, notified_hosts)

    included_files = IncludedFile.process_include_results(
        host_results,
        iterator=iterator,
        loader=self._loader,
        variable_manager=self._variable_manager
    )

    result = True
    if len(included_files) > 0:
        for included_file in included_files:
            try:
                new_blocks = self._load_included_file(included_file, iterator=iterator, is_handler=True)
                # for every task in each block brought in by the include, add the list
                # of hosts which included the file to the notified_handlers dict
                for block in new_blocks:
                    iterator._play.handlers.append(block)
                    for task in block.block:
                        task_name = task.get_name()
                        display.debug("adding task '%s' included in handler '%s'" % (task_name, handler_name))
                        task.notified_hosts = included_file._hosts[:]
                        # recurse to run the newly-included handler task
                        result = self._do_handler_run(
                            handler=task,
                            handler_name=task_name,
                            iterator=iterator,
                            play_context=play_context,
                            notified_hosts=included_file._hosts[:],
                        )
                        if not result:
                            break
            except AnsibleError as e:
                for host in included_file._hosts:
                    iterator.mark_host_failed(host)
                    self._tqm._failed_hosts[host.name] = True
                display.warning(to_text(e))
                continue

    # remove hosts from notification list
    handler.notified_hosts = [
        h for h in handler.notified_hosts
        if h not in notified_hosts]
    display.debug("done running handlers, result is: %s" % result)
    return result
def _filter_notified_failed_hosts(self, iterator, notified_hosts):
return []
def _filter_notified_hosts(self, notified_hosts):
'''
Filter notified hosts accordingly to strategy
'''
# As main strategy is linear, we do not filter hosts
# We return a copy to avoid race conditions
return notified_hosts[:]
def _take_step(self, task, host=None):
    """Prompt the user in --step mode; return True when the task should run."""
    msg = u'Perform task: %s ' % task
    if host:
        msg += u'on %s ' % host
    msg += u'(N)o/(y)es/(c)ontinue: '

    answer = display.prompt(msg).lower()
    if answer in ('y', 'yes'):
        display.debug("User ran task")
        should_run = True
    elif answer in ('c', 'continue'):
        # 'continue' also disables step mode for the rest of the run
        display.debug("User ran task and canceled step mode")
        self._step = False
        should_run = True
    else:
        # anything else (including the default empty answer) skips the task
        display.debug("User skipped task")
        should_run = False

    display.banner(msg)
    return should_run
def _cond_not_supported_warn(self, task_name):
    """Warn that the given meta action ignores 'when' conditionals."""
    msg = "%s task does not support when conditional" % task_name
    display.warning(msg)
def _execute_meta(self, task, play_context, iterator, target_host):
    """Execute a 'meta' task (noop, flush_handlers, refresh_inventory,
    clear_facts, clear_host_errors, end_play, end_host, role_complete,
    reset_connection) directly within the strategy.

    :returns: a single-element list containing the TaskResult.
    :raises AnsibleError: for an unrecognized meta action.
    """
    # meta tasks store their args in the _raw_params field of args,
    # since they do not use k=v pairs, so get that
    meta_action = task.args.get('_raw_params')

    def _evaluate_conditional(h):
        # evaluate the task's 'when' clause using this host's variables
        all_vars = self._variable_manager.get_vars(play=iterator._play, host=h, task=task,
                                                   _hosts=self._hosts_cache, _hosts_all=self._hosts_cache_all)
        templar = Templar(loader=self._loader, variables=all_vars)
        return task.evaluate_conditional(templar, all_vars)

    skipped = False
    msg = ''
    skip_reason = '%s conditional evaluated to False' % meta_action
    self._tqm.send_callback('v2_playbook_on_task_start', task, is_conditional=False)

    # These don't support "when" conditionals
    if meta_action in ('noop', 'flush_handlers', 'refresh_inventory', 'reset_connection') and task.when:
        self._cond_not_supported_warn(meta_action)

    if meta_action == 'noop':
        msg = "noop"
    elif meta_action == 'flush_handlers':
        self._flushed_hosts[target_host] = True
        self.run_handlers(iterator, play_context)
        self._flushed_hosts[target_host] = False
        msg = "ran handlers"
    elif meta_action == 'refresh_inventory':
        self._inventory.refresh_inventory()
        self._set_hosts_cache(iterator._play)
        msg = "inventory successfully refreshed"
    elif meta_action == 'clear_facts':
        if _evaluate_conditional(target_host):
            for host in self._inventory.get_hosts(iterator._play.hosts):
                hostname = host.get_name()
                self._variable_manager.clear_facts(hostname)
            msg = "facts cleared"
        else:
            skipped = True
            skip_reason += ', not clearing facts and fact cache for %s' % target_host.name
    elif meta_action == 'clear_host_errors':
        if _evaluate_conditional(target_host):
            for host in self._inventory.get_hosts(iterator._play.hosts):
                self._tqm._failed_hosts.pop(host.name, False)
                self._tqm._unreachable_hosts.pop(host.name, False)
                iterator._host_states[host.name].fail_state = iterator.FAILED_NONE
            msg = "cleared host errors"
        else:
            skipped = True
            skip_reason += ', not clearing host error state for %s' % target_host.name
    elif meta_action == 'end_play':
        if _evaluate_conditional(target_host):
            # mark every reachable host as done iterating
            for host in self._inventory.get_hosts(iterator._play.hosts):
                if host.name not in self._tqm._unreachable_hosts:
                    iterator._host_states[host.name].run_state = iterator.ITERATING_COMPLETE
            msg = "ending play"
        else:
            skipped = True
            skip_reason += ', continuing play'
    elif meta_action == 'end_host':
        if _evaluate_conditional(target_host):
            iterator._host_states[target_host.name].run_state = iterator.ITERATING_COMPLETE
            iterator._play._removed_hosts.append(target_host.name)
            msg = "ending play for %s" % target_host.name
        else:
            skipped = True
            skip_reason += ", continuing execution for %s" % target_host.name
            # TODO: Nix msg here? Left for historical reasons, but skip_reason exists now.
            msg = "end_host conditional evaluated to false, continuing execution for %s" % target_host.name
    elif meta_action == 'role_complete':
        # Allow users to use this in a play as reported in https://github.com/ansible/ansible/issues/22286?
        # How would this work with allow_duplicates??
        if task.implicit:
            if target_host.name in task._role._had_task_run:
                task._role._completed[target_host.name] = True
                msg = 'role_complete for %s' % target_host.name
    elif meta_action == 'reset_connection':
        all_vars = self._variable_manager.get_vars(play=iterator._play, host=target_host, task=task,
                                                   _hosts=self._hosts_cache, _hosts_all=self._hosts_cache_all)
        templar = Templar(loader=self._loader, variables=all_vars)

        # apply the given task's information to the connection info,
        # which may override some fields already set by the play or
        # the options specified on the command line
        play_context = play_context.set_task_and_variable_override(task=task, variables=all_vars, templar=templar)

        # fields set from the play/task may be based on variables, so we have to
        # do the same kind of post validation step on it here before we use it.
        play_context.post_validate(templar=templar)

        # now that the play context is finalized, if the remote_addr is not set
        # default to using the host's address field as the remote address
        if not play_context.remote_addr:
            play_context.remote_addr = target_host.address

        # We also add "magic" variables back into the variables dict to make sure
        # a certain subset of variables exist.
        play_context.update_vars(all_vars)

        if target_host in self._active_connections:
            connection = Connection(self._active_connections[target_host])
            del self._active_connections[target_host]
        else:
            connection = plugin_loader.connection_loader.get(play_context.connection, play_context, os.devnull)
            connection.set_options(task_keys=task.dump_attrs(), var_options=all_vars)
            play_context.set_attributes_from_plugin(connection)

        if connection:
            try:
                connection.reset()
                msg = 'reset connection'
            except ConnectionError as e:
                # most likely socket is already closed
                display.debug("got an error while closing persistent connection: %s" % e)
        else:
            msg = 'no connection, nothing to reset'
    else:
        raise AnsibleError("invalid meta action requested: %s" % meta_action, obj=task._ds)

    result = {'msg': msg}
    if skipped:
        result['skipped'] = True
        result['skip_reason'] = skip_reason
    else:
        result['changed'] = False

    display.vv("META: %s" % msg)

    res = TaskResult(target_host, task, result)
    if skipped:
        self._tqm.send_callback('v2_runner_on_skipped', res)
    return [res]
def get_hosts_left(self, iterator):
    """Return the inventory hosts this iterator may still target, skipping unreachable ones."""
    unreachable = self._tqm._unreachable_hosts
    remaining = []
    for name in self._hosts_cache:
        if name in unreachable:
            continue
        try:
            resolved = self._inventory.hosts[name]
        except KeyError:
            # not in the direct hosts mapping; fall back to the full inventory lookup
            resolved = self._inventory.get_host(name)
        remaining.append(resolved)
    return remaining
def update_active_connections(self, results):
    """Record persistent-connection socket paths reported in task results."""
    for result in results:
        task_fields = result._task_fields
        if 'args' not in task_fields:
            continue
        socket_path = task_fields['args'].get('_ansible_socket')
        # only the first socket seen for a given host is remembered
        if socket_path and result._host not in self._active_connections:
            self._active_connections[result._host] = socket_path
class NextAction(object):
    """The next action after an interpreter's exit."""

    # possible outcomes of a debugger session
    REDO = 1
    CONTINUE = 2
    EXIT = 3

    def __init__(self, result=EXIT):
        # default to exiting unless the debugger chooses otherwise
        self.result = result
class Debugger(cmd.Cmd):
    """Interactive single-task debugger prompt.

    Exposes the failing task, its vars, host, play context and result through
    ``self.scope`` so they can be inspected/edited with eval/exec from the
    prompt, and records the user's decision (redo/continue/exit) in the shared
    ``next_action`` object, which the caller reads after ``cmdloop`` returns.
    """

    prompt_continuous = '> '  # multiple lines

    def __init__(self, task, host, task_vars, play_context, result, next_action):
        # cmd.Cmd is old-style class
        cmd.Cmd.__init__(self)

        self.prompt = '[%s] %s (debug)> ' % (host, task)
        self.intro = None
        # everything the user can reference from the prompt lives in this scope
        self.scope = {}
        self.scope['task'] = task
        self.scope['task_vars'] = task_vars
        self.scope['host'] = host
        self.scope['play_context'] = play_context
        self.scope['result'] = result
        # shared mutable object: the strategy inspects .result once we exit
        self.next_action = next_action

    def cmdloop(self):
        # swallow Ctrl-C so an interrupt simply leaves the debugger prompt
        try:
            cmd.Cmd.cmdloop(self)
        except KeyboardInterrupt:
            pass

    do_h = cmd.Cmd.do_help  # alias: 'h' shows help

    def do_EOF(self, args):
        """Quit"""
        return self.do_quit(args)

    def do_quit(self, args):
        """Quit"""
        display.display('User interrupted execution')
        self.next_action.result = NextAction.EXIT
        # returning True tells cmd.Cmd to stop the command loop
        return True

    do_q = do_quit

    def do_continue(self, args):
        """Continue to next result"""
        self.next_action.result = NextAction.CONTINUE
        return True

    do_c = do_continue

    def do_redo(self, args):
        """Schedule task for re-execution. The re-execution may not be the next result"""
        self.next_action.result = NextAction.REDO
        return True

    do_r = do_redo

    def do_update_task(self, args):
        """Recreate the task from ``task._ds``, and template with updated ``task_vars``"""
        templar = Templar(None, variables=self.scope['task_vars'])
        task = self.scope['task']
        task = task.load_data(task._ds)
        task.post_validate(templar)
        self.scope['task'] = task

    do_u = do_update_task

    def evaluate(self, args):
        # evaluate the user expression against module globals plus the debug scope;
        # on failure, display the error and re-raise for the caller to handle
        try:
            return eval(args, globals(), self.scope)
        except Exception:
            t, v = sys.exc_info()[:2]
            if isinstance(t, str):
                exc_type_name = t
            else:
                exc_type_name = t.__name__
            display.display('***%s:%s' % (exc_type_name, repr(v)))
            raise

    def do_pprint(self, args):
        """Pretty Print"""
        # evaluate() already displayed any error, so failures are silenced here
        try:
            result = self.evaluate(args)
            display.display(pprint.pformat(result))
        except Exception:
            pass

    do_p = do_pprint

    def execute(self, args):
        # compile/exec a user statement; 'single' mode echoes expression results
        try:
            code = compile(args + '\n', '<stdin>', 'single')
            exec(code, globals(), self.scope)
        except Exception:
            t, v = sys.exc_info()[:2]
            if isinstance(t, str):
                exc_type_name = t
            else:
                exc_type_name = t.__name__
            display.display('***%s:%s' % (exc_type_name, repr(v)))
            raise

    def default(self, line):
        # any unrecognized command line is executed as Python in the debug scope
        try:
            self.execute(line)
        except Exception:
            pass
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,926 |
Massive overhead of hosts templating for every host of every task
|
### Summary
Executing a playbook with many hosts, there is massive overhead in checking whether the hosts list is a pattern, see for yourself on this callgrind graph:

https://github.com/ansible/ansible/blob/da60525610a384bb04833b1c6429d9db6a87ef64/lib/ansible/vars/manager.py#L490
That check has quadratic runtime with the number of hosts - with a lot of hosts, commenting that check out reduces runtime manyfold.
### Issue Type
Bug Report
### Component Name
variable manager
### Ansible Version
2.10.5, devel
### OS / Environment
Ubuntu
### Steps to Reproduce
```yaml
- hosts: localhost
tasks:
- set_fact:
extra_tasks: true # compute it
hosts: "{{ [an, array, with, 5100, computed, hosts] }}"
- hosts: "{{ hostvars.localhost.hosts }}"
vars:
extra_tasks: "{{ hostvars.localhost.do_extra_tasks }}"
tasks:
# some more tasks here
- name: Execute extra tasks
command: /bin/true # example
when: extra_tasks
```
### Expected Results
Execute in tens of seconds.
### Actual Results
Take over 5 minutes to execute.
|
https://github.com/ansible/ansible/issues/73926
|
https://github.com/ansible/ansible/pull/73941
|
d8bf4206e446c45ba057e85819278cef5fbeff2c
|
3740d7b028b23a1630f544e01cabf1fa3a50257c
| 2021-03-16T18:09:49Z |
python
| 2021-04-19T17:03:40Z |
lib/ansible/vars/manager.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import sys
from collections import defaultdict
try:
from hashlib import sha1
except ImportError:
from sha import sha as sha1
from jinja2.exceptions import UndefinedError
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleFileNotFound, AnsibleAssertionError, AnsibleTemplateError
from ansible.inventory.host import Host
from ansible.inventory.helpers import sort_groups, get_group_vars
from ansible.module_utils._text import to_text
from ansible.module_utils.common._collections_compat import Mapping, MutableMapping, Sequence
from ansible.module_utils.six import iteritems, text_type, string_types
from ansible.plugins.loader import lookup_loader
from ansible.vars.fact_cache import FactCache
from ansible.template import Templar
from ansible.utils.display import Display
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.vars import combine_vars, load_extra_vars, load_options_vars
from ansible.utils.unsafe_proxy import wrap_var
from ansible.vars.clean import namespace_facts, clean_facts
from ansible.vars.plugins import get_vars_from_inventory_sources, get_vars_from_path
display = Display()
def preprocess_vars(a):
    '''
    Normalize vars loaded from a file into a list of dictionaries, so callers
    always see a consistent shape regardless of whether the file contained a
    single mapping or a list of mappings.
    '''
    if a is None:
        return None

    # a single mapping becomes a one-element list
    data = a if isinstance(a, list) else [a]

    # every entry must be a mapping of variable names to values
    for item in data:
        if not isinstance(item, MutableMapping):
            raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))

    return data
class VariableManager:
    """Computes and merges all variable sources for plays, hosts and tasks."""

    # Names of the precedence-entry helper functions (defined locally inside
    # get_vars) that users may select via the VARIABLE_PRECEDENCE config option.
    _ALLOWED = frozenset(['plugins_by_group', 'groups_plugins_play', 'groups_plugins_inventory', 'groups_inventory',
                          'all_plugins_play', 'all_plugins_inventory', 'all_inventory'])
def __init__(self, loader=None, inventory=None, version_info=None):
    """Initialize caches, options/extra vars and the backing fact cache."""
    # the per-host caches all share the same shape: host name -> dict of vars
    for cache_attr in ('_nonpersistent_fact_cache', '_vars_cache', '_extra_vars',
                       '_host_vars_files', '_group_vars_files'):
        setattr(self, cache_attr, defaultdict(dict))

    self._inventory = inventory
    self._loader = loader
    self._hostvars = None
    # unique-per-run placeholder that implements the 'omit' magic variable
    self._omit_token = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()

    self._options_vars = load_options_vars(version_info)

    # If the basedir is specified as the empty string then it results in cwd being used.
    # This is not a safe location to load vars from.
    basedir = self._options_vars.get('basedir', False)
    self.safe_basedir = bool(basedir is False or basedir)

    # load extra vars (replaces the defaultdict placeholder set above)
    self._extra_vars = load_extra_vars(loader=self._loader)

    # load fact cache
    try:
        self._fact_cache = FactCache()
    except AnsibleError as e:
        # bad cache plugin is not fatal error
        # fallback to a dict as in memory cache
        display.warning(to_text(e))
        self._fact_cache = {}
def __getstate__(self):
data = dict(
fact_cache=self._fact_cache,
np_fact_cache=self._nonpersistent_fact_cache,
vars_cache=self._vars_cache,
extra_vars=self._extra_vars,
host_vars_files=self._host_vars_files,
group_vars_files=self._group_vars_files,
omit_token=self._omit_token,
options_vars=self._options_vars,
inventory=self._inventory,
safe_basedir=self.safe_basedir,
)
return data
def __setstate__(self, data):
    # Pickle support: restore the state captured by __getstate__.  Every key
    # falls back to a fresh default so partially-populated pickles still load.
    self._fact_cache = data.get('fact_cache', defaultdict(dict))
    self._nonpersistent_fact_cache = data.get('np_fact_cache', defaultdict(dict))
    self._vars_cache = data.get('vars_cache', defaultdict(dict))
    self._extra_vars = data.get('extra_vars', dict())
    self._host_vars_files = data.get('host_vars_files', defaultdict(dict))
    self._group_vars_files = data.get('group_vars_files', defaultdict(dict))
    # regenerate a fresh omit token if one was not carried over
    self._omit_token = data.get('omit_token', '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest())
    self._inventory = data.get('inventory', None)
    self._options_vars = data.get('options_vars', dict())
    self.safe_basedir = data.get('safe_basedir', False)
    # the loader and hostvars are process-local; the owner must re-attach them
    self._loader = None
    self._hostvars = None
@property
def extra_vars(self):
    # read-only accessor for the extra vars loaded at construction time
    return self._extra_vars
def set_inventory(self, inventory):
    # (re)bind the inventory used for all group/host variable lookups
    self._inventory = inventory
def get_vars(self, play=None, host=None, task=None, include_hostvars=True, include_delegate_to=True, use_cache=True,
             _hosts=None, _hosts_all=None, stage='task'):
    '''
    Returns the variables, with optional "context" given via the parameters
    for the play, host, and task (which could possibly result in different
    sets of variables being returned due to the additional context).

    The order of precedence is:
    - play->roles->get_default_vars (if there is a play context)
    - group_vars_files[host] (if there is a host context)
    - host_vars_files[host] (if there is a host context)
    - host->get_vars (if there is a host context)
    - fact_cache[host] (if there is a host context)
    - play vars (if there is a play context)
    - play vars_files (if there's no host context, ignore
      file names that cannot be templated)
    - task->get_vars (if there is a task context)
    - vars_cache[host] (if there is a host context)
    - extra vars

    ``_hosts`` and ``_hosts_all`` should be considered private args, with only internal trusted callers relying
    on the functionality they provide. These arguments may be removed at a later date without a deprecation
    period and without warning.
    '''
    # NOTE(review): ``use_cache`` is accepted but never consulted in this body —
    # presumably kept for API compatibility; confirm before removing.
    display.debug("in VariableManager get_vars()")

    all_vars = dict()
    magic_variables = self._get_magic_variables(
        play=play,
        host=host,
        task=task,
        include_hostvars=include_hostvars,
        include_delegate_to=include_delegate_to,
        _hosts=_hosts,
        _hosts_all=_hosts_all,
    )

    # maps top-level var name -> human-readable source string (debug only)
    _vars_sources = {}

    def _combine_and_track(data, new_data, source):
        '''
        Wrapper function to update var sources dict and call combine_vars()

        See notes in the VarsWithSources docstring for caveats and limitations of the source tracking
        '''
        if C.DEFAULT_DEBUG:
            # Populate var sources dict
            for key in new_data:
                _vars_sources[key] = source
        return combine_vars(data, new_data)

    # default for all cases
    basedirs = []
    if self.safe_basedir:  # avoid adhoc/console loading cwd
        basedirs = [self._loader.get_basedir()]

    if play:
        # first we compile any vars specified in defaults/main.yml
        # for all roles within the specified play
        for role in play.get_roles():
            all_vars = _combine_and_track(all_vars, role.get_default_vars(), "role '%s' defaults" % role.name)

    if task:
        # set basedirs
        if C.PLAYBOOK_VARS_ROOT == 'all':  # should be default
            basedirs = task.get_search_path()
        elif C.PLAYBOOK_VARS_ROOT in ('bottom', 'playbook_dir'):  # only option in 2.4.0
            basedirs = [task.get_search_path()[0]]
        elif C.PLAYBOOK_VARS_ROOT != 'top':
            # preserves default basedirs, only option pre 2.3
            raise AnsibleError('Unknown playbook vars logic: %s' % C.PLAYBOOK_VARS_ROOT)

        # if we have a task in this context, and that task has a role, make
        # sure it sees its defaults above any other roles, as we previously
        # (v1) made sure each task had a copy of its roles default vars
        if task._role is not None and (play or task.action in C._ACTION_INCLUDE_ROLE):
            all_vars = _combine_and_track(all_vars, task._role.get_default_vars(dep_chain=task.get_dep_chain()),
                                          "role '%s' defaults" % task._role.name)

    if host:
        # THE 'all' group and the rest of groups for a host, used below
        all_group = self._inventory.groups.get('all')
        host_groups = sort_groups([g for g in host.get_groups() if g.name not in ['all']])

        # NOTE(review): _get_plugin_vars is defined but never called inside this
        # method — appears to be dead code here; confirm before removing.
        def _get_plugin_vars(plugin, path, entities):
            data = {}
            try:
                data = plugin.get_vars(self._loader, path, entities)
            except AttributeError:
                try:
                    for entity in entities:
                        if isinstance(entity, Host):
                            data.update(plugin.get_host_vars(entity.name))
                        else:
                            data.update(plugin.get_group_vars(entity.name))
                except AttributeError:
                    if hasattr(plugin, 'run'):
                        raise AnsibleError("Cannot use v1 type vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
                    else:
                        raise AnsibleError("Invalid vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
            return data

        # internal functions that actually do the work
        def _plugins_inventory(entities):
            ''' merges all entities by inventory source '''
            return get_vars_from_inventory_sources(self._loader, self._inventory._sources, entities, stage)

        def _plugins_play(entities):
            ''' merges all entities adjacent to play '''
            data = {}
            for path in basedirs:
                data = _combine_and_track(data, get_vars_from_path(self._loader, path, entities, stage), "path '%s'" % path)
            return data

        # configurable functions that are sortable via config, remember to add to _ALLOWED if expanding this list
        def all_inventory():
            return all_group.get_vars()

        def all_plugins_inventory():
            return _plugins_inventory([all_group])

        def all_plugins_play():
            return _plugins_play([all_group])

        def groups_inventory():
            ''' gets group vars from inventory '''
            return get_group_vars(host_groups)

        def groups_plugins_inventory():
            ''' gets plugin sources from inventory for groups '''
            return _plugins_inventory(host_groups)

        def groups_plugins_play():
            ''' gets plugin sources from play for groups '''
            return _plugins_play(host_groups)

        def plugins_by_groups():
            '''
            merges all plugin sources by group,
            This should be used instead, NOT in combination with the other groups_plugins* functions
            '''
            # NOTE(review): data[group] is read before it is ever assigned, so
            # selecting this precedence entry looks like it would raise KeyError
            # on the first iteration — confirm against upstream.
            data = {}
            for group in host_groups:
                data[group] = _combine_and_track(data[group], _plugins_inventory(group), "inventory group_vars for '%s'" % group)
                data[group] = _combine_and_track(data[group], _plugins_play(group), "playbook group_vars for '%s'" % group)
            return data

        # Merge groups as per precedence config
        # only allow to call the functions we want exposed
        for entry in C.VARIABLE_PRECEDENCE:
            if entry in self._ALLOWED:
                display.debug('Calling %s to load vars for %s' % (entry, host.name))
                # dispatch by name into the local helper functions defined above
                all_vars = _combine_and_track(all_vars, locals()[entry](), "group vars, precedence entry '%s'" % entry)
            else:
                display.warning('Ignoring unknown variable precedence entry: %s' % (entry))

        # host vars, from inventory, inventory adjacent and play adjacent via plugins
        all_vars = _combine_and_track(all_vars, host.get_vars(), "host vars for '%s'" % host)
        all_vars = _combine_and_track(all_vars, _plugins_inventory([host]), "inventory host_vars for '%s'" % host)
        all_vars = _combine_and_track(all_vars, _plugins_play([host]), "playbook host_vars for '%s'" % host)

        # finally, the facts caches for this host, if it exists
        # TODO: cleaning of facts should eventually become part of taskresults instead of vars
        try:
            facts = wrap_var(self._fact_cache.get(host.name, {}))
            all_vars.update(namespace_facts(facts))

            # push facts to main namespace
            if C.INJECT_FACTS_AS_VARS:
                all_vars = _combine_and_track(all_vars, wrap_var(clean_facts(facts)), "facts")
            else:
                # always 'promote' ansible_local
                all_vars = _combine_and_track(all_vars, wrap_var({'ansible_local': facts.get('ansible_local', {})}), "facts")
        except KeyError:
            pass

    if play:
        all_vars = _combine_and_track(all_vars, play.get_vars(), "play vars")

        vars_files = play.get_vars_files()
        try:
            for vars_file_item in vars_files:
                # create a set of temporary vars here, which incorporate the extra
                # and magic vars so we can properly template the vars_files entries
                temp_vars = combine_vars(all_vars, self._extra_vars)
                temp_vars = combine_vars(temp_vars, magic_variables)
                templar = Templar(loader=self._loader, variables=temp_vars)

                # we assume each item in the list is itself a list, as we
                # support "conditional includes" for vars_files, which mimics
                # the with_first_found mechanism.
                vars_file_list = vars_file_item
                if not isinstance(vars_file_list, list):
                    vars_file_list = [vars_file_list]

                # now we iterate through the (potential) files, and break out
                # as soon as we read one from the list. If none are found, we
                # raise an error, which is silently ignored at this point.
                try:
                    for vars_file in vars_file_list:
                        vars_file = templar.template(vars_file)
                        if not (isinstance(vars_file, Sequence)):
                            raise AnsibleError(
                                "Invalid vars_files entry found: %r\n"
                                "vars_files entries should be either a string type or "
                                "a list of string types after template expansion" % vars_file
                            )
                        try:
                            data = preprocess_vars(self._loader.load_from_file(vars_file, unsafe=True))
                            if data is not None:
                                for item in data:
                                    all_vars = _combine_and_track(all_vars, item, "play vars_files from '%s'" % vars_file)
                            break
                        except AnsibleFileNotFound:
                            # we continue on loader failures
                            continue
                        except AnsibleParserError:
                            raise
                    else:
                        # if include_delegate_to is set to False, we ignore the missing
                        # vars file here because we're working on a delegated host
                        if include_delegate_to:
                            raise AnsibleFileNotFound("vars file %s was not found" % vars_file_item)
                except (UndefinedError, AnsibleUndefinedVariable):
                    if host is not None and self._fact_cache.get(host.name, dict()).get('module_setup') and task is not None:
                        raise AnsibleUndefinedVariable("an undefined variable was found when attempting to template the vars_files item '%s'"
                                                       % vars_file_item, obj=vars_file_item)
                    else:
                        # we do not have a full context here, and the missing variable could be because of that
                        # so just show a warning and continue
                        display.vvv("skipping vars_file '%s' due to an undefined variable" % vars_file_item)
                        continue

                display.vvv("Read vars_file '%s'" % vars_file_item)
        except TypeError:
            raise AnsibleParserError("Error while reading vars files - please supply a list of file names. "
                                     "Got '%s' of type %s" % (vars_files, type(vars_files)))

        # By default, we now merge in all vars from all roles in the play,
        # unless the user has disabled this via a config option
        if not C.DEFAULT_PRIVATE_ROLE_VARS:
            for role in play.get_roles():
                all_vars = _combine_and_track(all_vars, role.get_vars(include_params=False), "role '%s' vars" % role.name)

    # next, we merge in the vars from the role, which will specifically
    # follow the role dependency chain, and then we merge in the tasks
    # vars (which will look at parent blocks/task includes)
    if task:
        if task._role:
            all_vars = _combine_and_track(all_vars, task._role.get_vars(task.get_dep_chain(), include_params=False),
                                          "role '%s' vars" % task._role.name)
        all_vars = _combine_and_track(all_vars, task.get_vars(), "task vars")

    # next, we merge in the vars cache (include vars) and nonpersistent
    # facts cache (set_fact/register), in that order
    if host:
        # include_vars non-persistent cache
        all_vars = _combine_and_track(all_vars, self._vars_cache.get(host.get_name(), dict()), "include_vars")
        # fact non-persistent cache
        all_vars = _combine_and_track(all_vars, self._nonpersistent_fact_cache.get(host.name, dict()), "set_fact")

    # next, we merge in role params and task include params
    if task:
        if task._role:
            all_vars = _combine_and_track(all_vars, task._role.get_role_params(task.get_dep_chain()), "role '%s' params" % task._role.name)

        # special case for include tasks, where the include params
        # may be specified in the vars field for the task, which should
        # have higher precedence than the vars/np facts above
        all_vars = _combine_and_track(all_vars, task.get_include_params(), "include params")

    # extra vars
    all_vars = _combine_and_track(all_vars, self._extra_vars, "extra vars")

    # magic variables
    all_vars = _combine_and_track(all_vars, magic_variables, "magic vars")

    # special case for the 'environment' magic variable, as someone
    # may have set it as a variable and we don't want to stomp on it
    if task:
        all_vars['environment'] = task.environment

    # 'vars' magic var
    if task or play:
        # has to be copy, otherwise recursive ref
        all_vars['vars'] = all_vars.copy()

    # if we have a task and we're delegating to another host, figure out the
    # variables for that host now so we don't have to rely on hostvars later
    if task and task.delegate_to is not None and include_delegate_to:
        all_vars['ansible_delegated_vars'], all_vars['_ansible_loop_cache'] = self._get_delegated_vars(play, task, all_vars)

    display.debug("done with get_vars()")

    if C.DEFAULT_DEBUG:
        # Use VarsWithSources wrapper class to display var sources
        return VarsWithSources.new_vars_with_sources(all_vars, _vars_sources)
    else:
        return all_vars
def _get_magic_variables(self, play, host, task, include_hostvars, include_delegate_to,
                         _hosts=None, _hosts_all=None):
    '''
    Returns a dictionary of so-called "magic" variables in Ansible,
    which are special variables we set internally for use.

    ``_hosts`` / ``_hosts_all`` are trusted, pre-computed host name lists
    supplied by internal callers; when present, we must not re-derive them
    (see the performance note below).
    '''
    variables = {}
    variables['playbook_dir'] = os.path.abspath(self._loader.get_basedir())
    variables['ansible_playbook_python'] = sys.executable
    variables['ansible_config_file'] = C.CONFIG_FILE

    if play:
        # This is a list of all role names of all dependencies for all roles for this play
        dependency_role_names = list(set([d.get_name() for r in play.roles for d in r.get_all_dependencies()]))
        # This is a list of all role names of all roles for this play
        play_role_names = [r.get_name() for r in play.roles]

        # ansible_role_names includes all role names, dependent or directly referenced by the play
        variables['ansible_role_names'] = list(set(dependency_role_names + play_role_names))
        # ansible_play_role_names includes the names of all roles directly referenced by this play
        # roles that are implicitly referenced via dependencies are not listed.
        variables['ansible_play_role_names'] = play_role_names
        # ansible_dependent_role_names includes the names of all roles that are referenced via dependencies
        # dependencies that are also explicitly named as roles are included in this list
        variables['ansible_dependent_role_names'] = dependency_role_names

        # DEPRECATED: role_names should be deprecated in favor of ansible_role_names or ansible_play_role_names
        variables['role_names'] = variables['ansible_play_role_names']

        variables['ansible_play_name'] = play.get_name()

    if task:
        if task._role:
            variables['role_name'] = task._role.get_name(include_role_fqcn=False)
            variables['role_path'] = task._role._role_path
            variables['role_uuid'] = text_type(task._role._uuid)
            variables['ansible_collection_name'] = task._role._role_collection
            variables['ansible_role_name'] = task._role.get_name()

    if self._inventory is not None:
        variables['groups'] = self._inventory.get_groups_dict()
    if play:
        # add the list of hosts in the play, as adjusted for limit/filters
        if not _hosts_all:
            # PERF: only template/inspect play.hosts when the caller did not hand
            # us a pre-computed list.  Building a Templar and running is_template
            # plus get_hosts here used to execute for every host of every task,
            # which is quadratic in the number of hosts (issue #73926).
            templar = Templar(loader=self._loader)
            if templar.is_template(play.hosts):
                pattern = 'all'
            else:
                pattern = play.hosts or 'all'
            _hosts_all = [h.name for h in self._inventory.get_hosts(pattern=pattern, ignore_restrictions=True)]
        if not _hosts:
            _hosts = [h.name for h in self._inventory.get_hosts()]

        variables['ansible_play_hosts_all'] = _hosts_all[:]
        variables['ansible_play_hosts'] = [x for x in variables['ansible_play_hosts_all'] if x not in play._removed_hosts]
        variables['ansible_play_batch'] = [x for x in _hosts if x not in play._removed_hosts]

        # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_batch,
        # however this would take work in the templating engine, so for now we'll add both
        variables['play_hosts'] = variables['ansible_play_batch']

    # the 'omit' value allows params to be left out if the variable they are based on is undefined
    variables['omit'] = self._omit_token
    # Set options vars
    for option, option_value in iteritems(self._options_vars):
        variables[option] = option_value

    if self._hostvars is not None and include_hostvars:
        variables['hostvars'] = self._hostvars

    return variables
def _get_delegated_vars(self, play, task, existing_variables):
    """Resolve the full variable set for every ``delegate_to`` target of *task*.

    Returns a tuple ``(delegated_host_vars, _ansible_loop_cache)`` where the
    first maps each resolved delegated host name to its vars dict, and the
    second carries the templated loop items when templating changed them
    (``None`` otherwise).
    """
    # This method has a lot of code copied from ``TaskExecutor._get_loop_items``
    # if this is failing, and ``TaskExecutor._get_loop_items`` is not
    # then more will have to be copied here.
    # TODO: dedupe code here and with ``TaskExecutor._get_loop_items``
    # this may be possible once we move pre-processing pre fork
    if not hasattr(task, 'loop'):
        # This "task" is not a Task, so we need to skip it
        return {}, None

    # we unfortunately need to template the delegate_to field here,
    # as we're fetching vars before post_validate has been called on
    # the task that has been passed in
    vars_copy = existing_variables.copy()

    # get search path for this task to pass to lookup plugins
    vars_copy['ansible_search_path'] = task.get_search_path()

    # ensure basedir is always in (dwim already searches here but we need to display it)
    if self._loader.get_basedir() not in vars_copy['ansible_search_path']:
        vars_copy['ansible_search_path'].append(self._loader.get_basedir())

    templar = Templar(loader=self._loader, variables=vars_copy)

    items = []
    has_loop = True
    if task.loop_with is not None:
        if task.loop_with in lookup_loader:
            fail = True
            if task.loop_with == 'first_found':
                # first_found loops are special. If the item is undefined then we want to fall through to the next
                fail = False
            try:
                loop_terms = listify_lookup_plugin_terms(terms=task.loop, templar=templar,
                                                         loader=self._loader, fail_on_undefined=fail, convert_bare=False)

                # drop still-templated terms when undefined vars are tolerated
                if not fail:
                    loop_terms = [t for t in loop_terms if not templar.is_template(t)]

                mylookup = lookup_loader.get(task.loop_with, loader=self._loader, templar=templar)

                # give lookup task 'context' for subdir (mostly needed for first_found)
                for subdir in ['template', 'var', 'file']:  # TODO: move this to constants?
                    if subdir in task.action:
                        break
                setattr(mylookup, '_subdir', subdir + 's')

                items = wrap_var(mylookup.run(terms=loop_terms, variables=vars_copy))

            except AnsibleTemplateError:
                # This task will be skipped later due to this, so we just setup
                # a dummy array for the later code so it doesn't fail
                items = [None]
        else:
            raise AnsibleError("Failed to find the lookup named '%s' in the available lookup plugins" % task.loop_with)
    elif task.loop is not None:
        try:
            items = templar.template(task.loop)
        except AnsibleTemplateError:
            # This task will be skipped later due to this, so we just setup
            # a dummy array for the later code so it doesn't fail
            items = [None]
    else:
        has_loop = False
        items = [None]

    # since host can change per loop, we keep dict per host name resolved
    delegated_host_vars = dict()
    item_var = getattr(task.loop_control, 'loop_var', 'item')
    cache_items = False
    for item in items:
        # update the variables with the item value for templating, in case we need it
        if item is not None:
            vars_copy[item_var] = item
            templar.available_variables = vars_copy

        delegated_host_name = templar.template(task.delegate_to, fail_on_undefined=False)
        if delegated_host_name != task.delegate_to:
            # templating changed the value, so the items must be cached (see below)
            cache_items = True
        if delegated_host_name is None:
            raise AnsibleError(message="Undefined delegate_to host for task:", obj=task._ds)
        if not isinstance(delegated_host_name, string_types):
            raise AnsibleError(message="the field 'delegate_to' has an invalid type (%s), and could not be"
                                       " converted to a string type." % type(delegated_host_name), obj=task._ds)

        if delegated_host_name in delegated_host_vars:
            # no need to repeat ourselves, as the delegate_to value
            # does not appear to be tied to the loop item variable
            continue

        # now try to find the delegated-to host in inventory, or failing that,
        # create a new host on the fly so we can fetch variables for it
        delegated_host = None
        if self._inventory is not None:
            delegated_host = self._inventory.get_host(delegated_host_name)
            # try looking it up based on the address field, and finally
            # fall back to creating a host on the fly to use for the var lookup
            if delegated_host is None:
                for h in self._inventory.get_hosts(ignore_limits=True, ignore_restrictions=True):
                    # check if the address matches, or if both the delegated_to host
                    # and the current host are in the list of localhost aliases
                    if h.address == delegated_host_name:
                        delegated_host = h
                        break
                else:
                    delegated_host = Host(name=delegated_host_name)
        else:
            delegated_host = Host(name=delegated_host_name)

        # now we go fetch the vars for the delegated-to host and save them in our
        # master dictionary of variables to be used later in the TaskExecutor/PlayContext
        delegated_host_vars[delegated_host_name] = self.get_vars(
            play=play,
            host=delegated_host,
            task=task,
            include_delegate_to=False,
            include_hostvars=True,
        )
        delegated_host_vars[delegated_host_name]['inventory_hostname'] = vars_copy.get('inventory_hostname')

    _ansible_loop_cache = None
    if has_loop and cache_items:
        # delegate_to templating produced a change, so we will cache the templated items
        # in a special private hostvar
        # this ensures that delegate_to+loop doesn't produce different results than TaskExecutor
        # which may reprocess the loop
        _ansible_loop_cache = items

    return delegated_host_vars, _ansible_loop_cache
def clear_facts(self, hostname):
'''
Clears the facts for a host
'''
self._fact_cache.pop(hostname, None)
def set_host_facts(self, host, facts):
'''
Sets or updates the given facts for a host in the fact cache.
'''
if not isinstance(facts, Mapping):
raise AnsibleAssertionError("the type of 'facts' to set for host_facts should be a Mapping but is a %s" % type(facts))
try:
host_cache = self._fact_cache[host]
except KeyError:
# We get to set this as new
host_cache = facts
else:
if not isinstance(host_cache, MutableMapping):
raise TypeError('The object retrieved for {0} must be a MutableMapping but was'
' a {1}'.format(host, type(host_cache)))
# Update the existing facts
host_cache.update(facts)
# Save the facts back to the backing store
self._fact_cache[host] = host_cache
def set_nonpersistent_facts(self, host, facts):
'''
Sets or updates the given facts for a host in the fact cache.
'''
if not isinstance(facts, Mapping):
raise AnsibleAssertionError("the type of 'facts' to set for nonpersistent_facts should be a Mapping but is a %s" % type(facts))
try:
self._nonpersistent_fact_cache[host].update(facts)
except KeyError:
self._nonpersistent_fact_cache[host] = facts
def set_host_variable(self, host, varname, value):
'''
Sets a value in the vars_cache for a host.
'''
if host not in self._vars_cache:
self._vars_cache[host] = dict()
if varname in self._vars_cache[host] and isinstance(self._vars_cache[host][varname], MutableMapping) and isinstance(value, MutableMapping):
self._vars_cache[host] = combine_vars(self._vars_cache[host], {varname: value})
else:
self._vars_cache[host][varname] = value
class VarsWithSources(MutableMapping):
    '''
    Mapping wrapper that remembers, per top-level var, where the value came from.

    No tracking happens here: the source map is supplied externally, and a
    debug message naming the source is emitted whenever a key is read.  Only
    top-level keys can carry source information.
    '''

    def __init__(self, *args, **kwargs):
        ''' Accepts the same arguments as dict(); sources start out empty. '''
        self.data = dict(*args, **kwargs)
        self.sources = {}

    @classmethod
    def new_vars_with_sources(cls, data, sources):
        ''' Build an instance from an existing vars dict plus its source map. '''
        instance = cls(data)
        instance.sources = sources
        return instance

    def get_source(self, key):
        ''' Return the recorded source for key, or None when unknown. '''
        return self.sources.get(key, None)

    def __getitem__(self, key):
        value = self.data[key]
        # emit where this var came from; limitations are described in the class docstring
        display.debug("variable '%s' from source: %s" % (key, self.sources.get(key, "unknown")))
        return value

    def __setitem__(self, key, value):
        self.data[key] = value

    def __delitem__(self, key):
        del self.data[key]

    def __iter__(self):
        return iter(self.data)

    def __len__(self):
        return len(self.data)

    # go straight to the wrapped dict so a membership test does not trigger
    # the debug logging in __getitem__ twice
    def __contains__(self, key):
        return key in self.data

    def copy(self):
        return VarsWithSources.new_vars_with_sources(self.data.copy(), self.sources.copy())
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.