| column | type |
|---|---|
| status | string (1 class) |
| repo_name | string (31 classes) |
| repo_url | string (31 classes) |
| issue_id | int64 (1 to 104k) |
| title | string (length 4 to 369) |
| body | string (length 0 to 254k, nullable) |
| issue_url | string (length 37 to 56) |
| pull_url | string (length 37 to 54) |
| before_fix_sha | string (length 40) |
| after_fix_sha | string (length 40) |
| report_datetime | timestamp[us, tz=UTC] |
| language | string (5 classes) |
| commit_datetime | timestamp[us, tz=UTC] |
| updated_file | string (length 4 to 188) |
| file_content | string (length 0 to 5.12M) |
status: closed
repo_name: ansible/ansible
repo_url: https://github.com/ansible/ansible
issue_id: 81532
title: Handler triggered in block does not run rescue/always tasks
body:
### Summary
When handlers are flushed from within a block using `meta: flush_handlers`, tasks in the block's `rescue:` and `always:` sections are not executed.
If the block has a `rescue:` section, the failure of a handler triggered in the block causes the host to be marked as rescued, but the rescue task itself is not actually executed.
Whether the handlers are defined elsewhere in the play (outside the block) or within a role included in the block does not influence the behavior.
This was first noticed in Ansible 2.15.0; earlier versions (2.12.5 and 2.9.10) behave as expected.
The new behavior is possibly related to the changes introduced to address the following issues:
- #65067
- #52561
### Issue Type
Bug Report
### Component Name
blocks
### Ansible Version
```console
$ ansible --version
ansible [core 2.15.3]
config file = /home/user/git/ansible-galaxy/ansible.cfg
configured module search path = ['/home/user/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/user/venv_3.9/lib/python3.9/site-packages/ansible
ansible collection location = /home/user/.ansible/collections:/usr/share/ansible/collections
executable location = /home/user/venv_3.9/bin/ansible
python version = 3.9.5 (default, Nov 23 2021, 15:27:38) [GCC 9.3.0] (/home/user/venv_3.9/bin/python3.9)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
ANSIBLE_NOCOWS(/home/user/git/ansible-galaxy/ansible.cfg) = True
CACHE_PLUGIN(/home/user/git/ansible-galaxy/ansible.cfg) = memory
COLOR_CHANGED(/home/user/git/ansible-galaxy/ansible.cfg) = yellow
COLOR_DEBUG(/home/user/git/ansible-galaxy/ansible.cfg) = dark gray
COLOR_DEPRECATE(/home/user/git/ansible-galaxy/ansible.cfg) = purple
COLOR_DIFF_ADD(/home/user/git/ansible-galaxy/ansible.cfg) = green
COLOR_DIFF_LINES(/home/user/git/ansible-galaxy/ansible.cfg) = cyan
COLOR_DIFF_REMOVE(/home/user/git/ansible-galaxy/ansible.cfg) = red
COLOR_ERROR(/home/user/git/ansible-galaxy/ansible.cfg) = red
COLOR_HIGHLIGHT(/home/user/git/ansible-galaxy/ansible.cfg) = white
COLOR_OK(/home/user/git/ansible-galaxy/ansible.cfg) = green
COLOR_SKIP(/home/user/git/ansible-galaxy/ansible.cfg) = cyan
COLOR_UNREACHABLE(/home/user/git/ansible-galaxy/ansible.cfg) = red
COLOR_VERBOSE(/home/user/git/ansible-galaxy/ansible.cfg) = blue
COLOR_WARN(/home/user/git/ansible-galaxy/ansible.cfg) = bright purple
CONFIG_FILE() = /home/user/git/ansible-galaxy/ansible.cfg
DEFAULT_ASK_PASS(/home/user/git/ansible-galaxy/ansible.cfg) = False
DEFAULT_BECOME(/home/user/git/ansible-galaxy/ansible.cfg) = True
DEFAULT_BECOME_ASK_PASS(/home/user/git/ansible-galaxy/ansible.cfg) = False
DEFAULT_BECOME_METHOD(/home/user/git/ansible-galaxy/ansible.cfg) = 'sudo'
DEFAULT_BECOME_USER(/home/user/git/ansible-galaxy/ansible.cfg) = 'root'
DEFAULT_FORCE_HANDLERS(/home/user/git/ansible-galaxy/ansible.cfg) = True
DEFAULT_FORKS(/home/user/git/ansible-galaxy/ansible.cfg) = 40
DEFAULT_GATHERING(/home/user/git/ansible-galaxy/ansible.cfg) = implicit
DEFAULT_LOAD_CALLBACK_PLUGINS(/home/user/git/ansible-galaxy/ansible.cfg) = False
DEFAULT_MANAGED_STR(/home/user/git/ansible-galaxy/ansible.cfg) = %Y-%m-%d %H:%M
DEFAULT_MODULE_COMPRESSION(/home/user/git/ansible-galaxy/ansible.cfg) = 'ZIP_DEFLATED'
DEFAULT_MODULE_NAME(/home/user/git/ansible-galaxy/ansible.cfg) = command
DEFAULT_POLL_INTERVAL(/home/user/git/ansible-galaxy/ansible.cfg) = 15
DEFAULT_REMOTE_PORT(/home/user/git/ansible-galaxy/ansible.cfg) = 22
DEFAULT_REMOTE_USER(/home/user/git/ansible-galaxy/ansible.cfg) = user
DEFAULT_ROLES_PATH(/home/user/git/ansible-galaxy/ansible.cfg) = ['/home/user/git/ansible-galaxy/roles', '/home/user/git/ansible-galaxy/galaxy']
DEFAULT_TIMEOUT(/home/user/git/ansible-galaxy/ansible.cfg) = 20
DEFAULT_TRANSPORT(/home/user/git/ansible-galaxy/ansible.cfg) = smart
DEPRECATION_WARNINGS(/home/user/git/ansible-galaxy/ansible.cfg) = True
EDITOR(env: EDITOR) = vim
HOST_KEY_CHECKING(/home/user/git/ansible-galaxy/ansible.cfg) = False
MAX_FILE_SIZE_FOR_DIFF(/home/user/git/ansible-galaxy/ansible.cfg) = 1048576
RETRY_FILES_ENABLED(/home/user/git/ansible-galaxy/ansible.cfg) = False
SHOW_CUSTOM_STATS(/home/user/git/ansible-galaxy/ansible.cfg) = True
SYSTEM_WARNINGS(/home/user/git/ansible-galaxy/ansible.cfg) = True
BECOME:
======
runas:
_____
become_user(/home/user/git/ansible-galaxy/ansible.cfg) = 'root'
su:
__
become_user(/home/user/git/ansible-galaxy/ansible.cfg) = 'root'
sudo:
____
become_user(/home/user/git/ansible-galaxy/ansible.cfg) = 'root'
CALLBACK:
========
default:
_______
show_custom_stats(/home/user/git/ansible-galaxy/ansible.cfg) = True
CONNECTION:
==========
paramiko_ssh:
____________
host_key_checking(/home/user/git/ansible-galaxy/ansible.cfg) = False
port(/home/user/git/ansible-galaxy/ansible.cfg) = 22
pty(/home/user/git/ansible-galaxy/ansible.cfg) = False
remote_user(/home/user/git/ansible-galaxy/ansible.cfg) = user
ssh_args(/home/user/git/ansible-galaxy/ansible.cfg) = -o PasswordAuthentication=no -o ControlMaster=auto -o ControlPersist=60s
timeout(/home/user/git/ansible-galaxy/ansible.cfg) = 20
ssh:
___
control_path(/home/user/git/ansible-galaxy/ansible.cfg) = %(directory)s/ansi-%%h-%%p-%%r
host_key_checking(/home/user/git/ansible-galaxy/ansible.cfg) = False
pipelining(/home/user/git/ansible-galaxy/ansible.cfg) = True
port(/home/user/git/ansible-galaxy/ansible.cfg) = 22
remote_user(/home/user/git/ansible-galaxy/ansible.cfg) = user
scp_if_ssh(/home/user/git/ansible-galaxy/ansible.cfg) = False
sftp_batch_mode(/home/user/git/ansible-galaxy/ansible.cfg) = False
ssh_args(/home/user/git/ansible-galaxy/ansible.cfg) = -o PasswordAuthentication=no -o ControlMaster=auto -o ControlPersist=60s
timeout(/home/user/git/ansible-galaxy/ansible.cfg) = 20
SHELL:
=====
sh:
__
remote_tmp(/home/user/git/ansible-galaxy/ansible.cfg) = $HOME/.ansible/tmp
world_readable_temp(/home/user/git/ansible-galaxy/ansible.cfg) = False
```
### OS / Environment
RHEL7/8/9
### Steps to Reproduce
```yaml
- name: Demo handler in block rescue/always error
become: false
gather_facts: false
hosts: all
tasks:
- block:
- name: Block debug
debug:
msg: "debug 1 - notify handler in block"
changed_when: True
notify: Handler
- meta: flush_handlers
rescue:
- name: Rescue debug
debug:
msg: "debug 2 - rescue failed hosts"
always:
- name: Always debug
debug:
msg: "debug 3 - run on all hosts"
handlers:
- name: Handler
fail:
when: inventory_hostname == 'host1'
```
### Expected Results
The handler triggered in the block should fail on host1.
The rescue task should run on host1 and the host should be marked as rescued instead of failed.
The always task should run on all hosts, including the rescued host1.
```console
# Ansible 2.12.5
$ ansible-playbook test.yml -i inventory.yml -l host1,host2 -D
PLAY [Demo handler in block rescue/always error] ******************************************************
TASK [Block debug] ************************************************************************************
changed: [host1] => {
"msg": "debug 1 - notify handler in block"
}
changed: [host2] => {
"msg": "debug 1 - notify handler in block"
}
TASK [meta] *******************************************************************************************
RUNNING HANDLER [Handler] *****************************************************************************
fatal: [host1]: FAILED! => {"changed": false, "msg": "Failed as requested from task"}
skipping: [host2]
TASK [Rescue debug] ***********************************************************************************
ok: [host1] => {
"msg": "debug 2 - rescue failed hosts"
}
TASK [Always debug] ***********************************************************************************
ok: [host1] => {
"msg": "debug 3 - run on all hosts"
}
ok: [host2] => {
"msg": "debug 3 - run on all hosts"
}
PLAY RECAP ********************************************************************************************
host1 : ok=3 changed=1 unreachable=0 failed=0 skipped=0 rescued=1 ignored=0
host2 : ok=2 changed=1 unreachable=0 failed=0 skipped=1 rescued=0 ignored=0
```
### Actual Results
```console
# Ansible 2.15.3
$ ansible-playbook test.yml -i inventory.yml -l host1,host2 -D
PLAY [Demo handler in block rescue/always error] ******************************************************
TASK [Block debug] ************************************************************************************
changed: [host1] => {
"msg": "debug 1 - notify handler in block"
}
changed: [host2] => {
"msg": "debug 1 - notify handler in block"
}
TASK [meta] *******************************************************************************************
TASK [meta] *******************************************************************************************
RUNNING HANDLER [Handler] *****************************************************************************
fatal: [host1]: FAILED! => {"changed": false, "msg": "Failed as requested from task"}
skipping: [host2]
TASK [Always debug] ***********************************************************************************
ok: [host2] => {
"msg": "debug 3 - run on all hosts"
}
PLAY RECAP ********************************************************************************************
host1 : ok=1 changed=1 unreachable=0 failed=0 skipped=0 rescued=1 ignored=0
host2 : ok=2 changed=1 unreachable=0 failed=0 skipped=1 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
issue_url: https://github.com/ansible/ansible/issues/81532
pull_url: https://github.com/ansible/ansible/pull/81572
before_fix_sha: 9c09ed73928272f898d18a2eada21f7357b418e4
after_fix_sha: a8b6ef7e7cbabaf87e57ea7df9df75eb7e7d1ab5
report_datetime: 2023-08-17T14:50:08Z
language: python
commit_datetime: 2023-11-13T08:57:43Z
updated_file: changelogs/fragments/81532-fix-nested-flush_handlers.yml
file_content: (empty)
status: closed
repo_name: ansible/ansible
repo_url: https://github.com/ansible/ansible
issue_id: 81532
title: Handler triggered in block does not run rescue/always tasks
body: (same as the first row)
issue_url: https://github.com/ansible/ansible/issues/81532
pull_url: https://github.com/ansible/ansible/pull/81572
before_fix_sha: 9c09ed73928272f898d18a2eada21f7357b418e4
after_fix_sha: a8b6ef7e7cbabaf87e57ea7df9df75eb7e7d1ab5
report_datetime: 2023-08-17T14:50:08Z
language: python
commit_datetime: 2023-11-13T08:57:43Z
updated_file: lib/ansible/executor/play_iterator.py
file_content:
```python
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import fnmatch
from enum import IntEnum, IntFlag
from ansible import constants as C
from ansible.errors import AnsibleAssertionError
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.playbook.block import Block
from ansible.playbook.task import Task
from ansible.utils.display import Display
display = Display()
__all__ = ['PlayIterator', 'IteratingStates', 'FailedStates']
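# IteratingStates tracks which part of the play (setup, tasks, rescue, always,
# handlers) a host is currently iterating; FailedStates is a bit field recording
# which of those parts have failed for that host.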
class IteratingStates(IntEnum):
SETUP = 0
TASKS = 1
RESCUE = 2
ALWAYS = 3
HANDLERS = 4
COMPLETE = 5
class FailedStates(IntFlag):
NONE = 0
SETUP = 1
TASKS = 2
RESCUE = 4
ALWAYS = 8
HANDLERS = 16
class HostState:
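    # Per-host iteration state: the position within the play's blocks, the current
    # run/fail state, and any child states created for nested blocks.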
def __init__(self, blocks):
self._blocks = blocks[:]
self.handlers = []
self.handler_notifications = []
self.cur_block = 0
self.cur_regular_task = 0
self.cur_rescue_task = 0
self.cur_always_task = 0
self.cur_handlers_task = 0
self.run_state = IteratingStates.SETUP
self.fail_state = FailedStates.NONE
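        # run state to return to once a flush of notified handlers completes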
self.pre_flushing_run_state = None
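        # when True, the host's handler list is refreshed from the play's handlers
        # at the start of the next IteratingStates.HANDLERS iteration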
self.update_handlers = True
self.pending_setup = False
self.tasks_child_state = None
self.rescue_child_state = None
self.always_child_state = None
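        # set once a rescue section has run to completion for the current block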
self.did_rescue = False
self.did_start_at_task = False
def __repr__(self):
return "HostState(%r)" % self._blocks
def __str__(self):
return ("HOST STATE: block=%d, task=%d, rescue=%d, always=%d, handlers=%d, run_state=%s, fail_state=%s, "
"pre_flushing_run_state=%s, update_handlers=%s, pending_setup=%s, "
"tasks child state? (%s), rescue child state? (%s), always child state? (%s), "
"did rescue? %s, did start at task? %s" % (
self.cur_block,
self.cur_regular_task,
self.cur_rescue_task,
self.cur_always_task,
self.cur_handlers_task,
self.run_state,
self.fail_state,
self.pre_flushing_run_state,
self.update_handlers,
self.pending_setup,
self.tasks_child_state,
self.rescue_child_state,
self.always_child_state,
self.did_rescue,
self.did_start_at_task,
))
def __eq__(self, other):
if not isinstance(other, HostState):
return False
for attr in ('_blocks',
'cur_block', 'cur_regular_task', 'cur_rescue_task', 'cur_always_task', 'cur_handlers_task',
'run_state', 'fail_state', 'pre_flushing_run_state', 'update_handlers', 'pending_setup',
'tasks_child_state', 'rescue_child_state', 'always_child_state'):
if getattr(self, attr) != getattr(other, attr):
return False
return True
def get_current_block(self):
return self._blocks[self.cur_block]
def copy(self):
new_state = HostState(self._blocks)
new_state.handlers = self.handlers[:]
new_state.handler_notifications = self.handler_notifications[:]
new_state.cur_block = self.cur_block
new_state.cur_regular_task = self.cur_regular_task
new_state.cur_rescue_task = self.cur_rescue_task
new_state.cur_always_task = self.cur_always_task
new_state.cur_handlers_task = self.cur_handlers_task
new_state.run_state = self.run_state
new_state.fail_state = self.fail_state
new_state.pre_flushing_run_state = self.pre_flushing_run_state
new_state.update_handlers = self.update_handlers
new_state.pending_setup = self.pending_setup
new_state.did_rescue = self.did_rescue
new_state.did_start_at_task = self.did_start_at_task
if self.tasks_child_state is not None:
new_state.tasks_child_state = self.tasks_child_state.copy()
if self.rescue_child_state is not None:
new_state.rescue_child_state = self.rescue_child_state.copy()
if self.always_child_state is not None:
new_state.always_child_state = self.always_child_state.copy()
return new_state
class PlayIterator:
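    # Walks each host through the play's compiled blocks, handing the next task
    # for a given host to the strategy and tracking per-host run/fail state.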
def __init__(self, inventory, play, play_context, variable_manager, all_vars, start_at_done=False):
self._play = play
self._blocks = []
self._variable_manager = variable_manager
setup_block = Block(play=self._play)
# Gathering facts with run_once would copy the facts from one host to
# the others.
setup_block.run_once = False
setup_task = Task(block=setup_block)
setup_task.action = 'gather_facts'
# TODO: hardcoded resolution here, but should use actual resolution code in the end,
# in case of 'legacy' mismatch
setup_task.resolved_action = 'ansible.builtin.gather_facts'
setup_task.name = 'Gathering Facts'
setup_task.args = {}
# Unless play is specifically tagged, gathering should 'always' run
if not self._play.tags:
setup_task.tags = ['always']
# Default options to gather
for option in ('gather_subset', 'gather_timeout', 'fact_path'):
value = getattr(self._play, option, None)
if value is not None:
setup_task.args[option] = value
setup_task.set_loader(self._play._loader)
# short circuit fact gathering if the entire playbook is conditional
if self._play._included_conditional is not None:
setup_task.when = self._play._included_conditional[:]
setup_block.block = [setup_task]
setup_block = setup_block.filter_tagged_tasks(all_vars)
self._blocks.append(setup_block)
# keep flatten (no blocks) list of all tasks from the play
# used for the lockstep mechanism in the linear strategy
self.all_tasks = setup_block.get_tasks()
for block in self._play.compile():
new_block = block.filter_tagged_tasks(all_vars)
if new_block.has_tasks():
self._blocks.append(new_block)
self.all_tasks.extend(new_block.get_tasks())
# keep list of all handlers, it is copied into each HostState
# at the beginning of IteratingStates.HANDLERS
# the copy happens at each flush in order to restore the original
# list and remove any included handlers that might not be notified
# at the particular flush
self.handlers = [h for b in self._play.handlers for h in b.block]
self._host_states = {}
start_at_matched = False
batch = inventory.get_hosts(self._play.hosts, order=self._play.order)
self.batch_size = len(batch)
for host in batch:
self.set_state_for_host(host.name, HostState(blocks=self._blocks))
# if we're looking to start at a specific task, iterate through
# the tasks for this host until we find the specified task
if play_context.start_at_task is not None and not start_at_done:
while True:
(s, task) = self.get_next_task_for_host(host, peek=True)
if s.run_state == IteratingStates.COMPLETE:
break
if task.name == play_context.start_at_task or (task.name and fnmatch.fnmatch(task.name, play_context.start_at_task)) or \
task.get_name() == play_context.start_at_task or fnmatch.fnmatch(task.get_name(), play_context.start_at_task):
start_at_matched = True
break
self.set_state_for_host(host.name, s)
# finally, reset the host's state to IteratingStates.SETUP
if start_at_matched:
self._host_states[host.name].did_start_at_task = True
self._host_states[host.name].run_state = IteratingStates.SETUP
if start_at_matched:
# we have our match, so clear the start_at_task field on the
# play context to flag that we've started at a task (and future
# plays won't try to advance)
play_context.start_at_task = None
self.end_play = False
self.cur_task = 0
def get_host_state(self, host):
# Since we're using the PlayIterator to carry forward failed hosts,
# in the event that a previous host was not in the current inventory
# we create a stub state for it now
if host.name not in self._host_states:
self.set_state_for_host(host.name, HostState(blocks=[]))
return self._host_states[host.name].copy()
def get_next_task_for_host(self, host, peek=False):
display.debug("getting the next task for host %s" % host.name)
s = self.get_host_state(host)
task = None
if s.run_state == IteratingStates.COMPLETE:
display.debug("host %s is done iterating, returning" % host.name)
return (s, None)
(s, task) = self._get_next_task_from_state(s, host=host)
if not peek:
self.set_state_for_host(host.name, s)
display.debug("done getting next task for host %s" % host.name)
display.debug(" ^ task is: %s" % task)
display.debug(" ^ state is: %s" % s)
return (s, task)
def _get_next_task_from_state(self, state, host):
task = None
# try and find the next task, given the current state.
while True:
# try to get the current block from the list of blocks, and
# if we run past the end of the list we know we're done with
# this block
try:
block = state._blocks[state.cur_block]
except IndexError:
state.run_state = IteratingStates.COMPLETE
return (state, None)
if state.run_state == IteratingStates.SETUP:
# First, we check to see if we were pending setup. If not, this is
# the first trip through IteratingStates.SETUP, so we set the pending_setup
# flag and try to determine if we do in fact want to gather facts for
# the specified host.
if not state.pending_setup:
state.pending_setup = True
# Gather facts if the default is 'smart' and we have not yet
# done it for this host; or if 'explicit' and the play sets
# gather_facts to True; or if 'implicit' and the play does
# NOT explicitly set gather_facts to False.
gathering = C.DEFAULT_GATHERING
implied = self._play.gather_facts is None or boolean(self._play.gather_facts, strict=False)
if (gathering == 'implicit' and implied) or \
(gathering == 'explicit' and boolean(self._play.gather_facts, strict=False)) or \
(gathering == 'smart' and implied and not (self._variable_manager._fact_cache.get(host.name, {}).get('_ansible_facts_gathered', False))):
# The setup block is always self._blocks[0], as we inject it
# during the play compilation in __init__ above.
setup_block = self._blocks[0]
if setup_block.has_tasks() and len(setup_block.block) > 0:
task = setup_block.block[0]
else:
# This is the second trip through IteratingStates.SETUP, so we clear
# the flag and move onto the next block in the list while setting
# the run state to IteratingStates.TASKS
state.pending_setup = False
state.run_state = IteratingStates.TASKS
if not state.did_start_at_task:
state.cur_block += 1
state.cur_regular_task = 0
state.cur_rescue_task = 0
state.cur_always_task = 0
state.tasks_child_state = None
state.rescue_child_state = None
state.always_child_state = None
elif state.run_state == IteratingStates.TASKS:
# clear the pending setup flag, since we're past that and it didn't fail
if state.pending_setup:
state.pending_setup = False
# First, we check for a child task state that is not failed, and if we
# have one recurse into it for the next task. If we're done with the child
# state, we clear it and drop back to getting the next task from the list.
if state.tasks_child_state:
(state.tasks_child_state, task) = self._get_next_task_from_state(state.tasks_child_state, host=host)
if self._check_failed_state(state.tasks_child_state):
# failed child state, so clear it and move into the rescue portion
state.tasks_child_state = None
self._set_failed_state(state)
else:
# get the next task recursively
if task is None or state.tasks_child_state.run_state == IteratingStates.COMPLETE:
# we're done with the child state, so clear it and continue
# back to the top of the loop to get the next task
state.tasks_child_state = None
continue
else:
# First here, we check to see if we've failed anywhere down the chain
# of states we have, and if so we move onto the rescue portion. Otherwise,
# we check to see if we've moved past the end of the list of tasks. If so,
# we move into the always portion of the block, otherwise we get the next
# task from the list.
if self._check_failed_state(state):
state.run_state = IteratingStates.RESCUE
elif state.cur_regular_task >= len(block.block):
state.run_state = IteratingStates.ALWAYS
else:
task = block.block[state.cur_regular_task]
# if the current task is actually a child block, create a child
# state for us to recurse into on the next pass
if isinstance(task, Block):
state.tasks_child_state = HostState(blocks=[task])
state.tasks_child_state.run_state = IteratingStates.TASKS
# since we've created the child state, clear the task
# so we can pick up the child state on the next pass
task = None
state.cur_regular_task += 1
elif state.run_state == IteratingStates.RESCUE:
# The process here is identical to IteratingStates.TASKS, except instead
# we move into the always portion of the block.
if state.rescue_child_state:
(state.rescue_child_state, task) = self._get_next_task_from_state(state.rescue_child_state, host=host)
if self._check_failed_state(state.rescue_child_state):
state.rescue_child_state = None
self._set_failed_state(state)
else:
if task is None or state.rescue_child_state.run_state == IteratingStates.COMPLETE:
state.rescue_child_state = None
continue
else:
if state.fail_state & FailedStates.RESCUE == FailedStates.RESCUE:
state.run_state = IteratingStates.ALWAYS
elif state.cur_rescue_task >= len(block.rescue):
if len(block.rescue) > 0:
state.fail_state = FailedStates.NONE
state.run_state = IteratingStates.ALWAYS
state.did_rescue = True
else:
task = block.rescue[state.cur_rescue_task]
if isinstance(task, Block):
state.rescue_child_state = HostState(blocks=[task])
state.rescue_child_state.run_state = IteratingStates.TASKS
task = None
state.cur_rescue_task += 1
elif state.run_state == IteratingStates.ALWAYS:
# And again, the process here is identical to IteratingStates.TASKS, except
# instead we either move onto the next block in the list, or we set the
# run state to IteratingStates.COMPLETE in the event of any errors, or when we
# have hit the end of the list of blocks.
if state.always_child_state:
(state.always_child_state, task) = self._get_next_task_from_state(state.always_child_state, host=host)
if self._check_failed_state(state.always_child_state):
state.always_child_state = None
self._set_failed_state(state)
else:
if task is None or state.always_child_state.run_state == IteratingStates.COMPLETE:
state.always_child_state = None
continue
else:
if state.cur_always_task >= len(block.always):
if state.fail_state != FailedStates.NONE:
state.run_state = IteratingStates.COMPLETE
else:
state.cur_block += 1
state.cur_regular_task = 0
state.cur_rescue_task = 0
state.cur_always_task = 0
state.run_state = IteratingStates.TASKS
state.tasks_child_state = None
state.rescue_child_state = None
state.always_child_state = None
state.did_rescue = False
else:
task = block.always[state.cur_always_task]
if isinstance(task, Block):
state.always_child_state = HostState(blocks=[task])
state.always_child_state.run_state = IteratingStates.TASKS
task = None
state.cur_always_task += 1
elif state.run_state == IteratingStates.HANDLERS:
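                # a host is moved into this state when its notified handlers are
                # flushed (for example via a `meta: flush_handlers` task)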
if state.update_handlers:
# reset handlers for HostState since handlers from include_tasks
# might be there from previous flush
state.handlers = self.handlers[:]
state.update_handlers = False
state.cur_handlers_task = 0
if state.fail_state & FailedStates.HANDLERS == FailedStates.HANDLERS:
state.update_handlers = True
state.run_state = IteratingStates.COMPLETE
else:
while True:
try:
task = state.handlers[state.cur_handlers_task]
except IndexError:
task = None
state.run_state = state.pre_flushing_run_state
state.update_handlers = True
break
else:
state.cur_handlers_task += 1
if task.is_host_notified(host):
break
elif state.run_state == IteratingStates.COMPLETE:
return (state, None)
# if something above set the task, break out of the loop now
if task:
break
return (state, task)
def _set_failed_state(self, state):
if state.run_state == IteratingStates.SETUP:
state.fail_state |= FailedStates.SETUP
state.run_state = IteratingStates.COMPLETE
elif state.run_state == IteratingStates.TASKS:
if state.tasks_child_state is not None:
state.tasks_child_state = self._set_failed_state(state.tasks_child_state)
else:
state.fail_state |= FailedStates.TASKS
if state._blocks[state.cur_block].rescue:
state.run_state = IteratingStates.RESCUE
elif state._blocks[state.cur_block].always:
state.run_state = IteratingStates.ALWAYS
else:
state.run_state = IteratingStates.COMPLETE
elif state.run_state == IteratingStates.RESCUE:
if state.rescue_child_state is not None:
state.rescue_child_state = self._set_failed_state(state.rescue_child_state)
else:
state.fail_state |= FailedStates.RESCUE
if state._blocks[state.cur_block].always:
state.run_state = IteratingStates.ALWAYS
else:
state.run_state = IteratingStates.COMPLETE
elif state.run_state == IteratingStates.ALWAYS:
if state.always_child_state is not None:
state.always_child_state = self._set_failed_state(state.always_child_state)
else:
state.fail_state |= FailedStates.ALWAYS
state.run_state = IteratingStates.COMPLETE
elif state.run_state == IteratingStates.HANDLERS:
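            # a failed handler sends the host into the rescue/always section of the
            # block it is currently positioned on, if one exists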
state.fail_state |= FailedStates.HANDLERS
state.update_handlers = True
if state._blocks[state.cur_block].rescue:
state.run_state = IteratingStates.RESCUE
elif state._blocks[state.cur_block].always:
state.run_state = IteratingStates.ALWAYS
else:
state.run_state = IteratingStates.COMPLETE
return state
def mark_host_failed(self, host):
s = self.get_host_state(host)
display.debug("marking host %s failed, current state: %s" % (host, s))
s = self._set_failed_state(s)
display.debug("^ failed state is now: %s" % s)
self.set_state_for_host(host.name, s)
self._play._removed_hosts.append(host.name)
def get_failed_hosts(self):
return dict((host, True) for (host, state) in self._host_states.items() if self._check_failed_state(state))
def _check_failed_state(self, state):
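        # Returns True if the given state (including any child states) should be
        # considered failed, taking into account rescue/always sections that may
        # still handle the failure.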
if state is None:
return False
elif state.run_state == IteratingStates.RESCUE and self._check_failed_state(state.rescue_child_state):
return True
elif state.run_state == IteratingStates.ALWAYS and self._check_failed_state(state.always_child_state):
return True
elif state.run_state == IteratingStates.HANDLERS and state.fail_state & FailedStates.HANDLERS == FailedStates.HANDLERS:
return True
elif state.fail_state != FailedStates.NONE:
if state.run_state == IteratingStates.RESCUE and state.fail_state & FailedStates.RESCUE == 0:
return False
elif state.run_state == IteratingStates.ALWAYS and state.fail_state & FailedStates.ALWAYS == 0:
return False
else:
return not (state.did_rescue and state.fail_state & FailedStates.ALWAYS == 0)
elif state.run_state == IteratingStates.TASKS and self._check_failed_state(state.tasks_child_state):
cur_block = state._blocks[state.cur_block]
if len(cur_block.rescue) > 0 and state.fail_state & FailedStates.RESCUE == 0:
return False
else:
return True
return False
def is_failed(self, host):
s = self.get_host_state(host)
return self._check_failed_state(s)
def clear_host_errors(self, host):
self._clear_state_errors(self.get_state_for_host(host.name))
def _clear_state_errors(self, state: HostState) -> None:
state.fail_state = FailedStates.NONE
if state.tasks_child_state is not None:
self._clear_state_errors(state.tasks_child_state)
elif state.rescue_child_state is not None:
self._clear_state_errors(state.rescue_child_state)
elif state.always_child_state is not None:
self._clear_state_errors(state.always_child_state)
def get_active_state(self, state):
'''
Finds the active state, recursively if necessary when there are child states.
'''
if state.run_state == IteratingStates.TASKS and state.tasks_child_state is not None:
return self.get_active_state(state.tasks_child_state)
elif state.run_state == IteratingStates.RESCUE and state.rescue_child_state is not None:
return self.get_active_state(state.rescue_child_state)
elif state.run_state == IteratingStates.ALWAYS and state.always_child_state is not None:
return self.get_active_state(state.always_child_state)
return state
def is_any_block_rescuing(self, state):
'''
Given the current HostState state, determines if the current block, or any child blocks,
are in rescue mode.
'''
if state.run_state == IteratingStates.TASKS and state.get_current_block().rescue:
return True
if state.tasks_child_state is not None:
return self.is_any_block_rescuing(state.tasks_child_state)
if state.rescue_child_state is not None:
return self.is_any_block_rescuing(state.rescue_child_state)
if state.always_child_state is not None:
return self.is_any_block_rescuing(state.always_child_state)
return False
def _insert_tasks_into_state(self, state, task_list):
# if we've failed at all, or if the task list is empty, just return the current state
if (state.fail_state != FailedStates.NONE and state.run_state == IteratingStates.TASKS) or not task_list:
return state
if state.run_state == IteratingStates.TASKS:
if state.tasks_child_state:
state.tasks_child_state = self._insert_tasks_into_state(state.tasks_child_state, task_list)
else:
target_block = state._blocks[state.cur_block].copy()
before = target_block.block[:state.cur_regular_task]
after = target_block.block[state.cur_regular_task:]
target_block.block = before + task_list + after
state._blocks[state.cur_block] = target_block
elif state.run_state == IteratingStates.RESCUE:
if state.rescue_child_state:
state.rescue_child_state = self._insert_tasks_into_state(state.rescue_child_state, task_list)
else:
target_block = state._blocks[state.cur_block].copy()
before = target_block.rescue[:state.cur_rescue_task]
after = target_block.rescue[state.cur_rescue_task:]
target_block.rescue = before + task_list + after
state._blocks[state.cur_block] = target_block
elif state.run_state == IteratingStates.ALWAYS:
if state.always_child_state:
state.always_child_state = self._insert_tasks_into_state(state.always_child_state, task_list)
else:
target_block = state._blocks[state.cur_block].copy()
before = target_block.always[:state.cur_always_task]
after = target_block.always[state.cur_always_task:]
target_block.always = before + task_list + after
state._blocks[state.cur_block] = target_block
elif state.run_state == IteratingStates.HANDLERS:
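            # included handler blocks are flattened and spliced into the host's
            # current handler list at the current position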
state.handlers[state.cur_handlers_task:state.cur_handlers_task] = [h for b in task_list for h in b.block]
return state
def add_tasks(self, host, task_list):
self.set_state_for_host(host.name, self._insert_tasks_into_state(self.get_host_state(host), task_list))
@property
def host_states(self):
return self._host_states
def get_state_for_host(self, hostname: str) -> HostState:
return self._host_states[hostname]
def set_state_for_host(self, hostname: str, state: HostState) -> None:
if not isinstance(state, HostState):
raise AnsibleAssertionError('Expected state to be a HostState but was a %s' % type(state))
self._host_states[hostname] = state
def set_run_state_for_host(self, hostname: str, run_state: IteratingStates) -> None:
if not isinstance(run_state, IteratingStates):
raise AnsibleAssertionError('Expected run_state to be a IteratingStates but was %s' % (type(run_state)))
self._host_states[hostname].run_state = run_state
def set_fail_state_for_host(self, hostname: str, fail_state: FailedStates) -> None:
if not isinstance(fail_state, FailedStates):
raise AnsibleAssertionError('Expected fail_state to be a FailedStates but was %s' % (type(fail_state)))
self._host_states[hostname].fail_state = fail_state
def add_notification(self, hostname: str, notification: str) -> None:
# preserve order
host_state = self._host_states[hostname]
if notification not in host_state.handler_notifications:
host_state.handler_notifications.append(notification)
def clear_notification(self, hostname: str, notification: str) -> None:
self._host_states[hostname].handler_notifications.remove(notification)
```
status: closed
repo_name: ansible/ansible
repo_url: https://github.com/ansible/ansible
issue_id: 81532
title: Handler triggered in block does not run rescue/always tasks
body: (same as the first row)
issue_url: https://github.com/ansible/ansible/issues/81532
pull_url: https://github.com/ansible/ansible/pull/81572
before_fix_sha: 9c09ed73928272f898d18a2eada21f7357b418e4
after_fix_sha: a8b6ef7e7cbabaf87e57ea7df9df75eb7e7d1ab5
report_datetime: 2023-08-17T14:50:08Z
language: python
commit_datetime: 2023-11-13T08:57:43Z
updated_file: test/integration/targets/handlers/nested_flush_handlers_failure_force.yml
file_content: (empty)
status: closed
repo_name: ansible/ansible
repo_url: https://github.com/ansible/ansible
issue_id: 81532
title: Handler triggered in block does not run rescue/always tasks
body: (same as the first row)
become: false
gather_facts: false
hosts: all
tasks:
- block:
- name: Block debug
debug:
msg: "debug 1 - notify handler in block"
changed_when: True
notify: Handler
- meta: flush_handlers
rescue:
- name: Rescue debug
debug:
msg: "debug 2 - rescue failed hosts"
always:
- name: Always debug
debug:
msg: "debug 3 - run on all hosts"
handlers:
- name: Handler
fail:
when: inventory_hostname == 'host1'
```
### Expected Results
The handler triggered in the block should fail on host1.
The rescue task should run on host1 and the host should be marked as rescued instead of failed.
The always task should run on all hosts (also on the failed host1).
```console
# Ansible 2.12.5
$ ansible-playbook test.yml -i inventory.yml -l host1,host2 -D
PLAY [Demo handler in block rescue/always error] ******************************************************
TASK [Block debug] ************************************************************************************
changed: [host1] => {
"msg": "debug 1 - notify handler in block"
}
changed: [host2] => {
"msg": "debug 1 - notify handler in block"
}
TASK [meta] *******************************************************************************************
RUNNING HANDLER [Handler] *****************************************************************************
fatal: [host1]: FAILED! => {"changed": false, "msg": "Failed as requested from task"}
skipping: [host2]
TASK [Rescue debug] ***********************************************************************************
ok: [host1] => {
"msg": "debug 2 - rescue failed hosts"
}
TASK [Always debug] ***********************************************************************************
ok: [host1] => {
"msg": "debug 3 - run on all hosts"
}
ok: [host2] => {
"msg": "debug 3 - run on all hosts"
}
PLAY RECAP ********************************************************************************************
host1 : ok=3 changed=1 unreachable=0 failed=0 skipped=0 rescued=1 ignored=0
host2 : ok=2 changed=1 unreachable=0 failed=0 skipped=1 rescued=0 ignored=0
```
### Actual Results
```console
# Ansible 2.15.3
$ ansible-playbook test.yml -i inventory.yml -l host1,host2 -D
PLAY [Demo handler in block rescue/always error] ******************************************************
TASK [Block debug] ************************************************************************************
changed: [host1] => {
"msg": "debug 1 - notify handler in block"
}
changed: [host2] => {
"msg": "debug 1 - notify handler in block"
}
TASK [meta] *******************************************************************************************
TASK [meta] *******************************************************************************************
RUNNING HANDLER [Handler] *****************************************************************************
fatal: [host1]: FAILED! => {"changed": false, "msg": "Failed as requested from task"}
skipping: [host2]
TASK [Always debug] ***********************************************************************************
ok: [host2] => {
"msg": "debug 3 - run on all hosts"
}
PLAY RECAP ********************************************************************************************
host1 : ok=1 changed=1 unreachable=0 failed=0 skipped=0 rescued=1 ignored=0
host2 : ok=2 changed=1 unreachable=0 failed=0 skipped=1 rescued=0 ignored=0
```
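Until this is resolved, one possible interim workaround (an untested sketch added here, not taken from the original report) is to run the failure-prone step as a regular task inside the block instead of notifying a handler and flushing it there, so that its failure is caught by `rescue:`/`always:` like any other task failure:
```yaml
- block:
    - name: Step formerly notified as a handler
      fail:
      when: inventory_hostname == 'host1'
  rescue:
    - name: Rescue debug
      debug:
        msg: "debug 2 - rescue failed hosts"
  always:
    - name: Always debug
      debug:
        msg: "debug 3 - run on all hosts"
```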
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81532
|
https://github.com/ansible/ansible/pull/81572
|
9c09ed73928272f898d18a2eada21f7357b418e4
|
a8b6ef7e7cbabaf87e57ea7df9df75eb7e7d1ab5
| 2023-08-17T14:50:08Z |
python
| 2023-11-13T08:57:43Z |
test/integration/targets/handlers/runme.sh
|
#!/usr/bin/env bash
set -eux
export ANSIBLE_FORCE_HANDLERS
ANSIBLE_FORCE_HANDLERS=false
# simple handler test
ansible-playbook test_handlers.yml -i inventory.handlers -v "$@" --tags scenario1
# simple from_handlers test
ansible-playbook from_handlers.yml -i inventory.handlers -v "$@" --tags scenario1
ansible-playbook test_listening_handlers.yml -i inventory.handlers -v "$@"
[ "$(ansible-playbook test_handlers.yml -i inventory.handlers -v "$@" --tags scenario2 -l A \
| grep -E -o 'RUNNING HANDLER \[test_handlers : .*]')" = "RUNNING HANDLER [test_handlers : test handler]" ]
# Test forcing handlers using the linear and free strategy
for strategy in linear free; do
export ANSIBLE_STRATEGY=$strategy
# Not forcing, should only run on successful host
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_B" ]
# Forcing from command line
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal --force-handlers \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_A CALLED_HANDLER_B" ]
# Forcing from command line, should only run later tasks on unfailed hosts
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal --force-handlers \
| grep -E -o CALLED_TASK_. | sort | uniq | xargs)" = "CALLED_TASK_B CALLED_TASK_D CALLED_TASK_E" ]
# Forcing from command line, should call handlers even if all hosts fail
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal --force-handlers -e fail_all=yes \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_A CALLED_HANDLER_B" ]
# Forcing from ansible.cfg
[ "$(ANSIBLE_FORCE_HANDLERS=true ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_A CALLED_HANDLER_B" ]
# Forcing true in play
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags force_true_in_play \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_A CALLED_HANDLER_B" ]
# Forcing false in play, which overrides command line
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags force_false_in_play --force-handlers \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_B" ]
# https://github.com/ansible/ansible/pull/80898
[ "$(ansible-playbook 80880.yml -i inventory.handlers -vv "$@" 2>&1)" ]
unset ANSIBLE_STRATEGY
done
[ "$(ansible-playbook test_handlers_include.yml -i ../../inventory -v "$@" --tags playbook_include_handlers \
| grep -E -o 'RUNNING HANDLER \[.*]')" = "RUNNING HANDLER [test handler]" ]
[ "$(ansible-playbook test_handlers_include.yml -i ../../inventory -v "$@" --tags role_include_handlers \
| grep -E -o 'RUNNING HANDLER \[test_handlers_include : .*]')" = "RUNNING HANDLER [test_handlers_include : test handler]" ]
[ "$(ansible-playbook test_handlers_include_role.yml -i ../../inventory -v "$@" \
| grep -E -o 'RUNNING HANDLER \[test_handlers_include_role : .*]')" = "RUNNING HANDLER [test_handlers_include_role : test handler]" ]
# Notify handler listen
ansible-playbook test_handlers_listen.yml -i inventory.handlers -v "$@"
# Notifying inexistent handlers results in an error
set +e
result="$(ansible-playbook test_handlers_inexistent_notify.yml -i inventory.handlers "$@" 2>&1)"
set -e
grep -q "ERROR! The requested handler 'notify_inexistent_handler' was not found in either the main handlers list nor in the listening handlers list" <<< "$result"
# Notifying inexistent handlers produces no error when ANSIBLE_ERROR_ON_MISSING_HANDLER=false
ANSIBLE_ERROR_ON_MISSING_HANDLER=false ansible-playbook test_handlers_inexistent_notify.yml -i inventory.handlers -v "$@"
ANSIBLE_ERROR_ON_MISSING_HANDLER=false ansible-playbook test_templating_in_handlers.yml -v "$@"
# https://github.com/ansible/ansible/issues/36649
output_dir=/tmp
set +e
result="$(ansible-playbook test_handlers_any_errors_fatal.yml -e output_dir=$output_dir -i inventory.handlers -v "$@" 2>&1)"
set -e
[ ! -f $output_dir/should_not_exist_B ] || (rm -f $output_dir/should_not_exist_B && exit 1)
# https://github.com/ansible/ansible/issues/47287
[ "$(ansible-playbook test_handlers_including_task.yml -i ../../inventory -v "$@" | grep -E -o 'failed=[0-9]+')" = "failed=0" ]
# https://github.com/ansible/ansible/issues/71222
ansible-playbook test_role_handlers_including_tasks.yml -i ../../inventory -v "$@"
# https://github.com/ansible/ansible/issues/27237
set +e
result="$(ansible-playbook test_handlers_template_run_once.yml -i inventory.handlers "$@" 2>&1)"
set -e
grep -q "handler A" <<< "$result"
grep -q "handler B" <<< "$result"
# Test an undefined variable in another handler name isn't a failure
ansible-playbook 58841.yml "$@" --tags lazy_evaluation 2>&1 | tee out.txt ; cat out.txt
grep out.txt -e "\[WARNING\]: Handler 'handler name with {{ test_var }}' is unusable"
[ "$(grep out.txt -ce 'handler ran')" = "1" ]
[ "$(grep out.txt -ce 'handler with var ran')" = "0" ]
# Test templating a handler name with a defined variable
ansible-playbook 58841.yml "$@" --tags evaluation_time -e test_var=myvar | tee out.txt ; cat out.txt
[ "$(grep out.txt -ce 'handler ran')" = "0" ]
[ "$(grep out.txt -ce 'handler with var ran')" = "1" ]
# Test the handler is not found when the variable is undefined
ansible-playbook 58841.yml "$@" --tags evaluation_time 2>&1 | tee out.txt ; cat out.txt
grep out.txt -e "ERROR! The requested handler 'handler name with myvar' was not found"
grep out.txt -e "\[WARNING\]: Handler 'handler name with {{ test_var }}' is unusable"
[ "$(grep out.txt -ce 'handler ran')" = "0" ]
[ "$(grep out.txt -ce 'handler with var ran')" = "0" ]
# Test include_role and import_role cannot be used as handlers
ansible-playbook test_role_as_handler.yml "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! Using 'include_role' as a handler is not supported."
# Test notifying a handler from within include_tasks does not work anymore
ansible-playbook test_notify_included.yml "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'I was included')" = "1" ]
grep out.txt -e "ERROR! The requested handler 'handler_from_include' was not found in either the main handlers list nor in the listening handlers list"
ansible-playbook test_handlers_meta.yml -i inventory.handlers -vv "$@" | tee out.txt
[ "$(grep out.txt -ce 'RUNNING HANDLER \[noop_handler\]')" = "1" ]
[ "$(grep out.txt -ce 'META: noop')" = "1" ]
# https://github.com/ansible/ansible/issues/46447
set +e
test "$(ansible-playbook 46447.yml -i inventory.handlers -vv "$@" 2>&1 | grep -c 'SHOULD NOT GET HERE')"
set -e
# https://github.com/ansible/ansible/issues/52561
ansible-playbook 52561.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler1 ran')" = "1" ]
# Test flush_handlers meta task does not imply any_errors_fatal
ansible-playbook 54991.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran')" = "4" ]
ansible-playbook order.yml -i inventory.handlers "$@" 2>&1
set +e
ansible-playbook order.yml --force-handlers -e test_force_handlers=true -i inventory.handlers "$@" 2>&1
set -e
ansible-playbook include_handlers_fail_force.yml --force-handlers -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'included handler ran')" = "1" ]
ansible-playbook test_flush_handlers_as_handler.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! flush_handlers cannot be used as a handler"
ansible-playbook test_skip_flush.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran')" = "0" ]
ansible-playbook test_flush_in_rescue_always.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran in rescue')" = "1" ]
[ "$(grep out.txt -ce 'handler ran in always')" = "2" ]
[ "$(grep out.txt -ce 'lockstep works')" = "2" ]
ansible-playbook test_handlers_infinite_loop.yml -i inventory.handlers "$@" 2>&1
ansible-playbook test_flush_handlers_rescue_always.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'rescue ran')" = "1" ]
[ "$(grep out.txt -ce 'always ran')" = "2" ]
[ "$(grep out.txt -ce 'should run for both hosts')" = "2" ]
ansible-playbook test_fqcn_meta_flush_handlers.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
grep out.txt -e "handler ran"
grep out.txt -e "after flush"
ansible-playbook 79776.yml -i inventory.handlers "$@"
ansible-playbook test_block_as_handler.yml "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! Using a block as a handler is not supported."
ansible-playbook test_block_as_handler-include.yml "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! Using a block as a handler is not supported."
ansible-playbook test_block_as_handler-import.yml "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! Using a block as a handler is not supported."
ansible-playbook test_include_role_handler_once.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran')" = "1" ]
ansible-playbook test_listen_role_dedup.yml "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'a handler from a role')" = "1" ]
ansible localhost -m include_role -a "name=r1-dep_chain-vars" "$@"
ansible-playbook test_include_tasks_in_include_role.yml "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran')" = "1" ]
ansible-playbook test_run_once.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran once')" = "1" ]
ansible-playbook force_handlers_blocks_81533-1.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'task1')" = "1" ]
[ "$(grep out.txt -ce 'task2')" = "1" ]
[ "$(grep out.txt -ce 'hosts_left')" = "1" ]
ansible-playbook force_handlers_blocks_81533-2.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'hosts_left')" = "1" ]
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,142 |
Cannot use ansible.builtin.copy with /bin/ansible-config list -c %s
|
### Summary
I should be able to manage the `/etc/ansible/ansible.cfg` file on my ansible control host with ansible. So, when I try this:
```
- name: Install ansible hosts and ansible.cfg
copy:
src: "{{ file_directory }}/etc/ansible/{{ item.file }}"
dest: '/etc/ansible/{{ item.file }}'
owner: root
group: root
mode: '0644'
backup: yes
force: yes
validate: "{{ item.validate }}"
loop:
- file: hosts
validate: /bin/ansible-inventory --list --inventory %s
- file: ansible.cfg
validate: /bin/ansible-config list -c %s
```
the validation fails because the file indicated by %s does not have a .cfg extension.
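As an interim workaround (an untested sketch added here, not part of the original report), the `validate` command can be wrapped in a shell call that first copies the temporary file to a path ending in `.cfg`, since `ansible-config` rejects files without a recognised extension; the `/tmp/ansible_validate.cfg` path below is an arbitrary, hypothetical choice:
```yaml
- file: ansible.cfg
  # stage the candidate under a *.cfg name before validating (untested sketch)
  validate: /bin/sh -c 'cp %s /tmp/ansible_validate.cfg && /bin/ansible-config list -c /tmp/ansible_validate.cfg'
```
This leaves a stray file in `/tmp` and is not safe for concurrent runs, so it is only meant as a stopgap.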
### Issue Type
Bug Report
### Component Name
copy
### Ansible Version
```console
$ ansible --version
ansible [core 2.14.10]
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/steve/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.11/site-packages/ansible
ansible collection location = /home/steve/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/bin/ansible
python version = 3.11.5 (main, Aug 28 2023, 00:00:00) [GCC 13.2.1 20230728 (Red Hat 13.2.1-1)] (/usr/bin/python3)
jinja version = 3.0.3
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = /etc/ansible/ansible.cfg
```
### OS / Environment
Fedora 38
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
- name: Install ansible hosts and ansible.cfg
copy:
src: "{{ file_directory }}/etc/ansible/{{ item.file }}"
dest: '/etc/ansible/{{ item.file }}'
owner: root
group: root
mode: '0644'
backup: yes
force: yes
validate: "{{ item.validate }}"
loop:
- file: hosts
validate: /bin/ansible-inventory --list --inventory %s
- file: ansible.cfg
validate: /bin/ansible-config list -c %s
```
### Expected Results
I expected ansible-config to behave similarly to ansible-inventory, or, failing that, to (a) return a distinct exit code for the file-extension "error" (not 5) and (b) check the file's content before rejecting it based on its extension.
### Actual Results
```console
[steve@jabberwock ~]$ ansible-playbook -i shared/ansible/server_IaC/files/arthur/etc/ansible/hosts temp.yml
PLAY [arthur] ****************************************************************************************************************************
TASK [Install ansible hosts and ansible.cfg] *********************************************************************************************
ok: [arthur] => (item={'file': 'hosts', 'validate': '/bin/ansible-inventory --list --inventory %s'})
failed: [arthur] (item={'file': 'ansible.cfg', 'validate': '/bin/ansible-config list -c %s'}) => {"ansible_loop_var": "item", "changed": false, "checksum": "d17e17e9639b5df25890b6ecbab867b9e329f40f", "exit_status": 5, "item": {"file": "ansible.cfg", "validate": "/bin/ansible-config list -c %s"}, "msg": "failed to validate", "stderr": "ERROR! Unsupported configuration file extension for /home/ansible/.ansible/tmp/ansible-tmp-1699292001.685196-111356-55146881241278/source: \n", "stderr_lines": ["ERROR! Unsupported configuration file extension for /home/ansible/.ansible/tmp/ansible-tmp-1699292001.685196-111356-55146881241278/source: "], "stdout": "usage: ansible-config [-h] [--version] [-v] {list,dump,view,init} ...\n\nView ansible configuration.\n\npositional arguments:\n {list,dump,view,init}\n list Print all config options\n dump Dump configuration\n view View configuration file\n init Create initial configuration\n\noptions:\n --version show program's version number, config file location,\n configured module search path, module location,\n executable location and exit\n -h, --help show this help message and exit\n -v, --verbose Causes Ansible to print more debug messages. Adding\n multiple -v will increase the verbosity, the builtin\n plugins currently evaluate up to -vvvvvv. A reasonable\n level to start is -vvv, connection debugging might\n require -vvvv.\n", "stdout_lines": ["usage: ansible-config [-h] [--version] [-v] {list,dump,view,init} ...", "", "View ansible configuration.", "", "positional arguments:", " {list,dump,view,init}", " list Print all config options", " dump Dump configuration", " view View configuration file", " init Create initial configuration", "", "options:", " --version show program's version number, config file location,", " configured module search path, module location,", " executable location and exit", " -h, --help show this help message and exit", " -v, --verbose Causes Ansible to print more debug messages. Adding", " multiple -v will increase the verbosity, the builtin", " plugins currently evaluate up to -vvvvvv. A reasonable", " level to start is -vvv, connection debugging might", " require -vvvv."]}
PLAY RECAP *******************************************************************************************************************************
arthur : ok=0 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82142
|
https://github.com/ansible/ansible/pull/82158
|
a8b6ef7e7cbabaf87e57ea7df9df75eb7e7d1ab5
|
4a84a9b3db47028c621d04cda8b2d3a3190173cd
| 2023-11-06T17:39:49Z |
python
| 2023-11-13T15:03:58Z |
changelogs/fragments/copy_keep_suffix_temp.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,142 |
Cannot use ansible.builtin.copy with /bin/ansible-config list -c %s
|
### Summary
I should be able to manage the `/etc/ansible/ansible.cfg` file on my ansible control host with ansible. So, when I try this:
```
- name: Install ansible hosts and ansible.cfg
copy:
src: "{{ file_directory }}/etc/ansible/{{ item.file }}"
dest: '/etc/ansible/{{ item.file }}'
owner: root
group: root
mode: '0644'
backup: yes
force: yes
validate: "{{ item.validate }}"
loop:
- file: hosts
validate: /bin/ansible-inventory --list --inventory %s
- file: ansible.cfg
validate: /bin/ansible-config list -c %s
```
the validation fails because the file indicated by %s does not have a .cfg extension.
### Issue Type
Bug Report
### Component Name
copy
### Ansible Version
```console
$ ansible --version
ansible [core 2.14.10]
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/steve/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.11/site-packages/ansible
ansible collection location = /home/steve/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/bin/ansible
python version = 3.11.5 (main, Aug 28 2023, 00:00:00) [GCC 13.2.1 20230728 (Red Hat 13.2.1-1)] (/usr/bin/python3)
jinja version = 3.0.3
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = /etc/ansible/ansible.cfg
```
### OS / Environment
Fedora 38
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
- name: Install ansible hosts and ansible.cfg
copy:
src: "{{ file_directory }}/etc/ansible/{{ item.file }}"
dest: '/etc/ansible/{{ item.file }}'
owner: root
group: root
mode: '0644'
backup: yes
force: yes
validate: "{{ item.validate }}"
loop:
- file: hosts
validate: /bin/ansible-inventory --list --inventory %s
- file: ansible.cfg
validate: /bin/ansible-config list -c %s
```
### Expected Results
I expected ansible-config to behave similarly to ansible-inventory, or, failing that, to (a) return a distinct exit code for the file-extension "error" (not 5) and (b) check the file's content before rejecting it based on its extension.
### Actual Results
```console
[steve@jabberwock ~]$ ansible-playbook -i shared/ansible/server_IaC/files/arthur/etc/ansible/hosts temp.yml
PLAY [arthur] ****************************************************************************************************************************
TASK [Install ansible hosts and ansible.cfg] *********************************************************************************************
ok: [arthur] => (item={'file': 'hosts', 'validate': '/bin/ansible-inventory --list --inventory %s'})
failed: [arthur] (item={'file': 'ansible.cfg', 'validate': '/bin/ansible-config list -c %s'}) => {"ansible_loop_var": "item", "changed": false, "checksum": "d17e17e9639b5df25890b6ecbab867b9e329f40f", "exit_status": 5, "item": {"file": "ansible.cfg", "validate": "/bin/ansible-config list -c %s"}, "msg": "failed to validate", "stderr": "ERROR! Unsupported configuration file extension for /home/ansible/.ansible/tmp/ansible-tmp-1699292001.685196-111356-55146881241278/source: \n", "stderr_lines": ["ERROR! Unsupported configuration file extension for /home/ansible/.ansible/tmp/ansible-tmp-1699292001.685196-111356-55146881241278/source: "], "stdout": "usage: ansible-config [-h] [--version] [-v] {list,dump,view,init} ...\n\nView ansible configuration.\n\npositional arguments:\n {list,dump,view,init}\n list Print all config options\n dump Dump configuration\n view View configuration file\n init Create initial configuration\n\noptions:\n --version show program's version number, config file location,\n configured module search path, module location,\n executable location and exit\n -h, --help show this help message and exit\n -v, --verbose Causes Ansible to print more debug messages. Adding\n multiple -v will increase the verbosity, the builtin\n plugins currently evaluate up to -vvvvvv. A reasonable\n level to start is -vvv, connection debugging might\n require -vvvv.\n", "stdout_lines": ["usage: ansible-config [-h] [--version] [-v] {list,dump,view,init} ...", "", "View ansible configuration.", "", "positional arguments:", " {list,dump,view,init}", " list Print all config options", " dump Dump configuration", " view View configuration file", " init Create initial configuration", "", "options:", " --version show program's version number, config file location,", " configured module search path, module location,", " executable location and exit", " -h, --help show this help message and exit", " -v, --verbose Causes Ansible to print more debug messages. Adding", " multiple -v will increase the verbosity, the builtin", " plugins currently evaluate up to -vvvvvv. A reasonable", " level to start is -vvv, connection debugging might", " require -vvvv."]}
PLAY RECAP *******************************************************************************************************************************
arthur : ok=0 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82142
|
https://github.com/ansible/ansible/pull/82158
|
a8b6ef7e7cbabaf87e57ea7df9df75eb7e7d1ab5
|
4a84a9b3db47028c621d04cda8b2d3a3190173cd
| 2023-11-06T17:39:49Z |
python
| 2023-11-13T15:03:58Z |
lib/ansible/plugins/action/copy.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
# (c) 2017 Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import json
import os
import os.path
import stat
import tempfile
import traceback
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFileNotFound
from ansible.module_utils.basic import FILE_COMMON_ARGUMENTS
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.action import ActionBase
from ansible.utils.hashing import checksum
# Supplement the FILE_COMMON_ARGUMENTS with arguments that are specific to file
REAL_FILE_ARGS = frozenset(FILE_COMMON_ARGUMENTS.keys()).union(
('state', 'path', '_original_basename', 'recurse', 'force',
'_diff_peek', 'src'))
def _create_remote_file_args(module_args):
"""remove keys that are not relevant to file"""
return dict((k, v) for k, v in module_args.items() if k in REAL_FILE_ARGS)
def _create_remote_copy_args(module_args):
"""remove action plugin only keys"""
return dict((k, v) for k, v in module_args.items() if k not in ('content', 'decrypt'))
def _walk_dirs(topdir, base_path=None, local_follow=False, trailing_slash_detector=None):
"""
Walk a filesystem tree returning enough information to copy the files
:arg topdir: The directory that the filesystem tree is rooted at
:kwarg base_path: The initial directory structure to strip off of the
files for the destination directory. If this is None (the default),
the base_path is set to ``top_dir``.
:kwarg local_follow: Whether to follow symlinks on the source. When set
to False, no symlinks are dereferenced. When set to True (the
default), the code will dereference most symlinks. However, symlinks
can still be present if needed to break a circular link.
:kwarg trailing_slash_detector: Function to determine if a path has
a trailing directory separator. Only needed when dealing with paths on
a remote machine (in which case, pass in a function that is aware of the
directory separator conventions on the remote machine).
:returns: dictionary of tuples. All of the path elements in the structure are text strings.
This separates all the files, directories, and symlinks along with
important information about each::
{ 'files': [('/absolute/path/to/copy/from', 'relative/path/to/copy/to'), ...],
'directories': [('/absolute/path/to/copy/from', 'relative/path/to/copy/to'), ...],
'symlinks': [('/symlink/target/path', 'relative/path/to/copy/to'), ...],
}
The ``symlinks`` field is only populated if ``local_follow`` is set to False
*or* a circular symlink cannot be dereferenced.
"""
# Convert the path segments into byte strings
r_files = {'files': [], 'directories': [], 'symlinks': []}
def _recurse(topdir, rel_offset, parent_dirs, rel_base=u''):
"""
This is a closure (function utilizing variables from its parent
function's scope) so that we only need one copy of all the containers.
Note that this function uses side effects (See the Variables used from
outer scope).
:arg topdir: The directory we are walking for files
:arg rel_offset: Integer defining how many characters to strip off of
the beginning of a path
:arg parent_dirs: Directories that we're copying that this directory is in.
:kwarg rel_base: String to prepend to the path after ``rel_offset`` is
applied to form the relative path.
Variables used from the outer scope
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:r_files: Dictionary of files in the hierarchy. See the return value
for :func:`walk` for the structure of this dictionary.
:local_follow: Read-only inside of :func:`_recurse`. Whether to follow symlinks
"""
for base_path, sub_folders, files in os.walk(topdir):
for filename in files:
filepath = os.path.join(base_path, filename)
dest_filepath = os.path.join(rel_base, filepath[rel_offset:])
if os.path.islink(filepath):
# Dereference the symlink
real_file = os.path.realpath(filepath)
if local_follow and os.path.isfile(real_file):
# Add the file pointed to by the symlink
r_files['files'].append((real_file, dest_filepath))
else:
# Mark this file as a symlink to copy
r_files['symlinks'].append((os.readlink(filepath), dest_filepath))
else:
# Just a normal file
r_files['files'].append((filepath, dest_filepath))
for dirname in sub_folders:
dirpath = os.path.join(base_path, dirname)
dest_dirpath = os.path.join(rel_base, dirpath[rel_offset:])
real_dir = os.path.realpath(dirpath)
dir_stats = os.stat(real_dir)
if os.path.islink(dirpath):
if local_follow:
if (dir_stats.st_dev, dir_stats.st_ino) in parent_dirs:
# Just insert the symlink if the target directory
# exists inside of the copy already
r_files['symlinks'].append((os.readlink(dirpath), dest_dirpath))
else:
# Walk the dirpath to find all parent directories.
new_parents = set()
parent_dir_list = os.path.dirname(dirpath).split(os.path.sep)
for parent in range(len(parent_dir_list), 0, -1):
parent_stat = os.stat(u'/'.join(parent_dir_list[:parent]))
if (parent_stat.st_dev, parent_stat.st_ino) in parent_dirs:
# Reached the point at which the directory
# tree is already known. Don't add any
# more or we might go to an ancestor that
# isn't being copied.
break
new_parents.add((parent_stat.st_dev, parent_stat.st_ino))
if (dir_stats.st_dev, dir_stats.st_ino) in new_parents:
# This was a circular symlink. So add it as
# a symlink
r_files['symlinks'].append((os.readlink(dirpath), dest_dirpath))
else:
# Walk the directory pointed to by the symlink
r_files['directories'].append((real_dir, dest_dirpath))
offset = len(real_dir) + 1
_recurse(real_dir, offset, parent_dirs.union(new_parents), rel_base=dest_dirpath)
else:
# Add the symlink to the destination
r_files['symlinks'].append((os.readlink(dirpath), dest_dirpath))
else:
# Just a normal directory
r_files['directories'].append((dirpath, dest_dirpath))
# Check if the source ends with a "/" so that we know which directory
# level to work at (similar to rsync)
source_trailing_slash = False
if trailing_slash_detector:
source_trailing_slash = trailing_slash_detector(topdir)
else:
source_trailing_slash = topdir.endswith(os.path.sep)
# Calculate the offset needed to strip the base_path to make relative
# paths
if base_path is None:
base_path = topdir
if not source_trailing_slash:
base_path = os.path.dirname(base_path)
if topdir.startswith(base_path):
offset = len(base_path)
# Make sure we're making the new paths relative
if trailing_slash_detector and not trailing_slash_detector(base_path):
offset += 1
elif not base_path.endswith(os.path.sep):
offset += 1
if os.path.islink(topdir) and not local_follow:
r_files['symlinks'] = [(os.readlink(topdir), os.path.basename(topdir))]
return r_files
dir_stats = os.stat(topdir)
parents = frozenset(((dir_stats.st_dev, dir_stats.st_ino),))
# Actually walk the directory hierarchy
_recurse(topdir, offset, parents)
return r_files
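# Illustrative note (added for this document, not part of the upstream module):
# for a small tree, _walk_dirs() might return something like
#   {'files':       [('/src/app/a.conf', 'app/a.conf')],
#    'directories': [('/src/app/conf.d', 'app/conf.d')],
#    'symlinks':    [('latest.conf', 'app/link.conf')]}
# ActionModule.run() below copies every 'files' tuple, creates the leaf
# 'directories' via the file module, and recreates 'symlinks' via the file
# module with state=link.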
class ActionModule(ActionBase):
TRANSFERS_FILES = True
def _ensure_invocation(self, result):
# NOTE: adding invocation arguments here needs to be kept in sync with
# any no_log specified in the argument_spec in the module.
# This is not automatic.
# NOTE: do not add to this. This should be made a generic function for action plugins.
# This should also use the same argspec as the module instead of keeping it in sync.
if 'invocation' not in result:
if self._play_context.no_log:
result['invocation'] = "CENSORED: no_log is set"
else:
# NOTE: Should be removed in the future. For now keep this broken
# behaviour, have a look in the PR 51582
result['invocation'] = self._task.args.copy()
result['invocation']['module_args'] = self._task.args.copy()
if isinstance(result['invocation'], dict):
if 'content' in result['invocation']:
result['invocation']['content'] = 'CENSORED: content is a no_log parameter'
if result['invocation'].get('module_args', {}).get('content') is not None:
result['invocation']['module_args']['content'] = 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
return result
def _copy_file(self, source_full, source_rel, content, content_tempfile,
dest, task_vars, follow):
decrypt = boolean(self._task.args.get('decrypt', True), strict=False)
force = boolean(self._task.args.get('force', 'yes'), strict=False)
raw = boolean(self._task.args.get('raw', 'no'), strict=False)
result = {}
result['diff'] = []
# If the local file does not exist, get_real_file() raises AnsibleFileNotFound
try:
source_full = self._loader.get_real_file(source_full, decrypt=decrypt)
except AnsibleFileNotFound as e:
result['failed'] = True
result['msg'] = "could not find src=%s, %s" % (source_full, to_text(e))
return result
# Get the local mode and set if user wanted it preserved
# https://github.com/ansible/ansible-modules-core/issues/1124
lmode = None
if self._task.args.get('mode', None) == 'preserve':
lmode = '0%03o' % stat.S_IMODE(os.stat(source_full).st_mode)
# This is kind of optimization - if user told us destination is
# dir, do path manipulation right away, otherwise we still check
# for dest being a dir via remote call below.
if self._connection._shell.path_has_trailing_slash(dest):
dest_file = self._connection._shell.join_path(dest, source_rel)
else:
dest_file = dest
# Attempt to get remote file info
dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, checksum=force)
if dest_status['exists'] and dest_status['isdir']:
# The dest is a directory.
if content is not None:
# If source was defined as content remove the temporary file and fail out.
self._remove_tempfile_if_content_defined(content, content_tempfile)
result['failed'] = True
result['msg'] = "can not use content with a dir as dest"
return result
else:
# Append the relative source location to the destination and get remote stats again
dest_file = self._connection._shell.join_path(dest, source_rel)
dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, checksum=force)
if dest_status['exists'] and not force:
# remote_file exists so continue to next iteration.
return None
# Generate a hash of the local file.
local_checksum = checksum(source_full)
if local_checksum != dest_status['checksum']:
# The checksums don't match and we will change or error out.
if self._play_context.diff and not raw:
result['diff'].append(self._get_diff_data(dest_file, source_full, task_vars, content))
if self._play_context.check_mode:
self._remove_tempfile_if_content_defined(content, content_tempfile)
result['changed'] = True
return result
# Define a remote directory that we will copy the file to.
tmp_src = self._connection._shell.join_path(self._connection._shell.tmpdir, 'source')
remote_path = None
if not raw:
remote_path = self._transfer_file(source_full, tmp_src)
else:
self._transfer_file(source_full, dest_file)
# We have copied the file remotely and no longer require our content_tempfile
self._remove_tempfile_if_content_defined(content, content_tempfile)
self._loader.cleanup_tmp_file(source_full)
# FIXME: I don't think this is needed when PIPELINING=0 because the source is created
# world readable. Access to the directory itself is controlled via fixup_perms2() as
# part of executing the module. Check that umask with scp/sftp/piped doesn't cause
# a problem before acting on this idea. (This idea would save a round-trip)
# fix file permissions when the copy is done as a different user
if remote_path:
self._fixup_perms2((self._connection._shell.tmpdir, remote_path))
if raw:
# Continue to next iteration if raw is defined.
return None
# Run the copy module
# src and dest here come after original and override them
# we pass dest only to make sure it includes trailing slash in case of recursive copy
new_module_args = _create_remote_copy_args(self._task.args)
new_module_args.update(
dict(
src=tmp_src,
dest=dest,
_original_basename=source_rel,
follow=follow
)
)
if not self._task.args.get('checksum'):
new_module_args['checksum'] = local_checksum
if lmode:
new_module_args['mode'] = lmode
module_return = self._execute_module(module_name='ansible.legacy.copy', module_args=new_module_args, task_vars=task_vars)
else:
# no need to transfer the file, already correct hash, but still need to call
# the file module in case we want to change attributes
self._remove_tempfile_if_content_defined(content, content_tempfile)
self._loader.cleanup_tmp_file(source_full)
if raw:
return None
# Fix for https://github.com/ansible/ansible-modules-core/issues/1568.
# If checksums match, and follow = True, find out if 'dest' is a link. If so,
# change it to point to the source of the link.
if follow:
dest_status_nofollow = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=False)
if dest_status_nofollow['islnk'] and 'lnk_source' in dest_status_nofollow.keys():
dest = dest_status_nofollow['lnk_source']
# Build temporary module_args.
new_module_args = _create_remote_file_args(self._task.args)
new_module_args.update(
dict(
dest=dest,
_original_basename=source_rel,
recurse=False,
state='file',
)
)
# src is sent to the file module in _original_basename, not in src
try:
del new_module_args['src']
except KeyError:
pass
if lmode:
new_module_args['mode'] = lmode
# Execute the file module.
module_return = self._execute_module(module_name='ansible.legacy.file', module_args=new_module_args, task_vars=task_vars)
if not module_return.get('checksum'):
module_return['checksum'] = local_checksum
result.update(module_return)
return result
def _create_content_tempfile(self, content):
''' Create a tempfile containing defined content '''
fd, content_tempfile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
f = os.fdopen(fd, 'wb')
content = to_bytes(content)
try:
f.write(content)
except Exception as err:
os.remove(content_tempfile)
raise Exception(err)
finally:
f.close()
return content_tempfile
def _remove_tempfile_if_content_defined(self, content, content_tempfile):
if content is not None:
os.remove(content_tempfile)
def run(self, tmp=None, task_vars=None):
''' handler for file transfer operations '''
if task_vars is None:
task_vars = dict()
result = super(ActionModule, self).run(tmp, task_vars)
del tmp # tmp no longer has any effect
source = self._task.args.get('src', None)
content = self._task.args.get('content', None)
dest = self._task.args.get('dest', None)
remote_src = boolean(self._task.args.get('remote_src', False), strict=False)
local_follow = boolean(self._task.args.get('local_follow', True), strict=False)
result['failed'] = True
if not source and content is None:
result['msg'] = 'src (or content) is required'
elif not dest:
result['msg'] = 'dest is required'
elif source and content is not None:
result['msg'] = 'src and content are mutually exclusive'
elif content is not None and dest is not None and dest.endswith("/"):
result['msg'] = "can not use content with a dir as dest"
else:
del result['failed']
if result.get('failed'):
return self._ensure_invocation(result)
# Define content_tempfile in case we set it after finding content populated.
content_tempfile = None
# If content is defined make a tmp file and write the content into it.
if content is not None:
try:
# If content comes to us as a dict it should be decoded json.
# We need to encode it back into a string to write it out.
if isinstance(content, dict) or isinstance(content, list):
content_tempfile = self._create_content_tempfile(json.dumps(content))
else:
content_tempfile = self._create_content_tempfile(content)
source = content_tempfile
except Exception as err:
result['failed'] = True
result['msg'] = "could not write content temp file: %s" % to_native(err)
return self._ensure_invocation(result)
# if we have first_available_file in our vars
# look up the files and use the first one we find as src
elif remote_src:
result.update(self._execute_module(module_name='ansible.legacy.copy', task_vars=task_vars))
return self._ensure_invocation(result)
else:
# find_needle returns a path that may not have a trailing slash on
# a directory so we need to determine that now (we use it just
# like rsync does to figure out whether to include the directory
# or only the files inside the directory)
trailing_slash = source.endswith(os.path.sep)
try:
# find in expected paths
source = self._find_needle('files', source)
except AnsibleError as e:
result['failed'] = True
result['msg'] = to_text(e)
result['exception'] = traceback.format_exc()
return self._ensure_invocation(result)
if trailing_slash != source.endswith(os.path.sep):
if source[-1] == os.path.sep:
source = source[:-1]
else:
source = source + os.path.sep
# A list of source file tuples (full_path, relative_path) which will try to copy to the destination
source_files = {'files': [], 'directories': [], 'symlinks': []}
# If source is a directory populate our list else source is a file and translate it to a tuple.
if os.path.isdir(to_bytes(source, errors='surrogate_or_strict')):
# Get a list of the files we want to replicate on the remote side
source_files = _walk_dirs(source, local_follow=local_follow,
trailing_slash_detector=self._connection._shell.path_has_trailing_slash)
# If it's recursive copy, destination is always a dir,
# explicitly mark it so (note - copy module relies on this).
if not self._connection._shell.path_has_trailing_slash(dest):
dest = self._connection._shell.join_path(dest, '')
# FIXME: Can we optimize cases where there's only one file, no
# symlinks and any number of directories? In the original code,
# empty directories are not copied....
else:
source_files['files'] = [(source, os.path.basename(source))]
changed = False
module_return = dict(changed=False)
# A register for if we executed a module.
# Used to cut down on command calls when not recursive.
module_executed = False
# expand any user home dir specifier
dest = self._remote_expand_user(dest)
implicit_directories = set()
for source_full, source_rel in source_files['files']:
# copy files over. This happens first as directories that have
# a file do not need to be created later
# We only follow symlinks for files in the non-recursive case
if source_files['directories']:
follow = False
else:
follow = boolean(self._task.args.get('follow', False), strict=False)
module_return = self._copy_file(source_full, source_rel, content, content_tempfile, dest, task_vars, follow)
if module_return is None:
continue
if module_return.get('failed'):
result.update(module_return)
return self._ensure_invocation(result)
paths = os.path.split(source_rel)
dir_path = ''
for dir_component in paths:
dir_path = os.path.join(dir_path, dir_component)
implicit_directories.add(dir_path)
if 'diff' in result and not result['diff']:
del result['diff']
module_executed = True
changed = changed or module_return.get('changed', False)
for src, dest_path in source_files['directories']:
# Find directories that are leaves as they might not have been
# created yet.
if dest_path in implicit_directories:
continue
# Use file module to create these
new_module_args = _create_remote_file_args(self._task.args)
new_module_args['path'] = os.path.join(dest, dest_path)
new_module_args['state'] = 'directory'
new_module_args['mode'] = self._task.args.get('directory_mode', None)
new_module_args['recurse'] = False
del new_module_args['src']
module_return = self._execute_module(module_name='ansible.legacy.file', module_args=new_module_args, task_vars=task_vars)
if module_return.get('failed'):
result.update(module_return)
return self._ensure_invocation(result)
module_executed = True
changed = changed or module_return.get('changed', False)
for target_path, dest_path in source_files['symlinks']:
# Copy symlinks over
new_module_args = _create_remote_file_args(self._task.args)
new_module_args['path'] = os.path.join(dest, dest_path)
new_module_args['src'] = target_path
new_module_args['state'] = 'link'
new_module_args['force'] = True
# Only follow remote symlinks in the non-recursive case
if source_files['directories']:
new_module_args['follow'] = False
# file module cannot deal with 'preserve' mode and is meaningless
# for symlinks anyway, so just don't pass it.
if new_module_args.get('mode', None) == 'preserve':
new_module_args.pop('mode')
module_return = self._execute_module(module_name='ansible.legacy.file', module_args=new_module_args, task_vars=task_vars)
module_executed = True
if module_return.get('failed'):
result.update(module_return)
return self._ensure_invocation(result)
changed = changed or module_return.get('changed', False)
if module_executed and len(source_files['files']) == 1:
result.update(module_return)
# the file module returns the file path as 'path', but
# the copy module uses 'dest', so add it if it's not there
if 'path' in result and 'dest' not in result:
result['dest'] = result['path']
else:
result.update(dict(dest=dest, src=source, changed=changed))
# Delete tmp path
self._remove_tmp_path(self._connection._shell.tmpdir)
return self._ensure_invocation(result)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,142 |
Cannot use ansible.builtin.copy with /bin/ansible-config list -c %s
|
### Summary
I should be able to manage the `/etc/ansible/ansible.cfg` file on my ansible control host with ansible. So, when I try this:
```
- name: Install ansible hosts and ansible.cfg
copy:
src: "{{ file_directory }}/etc/ansible/{{ item.file }}"
dest: '/etc/ansible/{{ item.file }}'
owner: root
group: root
mode: '0644'
backup: yes
force: yes
validate: "{{ item.validate }}"
loop:
- file: hosts
validate: /bin/ansible-inventory --list --inventory %s
- file: ansible.cfg
validate: /bin/ansible-config list -c %s
```
the validation fails because the file indicated by %s does not have a .cfg extension.
### Issue Type
Bug Report
### Component Name
copy
### Ansible Version
```console
$ ansible --version
ansible [core 2.14.10]
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/steve/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.11/site-packages/ansible
ansible collection location = /home/steve/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/bin/ansible
python version = 3.11.5 (main, Aug 28 2023, 00:00:00) [GCC 13.2.1 20230728 (Red Hat 13.2.1-1)] (/usr/bin/python3)
jinja version = 3.0.3
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = /etc/ansible/ansible.cfg
```
### OS / Environment
Fedora 38
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
- name: Install ansible hosts and ansible.cfg
copy:
src: "{{ file_directory }}/etc/ansible/{{ item.file }}"
dest: '/etc/ansible/{{ item.file }}'
owner: root
group: root
mode: '0644'
backup: yes
force: yes
validate: "{{ item.validate }}"
loop:
- file: hosts
validate: /bin/ansible-inventory --list --inventory %s
- file: ansible.cfg
validate: /bin/ansible-config list -c %s
```
### Expected Results
I expected ansible-config to behave similarly to ansible-inventory, or, failing that, to (a) return a distinct exit code for the file-extension "error" (not 5) and (b) check the file's content before rejecting it based on its extension.
### Actual Results
```console
[steve@jabberwock ~]$ ansible-playbook -i shared/ansible/server_IaC/files/arthur/etc/ansible/hosts temp.yml
PLAY [arthur] ****************************************************************************************************************************
TASK [Install ansible hosts and ansible.cfg] *********************************************************************************************
ok: [arthur] => (item={'file': 'hosts', 'validate': '/bin/ansible-inventory --list --inventory %s'})
failed: [arthur] (item={'file': 'ansible.cfg', 'validate': '/bin/ansible-config list -c %s'}) => {"ansible_loop_var": "item", "changed": false, "checksum": "d17e17e9639b5df25890b6ecbab867b9e329f40f", "exit_status": 5, "item": {"file": "ansible.cfg", "validate": "/bin/ansible-config list -c %s"}, "msg": "failed to validate", "stderr": "ERROR! Unsupported configuration file extension for /home/ansible/.ansible/tmp/ansible-tmp-1699292001.685196-111356-55146881241278/source: \n", "stderr_lines": ["ERROR! Unsupported configuration file extension for /home/ansible/.ansible/tmp/ansible-tmp-1699292001.685196-111356-55146881241278/source: "], "stdout": "usage: ansible-config [-h] [--version] [-v] {list,dump,view,init} ...\n\nView ansible configuration.\n\npositional arguments:\n {list,dump,view,init}\n list Print all config options\n dump Dump configuration\n view View configuration file\n init Create initial configuration\n\noptions:\n --version show program's version number, config file location,\n configured module search path, module location,\n executable location and exit\n -h, --help show this help message and exit\n -v, --verbose Causes Ansible to print more debug messages. Adding\n multiple -v will increase the verbosity, the builtin\n plugins currently evaluate up to -vvvvvv. A reasonable\n level to start is -vvv, connection debugging might\n require -vvvv.\n", "stdout_lines": ["usage: ansible-config [-h] [--version] [-v] {list,dump,view,init} ...", "", "View ansible configuration.", "", "positional arguments:", " {list,dump,view,init}", " list Print all config options", " dump Dump configuration", " view View configuration file", " init Create initial configuration", "", "options:", " --version show program's version number, config file location,", " configured module search path, module location,", " executable location and exit", " -h, --help show this help message and exit", " -v, --verbose Causes Ansible to print more debug messages. Adding", " multiple -v will increase the verbosity, the builtin", " plugins currently evaluate up to -vvvvvv. A reasonable", " level to start is -vvv, connection debugging might", " require -vvvv."]}
PLAY RECAP *******************************************************************************************************************************
arthur : ok=0 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82142
|
https://github.com/ansible/ansible/pull/82158
|
a8b6ef7e7cbabaf87e57ea7df9df75eb7e7d1ab5
|
4a84a9b3db47028c621d04cda8b2d3a3190173cd
| 2023-11-06T17:39:49Z |
python
| 2023-11-13T15:03:58Z |
test/integration/targets/callback_default/callback_default.out.result_format_yaml_lossy_verbose.stdout
|
PLAY [testhost] ****************************************************************
TASK [Changed task] ************************************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: <omitted>
stdout: foo
stdout_lines: <omitted>
TASK [Ok task] *****************************************************************
ok: [testhost] =>
changed: false
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: <omitted>
stdout: foo
stdout_lines: <omitted>
TASK [Failed task] *************************************************************
fatal: [testhost]: FAILED! =>
changed: false
msg: no reason
...ignoring
TASK [Skipped task] ************************************************************
skipping: [testhost] =>
changed: false
false_condition: false
skip_reason: Conditional result was False
TASK [Task with var in name (foo bar)] *****************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: <omitted>
stdout: foo
stdout_lines: <omitted>
TASK [Loop task] ***************************************************************
changed: [testhost] => (item=foo-1) =>
ansible_loop_var: item
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
item: 1
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: <omitted>
stdout: foo
stdout_lines: <omitted>
changed: [testhost] => (item=foo-2) =>
ansible_loop_var: item
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
item: 2
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: <omitted>
stdout: foo
stdout_lines: <omitted>
changed: [testhost] => (item=foo-3) =>
ansible_loop_var: item
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
item: 3
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: <omitted>
stdout: foo
stdout_lines: <omitted>
TASK [debug loop] **************************************************************
changed: [testhost] => (item=debug-1) =>
msg: debug-1
failed: [testhost] (item=debug-2) =>
msg: debug-2
ok: [testhost] => (item=debug-3) =>
msg: debug-3
skipping: [testhost] => (item=debug-4) =>
ansible_loop_var: item
false_condition: item != 4
item: 4
fatal: [testhost]: FAILED! =>
msg: One or more items failed
...ignoring
TASK [EXPECTED FAILURE Failed task to be rescued] ******************************
fatal: [testhost]: FAILED! =>
changed: false
msg: Failed as requested from task
TASK [Rescue task] *************************************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- rescued
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: <omitted>
stdout: rescued
stdout_lines: <omitted>
TASK [include_tasks] ***********************************************************
included: .../test/integration/targets/callback_default/include_me.yml for testhost => (item=1)
TASK [debug] *******************************************************************
ok: [testhost] =>
item: 1
TASK [copy] ********************************************************************
changed: [testhost] =>
changed: true
checksum: 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33
dest: .../test_diff.txt
gid: 0
group: root
md5sum: acbd18db4cc2f85cedef654fccc4a4d8
mode: '0644'
owner: root
size: 3
src: .../source
state: file
uid: 0
TASK [replace] *****************************************************************
--- before: .../test_diff.txt
+++ after: .../test_diff.txt
@@ -1 +1 @@
-foo
\ No newline at end of file
+bar
\ No newline at end of file
changed: [testhost] =>
changed: true
msg: 1 replacements made
rc: 0
TASK [replace] *****************************************************************
ok: [testhost] =>
changed: false
msg: 1 replacements made
rc: 0
TASK [debug] *******************************************************************
skipping: [testhost] =>
skipped_reason: No items in the list
TASK [debug] *******************************************************************
skipping: [testhost] =>
skipped_reason: No items in the list
TASK [debug] *******************************************************************
skipping: [testhost] => (item=1) =>
ansible_loop_var: item
false_condition: false
item: 1
skipping: [testhost] => (item=2) =>
ansible_loop_var: item
false_condition: false
item: 2
skipping: [testhost] =>
msg: All items skipped
RUNNING HANDLER [Test handler 1] ***********************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: <omitted>
stdout: foo
stdout_lines: <omitted>
RUNNING HANDLER [Test handler 2] ***********************************************
ok: [testhost] =>
changed: false
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: <omitted>
stdout: foo
stdout_lines: <omitted>
RUNNING HANDLER [Test handler 3] ***********************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: <omitted>
stdout: foo
stdout_lines: <omitted>
PLAY [testhost] ****************************************************************
TASK [First free task] *********************************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: <omitted>
stdout: foo
stdout_lines: <omitted>
TASK [Second free task] ********************************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: <omitted>
stdout: foo
stdout_lines: <omitted>
TASK [Include some tasks] ******************************************************
included: .../test/integration/targets/callback_default/include_me.yml for testhost => (item=1)
TASK [debug] *******************************************************************
ok: [testhost] =>
item: 1
PLAY RECAP *********************************************************************
testhost : ok=19 changed=11 unreachable=0 failed=0 skipped=4 rescued=1 ignored=2
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,142 |
Cannot use ansible.builtin.copy with /bin/ansible-config list -c %s
|
### Summary
I should be able to manage the `/etc/ansible/ansible.cfg` file on my ansible control host with ansible. So, when I try this:
```
- name: Install ansible hosts and ansible.cfg
copy:
src: "{{ file_directory }}/etc/ansible/{{ item.file }}"
dest: '/etc/ansible/{{ item.file }}'
owner: root
group: root
mode: '0644'
backup: yes
force: yes
validate: "{{ item.validate }}"
loop:
- file: hosts
validate: /bin/ansible-inventory --list --inventory %s
- file: ansible.cfg
validate: /bin/ansible-config list -c %s
```
the validation fails because the file indicated by %s does not have a .cfg extension.
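For context, a minimal sketch of the kind of extension check that appears to produce this error (the allowed-extension set and function name are assumptions for illustration, not ansible-config's actual code): the temporary file staged by copy's validate step has no extension, so it is rejected before its content is inspected:
```python
# Hypothetical illustration only -- the ALLOWED set and function name are
# assumptions, not ansible-config's real implementation.
import os

ALLOWED = {'.cfg', '.ini', '.yaml', '.yml'}  # assumed set of recognised extensions

def check_config_extension(path):
    ext = os.path.splitext(path)[1]
    if ext not in ALLOWED:
        raise SystemExit("Unsupported configuration file extension for %s: %s" % (path, ext))
    return ext

# The validate temp file ends in "source" with no extension, so this would raise:
# check_config_extension("/home/ansible/.ansible/tmp/ansible-tmp-.../source")
```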
### Issue Type
Bug Report
### Component Name
copy
### Ansible Version
```console
$ ansible --version
ansible [core 2.14.10]
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/steve/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.11/site-packages/ansible
ansible collection location = /home/steve/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/bin/ansible
python version = 3.11.5 (main, Aug 28 2023, 00:00:00) [GCC 13.2.1 20230728 (Red Hat 13.2.1-1)] (/usr/bin/python3)
jinja version = 3.0.3
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = /etc/ansible/ansible.cfg
```
### OS / Environment
Fedora 38
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
- name: Install ansible hosts and ansible.cfg
copy:
src: "{{ file_directory }}/etc/ansible/{{ item.file }}"
dest: '/etc/ansible/{{ item.file }}'
owner: root
group: root
mode: '0644'
backup: yes
force: yes
validate: "{{ item.validate }}"
loop:
- file: hosts
validate: /bin/ansible-inventory --list --inventory %s
- file: ansible.cfg
validate: /bin/ansible-config list -c %s
```
### Expected Results
I expected ansible-config to behave similarly to ansible-inventory, or, failing that, to (a) return a different exit code for the file-extension "error" (not 5) and (b) test the content before checking the extension.
### Actual Results
```console
[steve@jabberwock ~]$ ansible-playbook -i shared/ansible/server_IaC/files/arthur/etc/ansible/hosts temp.yml
PLAY [arthur] ****************************************************************************************************************************
TASK [Install ansible hosts and ansible.cfg] *********************************************************************************************
ok: [arthur] => (item={'file': 'hosts', 'validate': '/bin/ansible-inventory --list --inventory %s'})
failed: [arthur] (item={'file': 'ansible.cfg', 'validate': '/bin/ansible-config list -c %s'}) => {"ansible_loop_var": "item", "changed": false, "checksum": "d17e17e9639b5df25890b6ecbab867b9e329f40f", "exit_status": 5, "item": {"file": "ansible.cfg", "validate": "/bin/ansible-config list -c %s"}, "msg": "failed to validate", "stderr": "ERROR! Unsupported configuration file extension for /home/ansible/.ansible/tmp/ansible-tmp-1699292001.685196-111356-55146881241278/source: \n", "stderr_lines": ["ERROR! Unsupported configuration file extension for /home/ansible/.ansible/tmp/ansible-tmp-1699292001.685196-111356-55146881241278/source: "], "stdout": "usage: ansible-config [-h] [--version] [-v] {list,dump,view,init} ...\n\nView ansible configuration.\n\npositional arguments:\n {list,dump,view,init}\n list Print all config options\n dump Dump configuration\n view View configuration file\n init Create initial configuration\n\noptions:\n --version show program's version number, config file location,\n configured module search path, module location,\n executable location and exit\n -h, --help show this help message and exit\n -v, --verbose Causes Ansible to print more debug messages. Adding\n multiple -v will increase the verbosity, the builtin\n plugins currently evaluate up to -vvvvvv. A reasonable\n level to start is -vvv, connection debugging might\n require -vvvv.\n", "stdout_lines": ["usage: ansible-config [-h] [--version] [-v] {list,dump,view,init} ...", "", "View ansible configuration.", "", "positional arguments:", " {list,dump,view,init}", " list Print all config options", " dump Dump configuration", " view View configuration file", " init Create initial configuration", "", "options:", " --version show program's version number, config file location,", " configured module search path, module location,", " executable location and exit", " -h, --help show this help message and exit", " -v, --verbose Causes Ansible to print more debug messages. Adding", " multiple -v will increase the verbosity, the builtin", " plugins currently evaluate up to -vvvvvv. A reasonable", " level to start is -vvv, connection debugging might", " require -vvvv."]}
PLAY RECAP *******************************************************************************************************************************
arthur : ok=0 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82142
|
https://github.com/ansible/ansible/pull/82158
|
a8b6ef7e7cbabaf87e57ea7df9df75eb7e7d1ab5
|
4a84a9b3db47028c621d04cda8b2d3a3190173cd
| 2023-11-06T17:39:49Z |
python
| 2023-11-13T15:03:58Z |
test/integration/targets/callback_default/callback_default.out.result_format_yaml_verbose.stdout
|
PLAY [testhost] ****************************************************************
TASK [Changed task] ************************************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: []
stdout: foo
stdout_lines:
- foo
TASK [Ok task] *****************************************************************
ok: [testhost] =>
changed: false
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: []
stdout: foo
stdout_lines:
- foo
TASK [Failed task] *************************************************************
fatal: [testhost]: FAILED! =>
changed: false
msg: no reason
...ignoring
TASK [Skipped task] ************************************************************
skipping: [testhost] =>
changed: false
false_condition: false
skip_reason: Conditional result was False
TASK [Task with var in name (foo bar)] *****************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: []
stdout: foo
stdout_lines:
- foo
TASK [Loop task] ***************************************************************
changed: [testhost] => (item=foo-1) =>
ansible_loop_var: item
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
item: 1
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: []
stdout: foo
stdout_lines:
- foo
changed: [testhost] => (item=foo-2) =>
ansible_loop_var: item
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
item: 2
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: []
stdout: foo
stdout_lines:
- foo
changed: [testhost] => (item=foo-3) =>
ansible_loop_var: item
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
item: 3
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: []
stdout: foo
stdout_lines:
- foo
TASK [debug loop] **************************************************************
changed: [testhost] => (item=debug-1) =>
msg: debug-1
failed: [testhost] (item=debug-2) =>
msg: debug-2
ok: [testhost] => (item=debug-3) =>
msg: debug-3
skipping: [testhost] => (item=debug-4) =>
ansible_loop_var: item
false_condition: item != 4
item: 4
fatal: [testhost]: FAILED! =>
msg: One or more items failed
...ignoring
TASK [EXPECTED FAILURE Failed task to be rescued] ******************************
fatal: [testhost]: FAILED! =>
changed: false
msg: Failed as requested from task
TASK [Rescue task] *************************************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- rescued
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: []
stdout: rescued
stdout_lines:
- rescued
TASK [include_tasks] ***********************************************************
included: .../test/integration/targets/callback_default/include_me.yml for testhost => (item=1)
TASK [debug] *******************************************************************
ok: [testhost] =>
item: 1
TASK [copy] ********************************************************************
changed: [testhost] =>
changed: true
checksum: 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33
dest: .../test_diff.txt
gid: 0
group: root
md5sum: acbd18db4cc2f85cedef654fccc4a4d8
mode: '0644'
owner: root
size: 3
src: .../source
state: file
uid: 0
TASK [replace] *****************************************************************
--- before: .../test_diff.txt
+++ after: .../test_diff.txt
@@ -1 +1 @@
-foo
\ No newline at end of file
+bar
\ No newline at end of file
changed: [testhost] =>
changed: true
msg: 1 replacements made
rc: 0
TASK [replace] *****************************************************************
ok: [testhost] =>
changed: false
msg: 1 replacements made
rc: 0
TASK [debug] *******************************************************************
skipping: [testhost] =>
skipped_reason: No items in the list
TASK [debug] *******************************************************************
skipping: [testhost] =>
skipped_reason: No items in the list
TASK [debug] *******************************************************************
skipping: [testhost] => (item=1) =>
ansible_loop_var: item
false_condition: false
item: 1
skipping: [testhost] => (item=2) =>
ansible_loop_var: item
false_condition: false
item: 2
skipping: [testhost] =>
msg: All items skipped
RUNNING HANDLER [Test handler 1] ***********************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: []
stdout: foo
stdout_lines:
- foo
RUNNING HANDLER [Test handler 2] ***********************************************
ok: [testhost] =>
changed: false
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: []
stdout: foo
stdout_lines:
- foo
RUNNING HANDLER [Test handler 3] ***********************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: []
stdout: foo
stdout_lines:
- foo
PLAY [testhost] ****************************************************************
TASK [First free task] *********************************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: []
stdout: foo
stdout_lines:
- foo
TASK [Second free task] ********************************************************
changed: [testhost] =>
changed: true
cmd:
- echo
- foo
delta: '0:00:00.000000'
end: '0000-00-00 00:00:00.000000'
msg: ''
rc: 0
start: '0000-00-00 00:00:00.000000'
stderr: ''
stderr_lines: []
stdout: foo
stdout_lines:
- foo
TASK [Include some tasks] ******************************************************
included: .../test/integration/targets/callback_default/include_me.yml for testhost => (item=1)
TASK [debug] *******************************************************************
ok: [testhost] =>
item: 1
PLAY RECAP *********************************************************************
testhost : ok=19 changed=11 unreachable=0 failed=0 skipped=4 rescued=1 ignored=2
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,142 |
Cannot use ansible.builtin.copy with /bin/ansible-config list -c %s
|
### Summary
I should be able to manage the `/etc/ansible/ansible.cfg` file on my ansible control host with ansible. So, when I try this:
```
- name: Install ansible hosts and ansible.cfg
copy:
src: "{{ file_directory }}/etc/ansible/{{ item.file }}"
dest: '/etc/ansible/{{ item.file }}'
owner: root
group: root
mode: '0644'
backup: yes
force: yes
validate: "{{ item.validate }}"
loop:
- file: hosts
validate: /bin/ansible-inventory --list --inventory %s
- file: ansible.cfg
validate: /bin/ansible-config list -c %s
```
the validation fails because the file indicated by %s does not have a .cfg extension.
### Issue Type
Bug Report
### Component Name
copy
### Ansible Version
```console
$ ansible --version
ansible [core 2.14.10]
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/steve/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.11/site-packages/ansible
ansible collection location = /home/steve/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/bin/ansible
python version = 3.11.5 (main, Aug 28 2023, 00:00:00) [GCC 13.2.1 20230728 (Red Hat 13.2.1-1)] (/usr/bin/python3)
jinja version = 3.0.3
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = /etc/ansible/ansible.cfg
```
### OS / Environment
Fedora 38
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
- name: Install ansible hosts and ansible.cfg
copy:
src: "{{ file_directory }}/etc/ansible/{{ item.file }}"
dest: '/etc/ansible/{{ item.file }}'
owner: root
group: root
mode: '0644'
backup: yes
force: yes
validate: "{{ item.validate }}"
loop:
- file: hosts
validate: /bin/ansible-inventory --list --inventory %s
- file: ansible.cfg
validate: /bin/ansible-config list -c %s
```
### Expected Results
I expected ansible-config to behave similarly to ansible-inventory, or, failing that, to (a) return a different exit code for the file-extension "error" (not 5) and (b) test the content before checking the extension.
### Actual Results
```console
[steve@jabberwock ~]$ ansible-playbook -i shared/ansible/server_IaC/files/arthur/etc/ansible/hosts temp.yml
PLAY [arthur] ****************************************************************************************************************************
TASK [Install ansible hosts and ansible.cfg] *********************************************************************************************
ok: [arthur] => (item={'file': 'hosts', 'validate': '/bin/ansible-inventory --list --inventory %s'})
failed: [arthur] (item={'file': 'ansible.cfg', 'validate': '/bin/ansible-config list -c %s'}) => {"ansible_loop_var": "item", "changed": false, "checksum": "d17e17e9639b5df25890b6ecbab867b9e329f40f", "exit_status": 5, "item": {"file": "ansible.cfg", "validate": "/bin/ansible-config list -c %s"}, "msg": "failed to validate", "stderr": "ERROR! Unsupported configuration file extension for /home/ansible/.ansible/tmp/ansible-tmp-1699292001.685196-111356-55146881241278/source: \n", "stderr_lines": ["ERROR! Unsupported configuration file extension for /home/ansible/.ansible/tmp/ansible-tmp-1699292001.685196-111356-55146881241278/source: "], "stdout": "usage: ansible-config [-h] [--version] [-v] {list,dump,view,init} ...\n\nView ansible configuration.\n\npositional arguments:\n {list,dump,view,init}\n list Print all config options\n dump Dump configuration\n view View configuration file\n init Create initial configuration\n\noptions:\n --version show program's version number, config file location,\n configured module search path, module location,\n executable location and exit\n -h, --help show this help message and exit\n -v, --verbose Causes Ansible to print more debug messages. Adding\n multiple -v will increase the verbosity, the builtin\n plugins currently evaluate up to -vvvvvv. A reasonable\n level to start is -vvv, connection debugging might\n require -vvvv.\n", "stdout_lines": ["usage: ansible-config [-h] [--version] [-v] {list,dump,view,init} ...", "", "View ansible configuration.", "", "positional arguments:", " {list,dump,view,init}", " list Print all config options", " dump Dump configuration", " view View configuration file", " init Create initial configuration", "", "options:", " --version show program's version number, config file location,", " configured module search path, module location,", " executable location and exit", " -h, --help show this help message and exit", " -v, --verbose Causes Ansible to print more debug messages. Adding", " multiple -v will increase the verbosity, the builtin", " plugins currently evaluate up to -vvvvvv. A reasonable", " level to start is -vvv, connection debugging might", " require -vvvv."]}
PLAY RECAP *******************************************************************************************************************************
arthur : ok=0 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82142
|
https://github.com/ansible/ansible/pull/82158
|
a8b6ef7e7cbabaf87e57ea7df9df75eb7e7d1ab5
|
4a84a9b3db47028c621d04cda8b2d3a3190173cd
| 2023-11-06T17:39:49Z |
python
| 2023-11-13T15:03:58Z |
test/integration/targets/callback_default/runme.sh
|
#!/usr/bin/env bash
# This test compares "known good" output with various settings against output
# with the current code. It's brittle by nature, but this is probably the
# "best" approach possible.
#
# Notes:
# * options passed to this script (such as -v) are ignored, as they would change
# the output and break the test
# * the number of asterisks after a "banner" differs depending on the number of
# columns on the TTY, so we must adjust the columns for the current session
# for consistency
set -eux
run_test() {
local testname=$1
local playbook=$2
# output was recorded w/o cowsay, ensure we reproduce the same
export ANSIBLE_NOCOWS=1
# The shenanigans with redirection and 'tee' are to capture STDOUT and
# STDERR separately while still displaying both to the console
{ ansible-playbook -i inventory "$playbook" "${@:3}" \
> >(set +x; tee "${OUTFILE}.${testname}.stdout"); } \
2> >(set +x; tee "${OUTFILE}.${testname}.stderr" >&2)
# Scrub deprecation warning that shows up in Python 2.6 on CentOS 6
sed -i -e '/RandomPool_DeprecationWarning/d' "${OUTFILE}.${testname}.stderr"
sed -i -e 's/included: .*\/test\/integration/included: ...\/test\/integration/g' "${OUTFILE}.${testname}.stdout"
sed -i -e 's/@@ -1,1 +1,1 @@/@@ -1 +1 @@/g' "${OUTFILE}.${testname}.stdout"
sed -i -e 's/: .*\/test_diff\.txt/: ...\/test_diff.txt/g' "${OUTFILE}.${testname}.stdout"
sed -i -e "s#${ANSIBLE_PLAYBOOK_DIR}#TEST_PATH#g" "${OUTFILE}.${testname}.stdout"
sed -i -e 's/^Using .*//g' "${OUTFILE}.${testname}.stdout"
sed -i -e 's/[0-9]:[0-9]\{2\}:[0-9]\{2\}\.[0-9]\{6\}/0:00:00.000000/g' "${OUTFILE}.${testname}.stdout"
sed -i -e 's/[0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\} [0-9]\{2\}:[0-9]\{2\}:[0-9]\{2\}\.[0-9]\{6\}/0000-00-00 00:00:00.000000/g' "${OUTFILE}.${testname}.stdout"
sed -i -e 's#: .*/source$#: .../source#g' "${OUTFILE}.${testname}.stdout"
sed -i -e '/secontext:/d' "${OUTFILE}.${testname}.stdout"
sed -i -e 's/group: wheel/group: root/g' "${OUTFILE}.${testname}.stdout"
diff -u "${ORIGFILE}.${testname}.stdout" "${OUTFILE}.${testname}.stdout" || diff_failure
diff -u "${ORIGFILE}.${testname}.stderr" "${OUTFILE}.${testname}.stderr" || diff_failure
}
run_test_dryrun() {
local testname=$1
# optional, pass --check to run a dry run
local chk=${2:-}
# output was recorded w/o cowsay, ensure we reproduce the same
export ANSIBLE_NOCOWS=1
# This is needed to satisfy shellcheck, which cannot accept an unquoted variable
cmd="ansible-playbook -i inventory ${chk} test_dryrun.yml"
# The shenanigans with redirection and 'tee' are to capture STDOUT and
# STDERR separately while still displaying both to the console
{ $cmd \
> >(set +x; tee "${OUTFILE}.${testname}.stdout"); } \
2> >(set +x; tee "${OUTFILE}.${testname}.stderr" >&2)
# Scrub deprecation warning that shows up in Python 2.6 on CentOS 6
sed -i -e '/RandomPool_DeprecationWarning/d' "${OUTFILE}.${testname}.stderr"
diff -u "${ORIGFILE}.${testname}.stdout" "${OUTFILE}.${testname}.stdout" || diff_failure
diff -u "${ORIGFILE}.${testname}.stderr" "${OUTFILE}.${testname}.stderr" || diff_failure
}
diff_failure() {
if [[ $INIT = 0 ]]; then
echo "FAILURE...diff mismatch!"
exit 1
fi
}
cleanup() {
if [[ $INIT = 0 ]]; then
rm -rf "${OUTFILE}.*"
fi
if [[ -f "${BASEFILE}.unreachable.stdout" ]]; then
rm -rf "${BASEFILE}.unreachable.stdout"
fi
if [[ -f "${BASEFILE}.unreachable.stderr" ]]; then
rm -rf "${BASEFILE}.unreachable.stderr"
fi
# Restore TTY cols
if [[ -n ${TTY_COLS:-} ]]; then
stty cols "${TTY_COLS}"
fi
}
adjust_tty_cols() {
if [[ -t 1 ]]; then
# Preserve existing TTY cols
TTY_COLS=$( stty -a | grep -Eo '; columns [0-9]+;' | cut -d';' -f2 | cut -d' ' -f3 )
# Override TTY cols to make comparing ansible-playbook output easier
# This value matches the default in the code when there is no TTY
stty cols 79
fi
}
BASEFILE=callback_default.out
ORIGFILE="${BASEFILE}"
OUTFILE="${BASEFILE}.new"
trap 'cleanup' EXIT
# The --init flag will (re)generate the "good" output files used by the tests
INIT=0
if [[ ${1:-} == "--init" ]]; then
shift
OUTFILE=$ORIGFILE
INIT=1
fi
adjust_tty_cols
# Force the 'default' callback plugin, since that's what we're testing
export ANSIBLE_STDOUT_CALLBACK=default
# Disable color in output for consistency
export ANSIBLE_FORCE_COLOR=0
export ANSIBLE_NOCOLOR=1
# Default settings
export ANSIBLE_DISPLAY_SKIPPED_HOSTS=1
export ANSIBLE_DISPLAY_OK_HOSTS=1
export ANSIBLE_DISPLAY_FAILED_STDERR=0
export ANSIBLE_CHECK_MODE_MARKERS=0
run_test default test.yml
# Check for async output
# NOTE: regex to match 1 or more digits works for both BSD and GNU grep
ansible-playbook -i inventory test_async.yml 2>&1 | tee async_test.out
grep "ASYNC OK .* jid=j[0-9]\{1,\}" async_test.out
grep "ASYNC FAILED .* jid=j[0-9]\{1,\}" async_test.out
rm -f async_test.out
# Hide skipped
export ANSIBLE_DISPLAY_SKIPPED_HOSTS=0
run_test hide_skipped test.yml
# Hide skipped/ok
export ANSIBLE_DISPLAY_SKIPPED_HOSTS=0
export ANSIBLE_DISPLAY_OK_HOSTS=0
run_test hide_skipped_ok test.yml
# Hide ok
export ANSIBLE_DISPLAY_SKIPPED_HOSTS=1
export ANSIBLE_DISPLAY_OK_HOSTS=0
run_test hide_ok test.yml
# Failed to stderr
export ANSIBLE_DISPLAY_SKIPPED_HOSTS=1
export ANSIBLE_DISPLAY_OK_HOSTS=1
export ANSIBLE_DISPLAY_FAILED_STDERR=1
run_test failed_to_stderr test.yml
export ANSIBLE_DISPLAY_FAILED_STDERR=0
# Test displaying task path on failure
export ANSIBLE_SHOW_TASK_PATH_ON_FAILURE=1
run_test display_path_on_failure test.yml
export ANSIBLE_SHOW_TASK_PATH_ON_FAILURE=0
# Default settings with unreachable tasks
export ANSIBLE_DISPLAY_SKIPPED_HOSTS=1
export ANSIBLE_DISPLAY_OK_HOSTS=1
export ANSIBLE_DISPLAY_FAILED_STDERR=1
export ANSIBLE_TIMEOUT=1
# Check if UNREACHABLE is available in stderr
set +e
ansible-playbook -i inventory test_2.yml > >(set +x; tee "${BASEFILE}.unreachable.stdout";) 2> >(set +x; tee "${BASEFILE}.unreachable.stderr" >&2) || true
set -e
if test "$(grep -c 'UNREACHABLE' "${BASEFILE}.unreachable.stderr")" -ne 1; then
echo "Test failed"
exit 1
fi
export ANSIBLE_DISPLAY_FAILED_STDERR=0
export ANSIBLE_CALLBACK_RESULT_FORMAT=yaml
run_test result_format_yaml test.yml
export ANSIBLE_CALLBACK_RESULT_FORMAT=json
export ANSIBLE_CALLBACK_RESULT_FORMAT=yaml
export ANSIBLE_CALLBACK_FORMAT_PRETTY=1
run_test result_format_yaml_lossy_verbose test.yml -v
run_test yaml_result_format_yaml_verbose test_yaml.yml -v
export ANSIBLE_CALLBACK_RESULT_FORMAT=json
unset ANSIBLE_CALLBACK_FORMAT_PRETTY
export ANSIBLE_CALLBACK_RESULT_FORMAT=yaml
export ANSIBLE_CALLBACK_FORMAT_PRETTY=0
run_test result_format_yaml_verbose test.yml -v
export ANSIBLE_CALLBACK_RESULT_FORMAT=json
unset ANSIBLE_CALLBACK_FORMAT_PRETTY
## DRY RUN tests
#
# Default settings with dry run tasks
export ANSIBLE_DISPLAY_SKIPPED_HOSTS=1
export ANSIBLE_DISPLAY_OK_HOSTS=1
export ANSIBLE_DISPLAY_FAILED_STDERR=1
# Enable Check mode markers
export ANSIBLE_CHECK_MODE_MARKERS=1
# Test the wet run with check markers
run_test_dryrun check_markers_wet
# Test the dry run with check markers
run_test_dryrun check_markers_dry --check
# Disable Check mode markers
export ANSIBLE_CHECK_MODE_MARKERS=0
# Test the wet run without check markers
run_test_dryrun check_nomarkers_wet
# Test the dry run without check markers
run_test_dryrun check_nomarkers_dry --check
# Make sure implicit meta tasks are not printed
ansible-playbook -i host1,host2 no_implicit_meta_banners.yml > meta_test.out
cat meta_test.out
[ "$(grep -c 'TASK \[meta\]' meta_test.out)" -eq 0 ]
rm -f meta_test.out
# Ensure free/host_pinned non-lockstep strategies display correctly
diff -u callback_default.out.free.stdout <(ANSIBLE_STRATEGY=free ansible-playbook -i inventory test_non_lockstep.yml 2>/dev/null)
diff -u callback_default.out.fqcn_free.stdout <(ANSIBLE_STRATEGY=ansible.builtin.free ansible-playbook -i inventory test_non_lockstep.yml 2>/dev/null)
diff -u callback_default.out.host_pinned.stdout <(ANSIBLE_STRATEGY=host_pinned ansible-playbook -i inventory test_non_lockstep.yml 2>/dev/null)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,199 |
ansible.builtin.union documentation not matching implementation
|
### Summary
https://docs.ansible.com/ansible/latest/collections/ansible/builtin/union_filter.html#examples shows:
\# return the unique elements of list1 added to list2
\# list1: [1, 2, 5, 1, 3, 4, 10]
\# list2: [1, 2, 3, 4, 5, 11, 99]
{{ list1 | union(list2) }}
\# => [1, 2, 5, 1, 3, 4, 10, 11, 99]
But running a test playbook with the above gives different result:
```yaml
- name: test
vars:
list1: [1, 2, 5, 1, 3, 4, 10]
list2: [1, 2, 3, 4, 5, 11, 99]
debug:
msg: |
{{ list1 | union(list2) | string }}
```
```
ok: [localhost] =>
msg: |-
[1, 2, 5, 3, 4, 10, 11, 99]
```
I think the documentation, not the code, needs to be adjusted here. Thanks.
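For reference, a minimal sketch of the behaviour the filter actually shows (an illustrative re-implementation, not the filter's real code): a de-duplicating union that keeps first-seen order collapses the repeated 1 and 5 from list1 and reproduces the playbook output rather than the documented one:
```python
# Illustrative sketch only -- not the real union filter implementation.
def union(a, b):
    seen = []
    for item in list(a) + list(b):
        if item not in seen:
            seen.append(item)
    return seen

list1 = [1, 2, 5, 1, 3, 4, 10]
list2 = [1, 2, 3, 4, 5, 11, 99]
print(union(list1, list2))  # [1, 2, 5, 3, 4, 10, 11, 99] -- matches the playbook output
```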
### Issue Type
Bug Report
### Component Name
core
### Ansible Version
```console
$ ansible --version
ansible [core 2.15.6]
config file = /home/testuser/.ansible.cfg
configured module search path = ['/home/testuser/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/testuser/ansible/ansible-2.15/lib64/python3.9/site-packages/ansible
ansible collection location = /home/testuser/.ansible/collections:/usr/share/ansible/collections
executable location = /home/testuser/ansible/ansible-2.15/bin/ansible
python version = 3.9.18 (main, Sep 7 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/home/testuser/ansible/ansible-2.15/bin/python3.9)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
ANSIBLE_NOCOWS(/home/testuser/.ansible.cfg) = True
```
### OS / Environment
This is on RHEL 9 with pip installed ansible 2.15.
### Steps to Reproduce
```yaml
- hosts: localhost
connection: local
gather_facts: false
tasks:
- name: test
vars:
list1: [1, 2, 5, 1, 3, 4, 10]
list2: [1, 2, 3, 4, 5, 11, 99]
debug:
msg: |
{{ list1 | union(list2) | string }}
```
### Expected Results
Documentation and output match.
### Actual Results
```console
Documentation:
[1, 2, 5, 1, 3, 4, 10, 11, 99]
Actual output:
[1, 2, 5, 3, 4, 10, 11, 99]
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82199
|
https://github.com/ansible/ansible/pull/82202
|
4a84a9b3db47028c621d04cda8b2d3a3190173cd
|
2277d470b38ff239f87b501c385d2af3948bb841
| 2023-11-13T09:57:30Z |
python
| 2023-11-13T19:59:07Z |
lib/ansible/plugins/filter/union.yml
|
DOCUMENTATION:
name: union
author: Brian Coca (@bcoca)
version_added: "1.4"
short_description: union of lists
description:
- Provide a unique list of all the elements of two lists.
- Items in the resulting list are returned in arbitrary order.
options:
_input:
description: A list.
type: list
required: true
_second_list:
description: A list.
type: list
required: true
seealso:
- plugin_type: filter
plugin: ansible.builtin.difference
- plugin_type: filter
plugin: ansible.builtin.intersect
- plugin_type: filter
plugin: ansible.builtin.symmetric_difference
- plugin_type: filter
plugin: ansible.builtin.unique
EXAMPLES: |
# return the unique elements of list1 added to list2
# list1: [1, 2, 5, 1, 3, 4, 10]
# list2: [1, 2, 3, 4, 5, 11, 99]
{{ list1 | union(list2) }}
# => [1, 2, 5, 1, 3, 4, 10, 11, 99]
RETURN:
_value:
description: A unique list of all the elements from both lists.
type: list
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,020 |
Add RHEL 9.3 to ansible-test
|
### Summary
This is a remote VM addition.
### Issue Type
Feature Idea
### Component Name
`ansible-test`
|
https://github.com/ansible/ansible/issues/82020
|
https://github.com/ansible/ansible/pull/82178
|
2277d470b38ff239f87b501c385d2af3948bb841
|
0bab08ee33a1aad1908f54534b48ece66cff7c50
| 2023-10-18T19:38:29Z |
python
| 2023-11-14T07:23:44Z |
.azure-pipelines/azure-pipelines.yml
|
trigger:
batch: true
branches:
include:
- devel
- stable-*
pr:
autoCancel: true
branches:
include:
- devel
- stable-*
schedules:
- cron: 0 7 * * *
displayName: Nightly
always: true
branches:
include:
- devel
- stable-*
variables:
- name: checkoutPath
value: ansible
- name: coverageBranches
value: devel
- name: entryPoint
value: .azure-pipelines/commands/entry-point.sh
- name: fetchDepth
value: 500
- name: defaultContainer
value: quay.io/ansible/azure-pipelines-test-container:4.0.1
pool: Standard
stages:
- stage: Sanity
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Test {0}
testFormat: sanity/{0}
targets:
- test: 1
- test: 2
- stage: Units
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Python {0}
testFormat: units/{0}
targets:
- test: 3.7
- test: 3.8
- test: 3.9
- test: '3.10'
- test: 3.11
- test: 3.12
- stage: Windows
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Server {0}
testFormat: windows/{0}/1
targets:
- test: 2016
- test: 2019
- test: 2022
- stage: Remote
dependsOn: []
jobs:
- template: templates/matrix.yml # context/target
parameters:
targets:
- name: macOS 13.2
test: macos/13.2
- name: RHEL 9.2 py39
test: rhel/[email protected]
- name: RHEL 9.2 py311
test: rhel/[email protected]
- name: FreeBSD 13.2
test: freebsd/13.2
groups:
- 1
- 2
- template: templates/matrix.yml # context/controller
parameters:
targets:
- name: macOS 13.2
test: macos/13.2
- name: RHEL 9.2
test: rhel/9.2
- name: FreeBSD 13.2
test: freebsd/13.2
groups:
- 3
- 4
- 5
- template: templates/matrix.yml # context/controller (ansible-test container management)
parameters:
targets:
- name: Alpine 3.18
test: alpine/3.18
- name: Fedora 38
test: fedora/38
- name: RHEL 9.2
test: rhel/9.2
- name: Ubuntu 22.04
test: ubuntu/22.04
groups:
- 6
- stage: Docker
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: linux/{0}
targets:
- name: Alpine 3
test: alpine3
- name: Fedora 38
test: fedora38
- name: Ubuntu 20.04
test: ubuntu2004
- name: Ubuntu 22.04
test: ubuntu2204
groups:
- 1
- 2
- template: templates/matrix.yml
parameters:
testFormat: linux/{0}
targets:
- name: Alpine 3
test: alpine3
- name: Fedora 38
test: fedora38
- name: Ubuntu 22.04
test: ubuntu2204
groups:
- 3
- 4
- 5
- stage: Galaxy
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Python {0}
testFormat: galaxy/{0}/1
targets:
- test: '3.10'
- test: 3.11
- test: 3.12
- stage: Generic
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Python {0}
testFormat: generic/{0}/1
targets:
- test: '3.10'
- test: 3.11
- test: 3.12
- stage: Incidental_Windows
displayName: Incidental Windows
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Server {0}
testFormat: i/windows/{0}
targets:
- test: 2016
- test: 2019
- test: 2022
- stage: Incidental
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: i/{0}/1
targets:
- name: IOS Python
test: ios/csr1000v/
- name: VyOS Python
test: vyos/1.1.8/
- stage: Summary
condition: succeededOrFailed()
dependsOn:
- Sanity
- Units
- Windows
- Remote
- Docker
- Galaxy
- Generic
- Incidental_Windows
- Incidental
jobs:
- template: templates/coverage.yml
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,020 |
Add RHEL 9.3 to ansible-test
|
### Summary
This is a remote VM addition.
### Issue Type
Feature Idea
### Component Name
`ansible-test`
|
https://github.com/ansible/ansible/issues/82020
|
https://github.com/ansible/ansible/pull/82178
|
2277d470b38ff239f87b501c385d2af3948bb841
|
0bab08ee33a1aad1908f54534b48ece66cff7c50
| 2023-10-18T19:38:29Z |
python
| 2023-11-14T07:23:44Z |
changelogs/fragments/ansible-test-rhel-9.3.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,020 |
Add RHEL 9.3 to ansible-test
|
### Summary
This is a remote VM addition.
### Issue Type
Feature Idea
### Component Name
`ansible-test`
|
https://github.com/ansible/ansible/issues/82020
|
https://github.com/ansible/ansible/pull/82178
|
2277d470b38ff239f87b501c385d2af3948bb841
|
0bab08ee33a1aad1908f54534b48ece66cff7c50
| 2023-10-18T19:38:29Z |
python
| 2023-11-14T07:23:44Z |
test/lib/ansible_test/_data/completion/remote.txt
|
alpine/3.18 python=3.11 become=doas_sudo provider=aws arch=x86_64
alpine become=doas_sudo provider=aws arch=x86_64
fedora/38 python=3.11 become=sudo provider=aws arch=x86_64
fedora become=sudo provider=aws arch=x86_64
freebsd/13.2 python=3.9,3.11 python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
freebsd python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
macos/13.2 python=3.11 python_dir=/usr/local/bin become=sudo provider=parallels arch=x86_64
macos python_dir=/usr/local/bin become=sudo provider=parallels arch=x86_64
rhel/9.2 python=3.9,3.11 become=sudo provider=aws arch=x86_64
rhel become=sudo provider=aws arch=x86_64
ubuntu/22.04 python=3.10 become=sudo provider=aws arch=x86_64
ubuntu become=sudo provider=aws arch=x86_64
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,659 |
no_log module argument processing can change 'changed' task field
|
##### SUMMARY
no_log module argument processing can change 'changed' task field, which is probably an undesirable side-effect
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
unsure, ansible core processing task responses
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible 2.10.6
ansible 2.7.7
```
##### CONFIGURATION
<!--- Paste verbatim output from "ansible-config dump --only-changed" between quotes -->
```paste below
```
##### OS / ENVIRONMENT
<!--- Provide all relevant information below, e.g. target OS versions, network device firmware, etc. -->
Debian Linux 10 Buster
##### STEPS TO REPRODUCE
<!--- Describe exactly how to reproduce the problem, using a minimal test-case -->
use simple playbook using single htpasswd module
<!--- Paste example playbooks or commands between quotes below -->
```yaml
- hosts: localhost
tasks:
- htpasswd:
path: /tmp/xx
username: a
password: a
```
<!--- HINT: You can paste gist.github.com links for larger files -->
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
* subsequent calls to `ansible-playbook playbook.yml` yields to task changed=False
OR
* general notice in ansible documentation of such behavior
##### ACTUAL RESULTS
<!--- Describe what actually happened. If possible run with extra verbosity (-vvvv) -->
* every call of the playbook results in task changed=True
* the module 'changed' return value itself is correct according to runtime debugging
* the error depends on the actual value of the `no_log` module argument
* probably, there's an undocumented side-effect in general no_log processing, where the edge case (value 'a') is found
in several places of the module response and replaced as requested. The issue is that the replacement takes place even in the task.changed field, which should not be touched (imho); see the sketch after this list
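A minimal sketch of the suspected mechanism (an assumption for illustration, not the actual no_log scrubbing code path): if scalar result values are stringified and checked for the no_log value as a substring, a secret of 'a' matches the text form of the boolean False and the whole field is replaced:
```python
# Assumption for illustration only -- not the real no_log scrubbing code.
# Stringifying scalars and doing a substring match explains why changed=False
# (whose text form "False" contains the secret "a") comes back as the
# VALUE_SPECIFIED_IN_NO_LOG_PARAMETER placeholder.
def scrub(value, no_log_values):
    text = str(value)
    for secret in no_log_values:
        if secret in text:
            return "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
    return value

print(scrub(False, {"a"}))  # VALUE_SPECIFIED_IN_NO_LOG_PARAMETER
print(scrub(True, {"a"}))   # True
```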
<!--- Paste verbatim command output between quotes -->
```paste below
TASK [htpasswd] **************************************************************************************************************************************************************************************************
task path: /a.yml:4
...
<127.0.0.1> EXEC /bin/sh -c '/usr/bin/python3 /root/.ansible/tmp/ansible-tmp-1613725466.4347775-207888894675995/AnsiballZ_htpasswd.py && sleep 0'
...
changed: [localhost] => {
"changed": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", <<<<<<<<<<<<<<<<<<<<<<<<
"invocation": {
"module_args": {
"attributes": null,
"backup": null,
"content": null,
"create": true,
"crypt_scheme": "********pr_md5_crypt",
"delimiter": null,
"directory_mode": null,
"follow": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"force": null,
"group": null,
"mode": null,
"name": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"owner": null,
"password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"path": "/tmp/xx",
"regexp": null,
"remote_src": null,
"selevel": null,
"serole": null,
"setype": null,
"seuser": null,
"src": null,
"state": "present",
"unsafe_writes": null,
"username": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
}
},
"msg": "******** ********lre********dy present"
}
```
module trace
```paste below
> /root/.ansible/tmp/ansible-tmp-1613724872.5574055-221454552474871/debug_dir/__main__.py(266)main()
-> module.exit_json(msg=msg, changed=changed)
(Pdb) l
261 else:
262 module.fail_json(msg="Invalid state: %s" % state)
263
264 check_file_attrs(module, changed, msg)
265 breakpoint()
266 -> module.exit_json(msg=msg, changed=changed)
267 except Exception as e:
268 module.fail_json(msg=to_native(e))
269
270
271 if __name__ == '__main__':
(Pdb) module
<ansible.module_utils.basic.AnsibleModule object at 0x7f50d5a34c50>
(Pdb) changed
False <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
(Pdb) msg
'a already present'
(Pdb) s
```
|
https://github.com/ansible/ansible/issues/73659
|
https://github.com/ansible/ansible/pull/82217
|
f42984eeb36b092678690e39cd74179f96c8d438
|
6e448edc63ecfdaf3f6ebb2e015e2d3c12dd1d95
| 2021-02-19T09:57:15Z |
python
| 2023-11-16T19:04:34Z |
changelogs/fragments/no_log_booly.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,659 |
no_log module argument processing can change 'changed' task field
|
##### SUMMARY
no_log module argument processing can change 'changed' task field, which is probably an undesirable side-effect
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
unsure, ansible core processing task responses
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible 2.10.6
ansible 2.7.7
```
##### CONFIGURATION
<!--- Paste verbatim output from "ansible-config dump --only-changed" between quotes -->
```paste below
```
##### OS / ENVIRONMENT
<!--- Provide all relevant information below, e.g. target OS versions, network device firmware, etc. -->
Debian Linux 10 Buster
##### STEPS TO REPRODUCE
<!--- Describe exactly how to reproduce the problem, using a minimal test-case -->
use simple playbook using single htpasswd module
<!--- Paste example playbooks or commands between quotes below -->
```yaml
- hosts: localhost
tasks:
- htpasswd:
path: /tmp/xx
username: a
password: a
```
<!--- HINT: You can paste gist.github.com links for larger files -->
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
* subsequent calls to `ansible-playbook playbook.yml` yields to task changed=False
OR
* general notice in ansible documentation of such behavior
##### ACTUAL RESULTS
<!--- Describe what actually happened. If possible run with extra verbosity (-vvvv) -->
* every call of the playbook results in task changed=True
* the module 'changed' return value itself is correct according to runtime debugging
* the error depends on the actual value of the `no_log` module argument
* probably, there's an undocumented side-effect in general no_log processing, where the edge case (value 'a') is found
in several places of the module response and replaced as requested. The issue is that the replacement takes place even in the task.changed field, which should not be touched (imho)
<!--- Paste verbatim command output between quotes -->
```paste below
TASK [htpasswd] **************************************************************************************************************************************************************************************************
task path: /a.yml:4
...
<127.0.0.1> EXEC /bin/sh -c '/usr/bin/python3 /root/.ansible/tmp/ansible-tmp-1613725466.4347775-207888894675995/AnsiballZ_htpasswd.py && sleep 0'
...
changed: [localhost] => {
"changed": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", <<<<<<<<<<<<<<<<<<<<<<<<
"invocation": {
"module_args": {
"attributes": null,
"backup": null,
"content": null,
"create": true,
"crypt_scheme": "********pr_md5_crypt",
"delimiter": null,
"directory_mode": null,
"follow": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"force": null,
"group": null,
"mode": null,
"name": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"owner": null,
"password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"path": "/tmp/xx",
"regexp": null,
"remote_src": null,
"selevel": null,
"serole": null,
"setype": null,
"seuser": null,
"src": null,
"state": "present",
"unsafe_writes": null,
"username": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
}
},
"msg": "******** ********lre********dy present"
}
```
module trace
```paste below
> /root/.ansible/tmp/ansible-tmp-1613724872.5574055-221454552474871/debug_dir/__main__.py(266)main()
-> module.exit_json(msg=msg, changed=changed)
(Pdb) l
261 else:
262 module.fail_json(msg="Invalid state: %s" % state)
263
264 check_file_attrs(module, changed, msg)
265 breakpoint()
266 -> module.exit_json(msg=msg, changed=changed)
267 except Exception as e:
268 module.fail_json(msg=to_native(e))
269
270
271 if __name__ == '__main__':
(Pdb) module
<ansible.module_utils.basic.AnsibleModule object at 0x7f50d5a34c50>
(Pdb) changed
False <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
(Pdb) msg
'a already present'
(Pdb) s
```
|
https://github.com/ansible/ansible/issues/73659
|
https://github.com/ansible/ansible/pull/82217
|
f42984eeb36b092678690e39cd74179f96c8d438
|
6e448edc63ecfdaf3f6ebb2e015e2d3c12dd1d95
| 2021-02-19T09:57:15Z |
python
| 2023-11-16T19:04:34Z |
lib/ansible/module_utils/basic.py
|
# Copyright (c), Michael DeHaan <[email protected]>, 2012-2013
# Copyright (c), Toshio Kuratomi <[email protected]> 2016
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
from __future__ import annotations
import json
import sys
# Used for determining if the system is running a new enough python version
# and should only restrict on our documented minimum versions
_PY_MIN = (3, 7)
if sys.version_info < _PY_MIN:
print(json.dumps(dict(
failed=True,
msg=f"ansible-core requires a minimum of Python version {'.'.join(map(str, _PY_MIN))}. Current version: {''.join(sys.version.splitlines())}",
)))
sys.exit(1)
# Ansible modules can be written in any language.
# The functions available here can be used to do many common tasks,
# to simplify development of Python modules.
import __main__
import atexit
import errno
import datetime
import grp
import fcntl
import locale
import os
import pwd
import platform
import re
import select
import selectors
import shlex
import shutil
import signal
import stat
import subprocess
import tempfile
import time
import traceback
import types
from itertools import chain, repeat
try:
import syslog
HAS_SYSLOG = True
except ImportError:
HAS_SYSLOG = False
try:
from systemd import journal, daemon as systemd_daemon
# Makes sure that systemd.journal has method sendv()
# Double check that journal has method sendv (some packages don't)
# check if the system is running under systemd
has_journal = hasattr(journal, 'sendv') and systemd_daemon.booted()
except (ImportError, AttributeError):
# AttributeError would be caused from use of .booted() if wrong systemd
has_journal = False
HAVE_SELINUX = False
try:
from ansible.module_utils.compat import selinux
HAVE_SELINUX = True
except ImportError:
pass
# Python2 & 3 way to get NoneType
NoneType = type(None)
from ._text import to_native, to_bytes, to_text
from ansible.module_utils.common.text.converters import (
jsonify,
container_to_bytes as json_dict_unicode_to_bytes,
container_to_text as json_dict_bytes_to_unicode,
)
from ansible.module_utils.common.arg_spec import ModuleArgumentSpecValidator
from ansible.module_utils.common.text.formatters import (
lenient_lowercase,
bytes_to_human,
human_to_bytes,
SIZE_RANGES,
)
import hashlib
def _get_available_hash_algorithms():
"""Return a dictionary of available hash function names and their associated function."""
try:
# Algorithms available in Python 2.7.9+ and Python 3.2+
# https://docs.python.org/2.7/library/hashlib.html#hashlib.algorithms_available
# https://docs.python.org/3.2/library/hashlib.html#hashlib.algorithms_available
algorithm_names = hashlib.algorithms_available
except AttributeError:
# Algorithms in Python 2.7.x (used only for Python 2.7.0 through 2.7.8)
# https://docs.python.org/2.7/library/hashlib.html#hashlib.hashlib.algorithms
algorithm_names = set(hashlib.algorithms)
algorithms = {}
for algorithm_name in algorithm_names:
algorithm_func = getattr(hashlib, algorithm_name, None)
if algorithm_func:
try:
# Make sure the algorithm is actually available for use.
# Not all algorithms listed as available are actually usable.
# For example, md5 is not available in FIPS mode.
algorithm_func()
except Exception:
pass
else:
algorithms[algorithm_name] = algorithm_func
return algorithms
AVAILABLE_HASH_ALGORITHMS = _get_available_hash_algorithms()
from ansible.module_utils.six.moves.collections_abc import (
KeysView,
Mapping, MutableMapping,
Sequence, MutableSequence,
Set, MutableSet,
)
from ansible.module_utils.common.locale import get_best_parsable_locale
from ansible.module_utils.common.process import get_bin_path
from ansible.module_utils.common.file import (
_PERM_BITS as PERM_BITS,
_EXEC_PERM_BITS as EXEC_PERM_BITS,
_DEFAULT_PERM as DEFAULT_PERM,
is_executable,
format_attributes,
get_flags_from_attributes,
FILE_ATTRIBUTES,
)
from ansible.module_utils.common.sys_info import (
get_distribution,
get_distribution_version,
get_platform_subclass,
)
from ansible.module_utils.common.parameters import (
env_fallback,
remove_values,
sanitize_keys,
DEFAULT_TYPE_VALIDATORS,
PASS_VARS,
PASS_BOOLS,
)
from ansible.module_utils.errors import AnsibleFallbackNotFound, AnsibleValidationErrorMultiple, UnsupportedError
from ansible.module_utils.six import (
PY2,
PY3,
b,
binary_type,
integer_types,
iteritems,
string_types,
text_type,
)
from ansible.module_utils.six.moves import map, reduce, shlex_quote
from ansible.module_utils.common.validation import (
check_missing_parameters,
safe_eval,
)
from ansible.module_utils.common._utils import get_all_subclasses as _get_all_subclasses
from ansible.module_utils.parsing.convert_bool import BOOLEANS, BOOLEANS_FALSE, BOOLEANS_TRUE, boolean
from ansible.module_utils.common.warnings import (
deprecate,
get_deprecation_messages,
get_warning_messages,
warn,
)
# Note: When getting Sequence from collections, it matches with strings. If
# this matters, make sure to check for strings before checking for sequencetype
SEQUENCETYPE = frozenset, KeysView, Sequence
PASSWORD_MATCH = re.compile(r'^(?:.+[-_\s])?pass(?:[-_\s]?(?:word|phrase|wrd|wd)?)(?:[-_\s].+)?$', re.I)
imap = map
try:
# Python 2
unicode # type: ignore[used-before-def] # pylint: disable=used-before-assignment
except NameError:
# Python 3
unicode = text_type
try:
# Python 2
basestring # type: ignore[used-before-def,has-type] # pylint: disable=used-before-assignment
except NameError:
# Python 3
basestring = string_types
# End of deprecated names
# Internal global holding passed in params. This is consulted in case
# multiple AnsibleModules are created. Otherwise each AnsibleModule would
# attempt to read from stdin. Other code should not use this directly as it
# is an internal implementation detail
_ANSIBLE_ARGS = None
FILE_COMMON_ARGUMENTS = dict(
# These are things we want. About setting metadata (mode, ownership, permissions in general) on
# created files (these are used by set_fs_attributes_if_different and included in
# load_file_common_arguments)
mode=dict(type='raw'),
owner=dict(type='str'),
group=dict(type='str'),
seuser=dict(type='str'),
serole=dict(type='str'),
selevel=dict(type='str'),
setype=dict(type='str'),
attributes=dict(type='str', aliases=['attr']),
unsafe_writes=dict(type='bool', default=False, fallback=(env_fallback, ['ANSIBLE_UNSAFE_WRITES'])), # should be available to any module using atomic_move
)
PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
# Used for parsing symbolic file perms
MODE_OPERATOR_RE = re.compile(r'[+=-]')
USERS_RE = re.compile(r'^[ugo]+$')
PERMS_RE = re.compile(r'^[rwxXstugo]*$')
#
# Deprecated functions
#
def get_platform():
'''
**Deprecated** Use :py:func:`platform.system` directly.
:returns: Name of the platform the module is running on in a native string
Returns a native string that labels the platform ("Linux", "Solaris", etc). Currently, this is
the result of calling :py:func:`platform.system`.
'''
return platform.system()
# End deprecated functions
#
# Compat shims
#
def load_platform_subclass(cls, *args, **kwargs):
"""**Deprecated**: Use ansible.module_utils.common.sys_info.get_platform_subclass instead"""
platform_cls = get_platform_subclass(cls)
return super(cls, platform_cls).__new__(platform_cls)
def get_all_subclasses(cls):
"""**Deprecated**: Use ansible.module_utils.common._utils.get_all_subclasses instead"""
return list(_get_all_subclasses(cls))
# End compat shims
def heuristic_log_sanitize(data, no_log_values=None):
''' Remove strings that look like passwords from log messages '''
# Currently filters:
# user:pass@foo/whatever and http://username:pass@wherever/foo
# This code has false positives and consumes parts of logs that are
# not passwds
# begin: start of a passwd containing string
# end: end of a passwd containing string
# sep: char between user and passwd
# prev_begin: where in the overall string to start a search for
# a passwd
# sep_search_end: where in the string to end a search for the sep
data = to_native(data)
output = []
begin = len(data)
prev_begin = begin
sep = 1
while sep:
# Find the potential end of a passwd
try:
end = data.rindex('@', 0, begin)
except ValueError:
# No passwd in the rest of the data
output.insert(0, data[0:begin])
break
# Search for the beginning of a passwd
sep = None
sep_search_end = end
while not sep:
# URL-style username+password
try:
begin = data.rindex('://', 0, sep_search_end)
except ValueError:
# No url style in the data, check for ssh style in the
# rest of the string
begin = 0
# Search for separator
try:
sep = data.index(':', begin + 3, end)
except ValueError:
# No separator; choices:
if begin == 0:
# Searched the whole string so there's no password
# here. Return the remaining data
output.insert(0, data[0:prev_begin])
break
# Search for a different beginning of the password field.
sep_search_end = begin
continue
if sep:
# Password was found; remove it.
output.insert(0, data[end:prev_begin])
output.insert(0, '********')
output.insert(0, data[begin:sep + 1])
prev_begin = begin
output = ''.join(output)
if no_log_values:
output = remove_values(output, no_log_values)
return output
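# Illustrative behaviour of the heuristic above (a rough sketch, not an exhaustive spec):
#
#     heuristic_log_sanitize('http://user:secret@example.com/path')
#     # -> 'http://user:********@example.com/path'
#
# Text that does not look like a user:password@host pair passes through unchanged,
# unless it also matches one of the supplied no_log_values.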
def _load_params():
    ''' read the module's parameters and store them globally.
    This function may be needed for certain very dynamic custom modules which
    want to process the parameters that are being handed to the module. Since
this is so closely tied to the implementation of modules we cannot
guarantee API stability for it (it may change between versions) however we
will try not to break it gratuitously. It is certainly more future-proof
to call this function and consume its outputs than to implement the logic
inside it as a copy in your own code.
'''
global _ANSIBLE_ARGS
if _ANSIBLE_ARGS is not None:
buffer = _ANSIBLE_ARGS
else:
# debug overrides to read args from file or cmdline
# Avoid tracebacks when locale is non-utf8
# We control the args and we pass them as utf8
if len(sys.argv) > 1:
if os.path.isfile(sys.argv[1]):
fd = open(sys.argv[1], 'rb')
buffer = fd.read()
fd.close()
else:
buffer = sys.argv[1]
if PY3:
buffer = buffer.encode('utf-8', errors='surrogateescape')
# default case, read from stdin
else:
if PY2:
buffer = sys.stdin.read()
else:
buffer = sys.stdin.buffer.read()
_ANSIBLE_ARGS = buffer
try:
params = json.loads(buffer.decode('utf-8'))
except ValueError:
# This helper is used too early for fail_json to work.
print('\n{"msg": "Error: Module unable to decode stdin/parameters as valid JSON. Unable to parse what parameters were passed", "failed": true}')
sys.exit(1)
if PY2:
params = json_dict_unicode_to_bytes(params)
try:
return params['ANSIBLE_MODULE_ARGS']
except KeyError:
# This helper does not have access to fail_json so we have to print
# json output on our own.
print('\n{"msg": "Error: Module unable to locate ANSIBLE_MODULE_ARGS in JSON data from stdin. Unable to figure out what parameters were passed", '
'"failed": true}')
sys.exit(1)
def missing_required_lib(library, reason=None, url=None):
hostname = platform.node()
msg = "Failed to import the required Python library (%s) on %s's Python %s." % (library, hostname, sys.executable)
if reason:
msg += " This is required %s." % reason
if url:
msg += " See %s for more info." % url
msg += (" Please read the module documentation and install it in the appropriate location."
" If the required library is installed, but Ansible is using the wrong Python interpreter,"
" please consult the documentation on ansible_python_interpreter")
return msg
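# A commonly seen pattern for the helper above in modules that depend on an optional
# Python library ('requests' is only an example name):
#
#     try:
#         import requests
#         HAS_REQUESTS = True
#     except ImportError:
#         HAS_REQUESTS = False
#
#     if not HAS_REQUESTS:
#         module.fail_json(msg=missing_required_lib('requests'),
#                          exception=traceback.format_exc())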
class AnsibleModule(object):
def __init__(self, argument_spec, bypass_checks=False, no_log=False,
mutually_exclusive=None, required_together=None,
required_one_of=None, add_file_common_args=False,
supports_check_mode=False, required_if=None, required_by=None):
'''
Common code for quickly building an ansible module in Python
(although you can write modules with anything that can return JSON).
See :ref:`developing_modules_general` for a general introduction
and :ref:`developing_program_flow_modules` for more detailed explanation.
'''
self._name = os.path.basename(__file__) # initialize name until we can parse from options
self.argument_spec = argument_spec
self.supports_check_mode = supports_check_mode
self.check_mode = False
self.bypass_checks = bypass_checks
self.no_log = no_log
self.mutually_exclusive = mutually_exclusive
self.required_together = required_together
self.required_one_of = required_one_of
self.required_if = required_if
self.required_by = required_by
self.cleanup_files = []
self._debug = False
self._diff = False
self._socket_path = None
self._shell = None
self._syslog_facility = 'LOG_USER'
self._verbosity = 0
# May be used to set modifications to the environment for any
# run_command invocation
self.run_command_environ_update = {}
self._clean = {}
self._string_conversion_action = ''
self.aliases = {}
self._legal_inputs = []
self._options_context = list()
self._tmpdir = None
if add_file_common_args:
for k, v in FILE_COMMON_ARGUMENTS.items():
if k not in self.argument_spec:
self.argument_spec[k] = v
# Save parameter values that should never be logged
self.no_log_values = set()
# check the locale as set by the current environment, and reset to
# a known valid (LANG=C) if it's an invalid/unavailable locale
self._check_locale()
self._load_params()
self._set_internal_properties()
self.validator = ModuleArgumentSpecValidator(self.argument_spec,
self.mutually_exclusive,
self.required_together,
self.required_one_of,
self.required_if,
self.required_by,
)
self.validation_result = self.validator.validate(self.params)
self.params.update(self.validation_result.validated_parameters)
self.no_log_values.update(self.validation_result._no_log_values)
self.aliases.update(self.validation_result._aliases)
try:
error = self.validation_result.errors[0]
if isinstance(error, UnsupportedError) and self._ignore_unknown_opts:
error = None
except IndexError:
error = None
# Fail for validation errors, even in check mode
if error:
msg = self.validation_result.errors.msg
if isinstance(error, UnsupportedError):
msg = "Unsupported parameters for ({name}) {kind}: {msg}".format(name=self._name, kind='module', msg=msg)
self.fail_json(msg=msg)
if self.check_mode and not self.supports_check_mode:
self.exit_json(skipped=True, msg="remote module (%s) does not support check mode" % self._name)
# This is for backwards compatibility only.
self._CHECK_ARGUMENT_TYPES_DISPATCHER = DEFAULT_TYPE_VALIDATORS
if not self.no_log:
self._log_invocation()
# selinux state caching
self._selinux_enabled = None
self._selinux_mls_enabled = None
self._selinux_initial_context = None
# finally, make sure we're in a sane working dir
self._set_cwd()
@property
def tmpdir(self):
# if _ansible_tmpdir was not set and we have a remote_tmp,
# the module needs to create it and clean it up once finished.
# otherwise we create our own module tmp dir from the system defaults
if self._tmpdir is None:
basedir = None
if self._remote_tmp is not None:
basedir = os.path.expanduser(os.path.expandvars(self._remote_tmp))
if basedir is not None and not os.path.exists(basedir):
try:
os.makedirs(basedir, mode=0o700)
except (OSError, IOError) as e:
self.warn("Unable to use %s as temporary directory, "
"failing back to system: %s" % (basedir, to_native(e)))
basedir = None
else:
self.warn("Module remote_tmp %s did not exist and was "
"created with a mode of 0700, this may cause"
" issues when running as another user. To "
"avoid this, create the remote_tmp dir with "
"the correct permissions manually" % basedir)
basefile = "ansible-moduletmp-%s-" % time.time()
try:
tmpdir = tempfile.mkdtemp(prefix=basefile, dir=basedir)
except (OSError, IOError) as e:
self.fail_json(
msg="Failed to create remote module tmp path at dir %s "
"with prefix %s: %s" % (basedir, basefile, to_native(e))
)
if not self._keep_remote_files:
atexit.register(shutil.rmtree, tmpdir)
self._tmpdir = tmpdir
return self._tmpdir
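    # Illustrative use of tmpdir: files a module stages itself are usually created
    # beneath it, e.g.
    #
    #     fd, staging_path = tempfile.mkstemp(dir=module.tmpdir)
    #
    # so they are removed automatically at exit unless keep_remote_files is in effect.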
def warn(self, warning):
warn(warning)
self.log('[WARNING] %s' % warning)
def deprecate(self, msg, version=None, date=None, collection_name=None):
if version is not None and date is not None:
raise AssertionError("implementation error -- version and date must not both be set")
deprecate(msg, version=version, date=date, collection_name=collection_name)
# For compatibility, we accept that neither version nor date is set,
# and treat that the same as if version would not have been set
if date is not None:
self.log('[DEPRECATION WARNING] %s %s' % (msg, date))
else:
self.log('[DEPRECATION WARNING] %s %s' % (msg, version))
def load_file_common_arguments(self, params, path=None):
'''
many modules deal with files, this encapsulates common
options that the file module accepts such that it is directly
available to all modules and they can share code.
Allows to overwrite the path/dest module argument by providing path.
'''
if path is None:
path = params.get('path', params.get('dest', None))
if path is None:
return {}
else:
path = os.path.expanduser(os.path.expandvars(path))
b_path = to_bytes(path, errors='surrogate_or_strict')
# if the path is a symlink, and we're following links, get
# the target of the link instead for testing
if params.get('follow', False) and os.path.islink(b_path):
b_path = os.path.realpath(b_path)
path = to_native(b_path)
mode = params.get('mode', None)
owner = params.get('owner', None)
group = params.get('group', None)
# selinux related options
seuser = params.get('seuser', None)
serole = params.get('serole', None)
setype = params.get('setype', None)
selevel = params.get('selevel', None)
secontext = [seuser, serole, setype]
if self.selinux_mls_enabled():
secontext.append(selevel)
default_secontext = self.selinux_default_context(path)
for i in range(len(default_secontext)):
if i is not None and secontext[i] == '_default':
secontext[i] = default_secontext[i]
attributes = params.get('attributes', None)
return dict(
path=path, mode=mode, owner=owner, group=group,
seuser=seuser, serole=serole, setype=setype,
selevel=selevel, secontext=secontext, attributes=attributes,
)
# Detect whether using selinux that is MLS-aware.
# While this means you can set the level/range with
# selinux.lsetfilecon(), it may or may not mean that you
# will get the selevel as part of the context returned
# by selinux.lgetfilecon().
def selinux_mls_enabled(self):
if self._selinux_mls_enabled is None:
self._selinux_mls_enabled = HAVE_SELINUX and selinux.is_selinux_mls_enabled() == 1
return self._selinux_mls_enabled
def selinux_enabled(self):
if self._selinux_enabled is None:
self._selinux_enabled = HAVE_SELINUX and selinux.is_selinux_enabled() == 1
return self._selinux_enabled
# Determine whether we need a placeholder for selevel/mls
def selinux_initial_context(self):
if self._selinux_initial_context is None:
self._selinux_initial_context = [None, None, None]
if self.selinux_mls_enabled():
self._selinux_initial_context.append(None)
return self._selinux_initial_context
# If selinux fails to find a default, return an array of None
def selinux_default_context(self, path, mode=0):
context = self.selinux_initial_context()
if not self.selinux_enabled():
return context
try:
ret = selinux.matchpathcon(to_native(path, errors='surrogate_or_strict'), mode)
except OSError:
return context
if ret[0] == -1:
return context
# Limit split to 4 because the selevel, the last in the list,
# may contain ':' characters
context = ret[1].split(':', 3)
return context
def selinux_context(self, path):
context = self.selinux_initial_context()
if not self.selinux_enabled():
return context
try:
ret = selinux.lgetfilecon_raw(to_native(path, errors='surrogate_or_strict'))
except OSError as e:
if e.errno == errno.ENOENT:
self.fail_json(path=path, msg='path %s does not exist' % path)
else:
self.fail_json(path=path, msg='failed to retrieve selinux context')
if ret[0] == -1:
return context
# Limit split to 4 because the selevel, the last in the list,
# may contain ':' characters
context = ret[1].split(':', 3)
return context
def user_and_group(self, path, expand=True):
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
st = os.lstat(b_path)
uid = st.st_uid
gid = st.st_gid
return (uid, gid)
def find_mount_point(self, path):
'''
Takes a path and returns its mount point
:param path: a string type with a filesystem path
:returns: the path to the mount point as a text type
'''
b_path = os.path.realpath(to_bytes(os.path.expanduser(os.path.expandvars(path)), errors='surrogate_or_strict'))
while not os.path.ismount(b_path):
b_path = os.path.dirname(b_path)
return to_text(b_path, errors='surrogate_or_strict')
def is_special_selinux_path(self, path):
"""
        Returns a tuple containing (True, selinux_context) if the given path is on an
        NFS or other 'special' fs mount point, otherwise the return will be (False, None).
"""
try:
f = open('/proc/mounts', 'r')
mount_data = f.readlines()
f.close()
except Exception:
return (False, None)
path_mount_point = self.find_mount_point(path)
for line in mount_data:
(device, mount_point, fstype, options, rest) = line.split(' ', 4)
if to_bytes(path_mount_point) == to_bytes(mount_point):
for fs in self._selinux_special_fs:
if fs in fstype:
special_context = self.selinux_context(path_mount_point)
return (True, special_context)
return (False, None)
def set_default_selinux_context(self, path, changed):
if not self.selinux_enabled():
return changed
context = self.selinux_default_context(path)
return self.set_context_if_different(path, context, False)
def set_context_if_different(self, path, context, changed, diff=None):
if not self.selinux_enabled():
return changed
if self.check_file_absent_if_check_mode(path):
return True
cur_context = self.selinux_context(path)
new_context = list(cur_context)
# Iterate over the current context instead of the
# argument context, which may have selevel.
(is_special_se, sp_context) = self.is_special_selinux_path(path)
if is_special_se:
new_context = sp_context
else:
for i in range(len(cur_context)):
if len(context) > i:
if context[i] is not None and context[i] != cur_context[i]:
new_context[i] = context[i]
elif context[i] is None:
new_context[i] = cur_context[i]
if cur_context != new_context:
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['secontext'] = cur_context
if 'after' not in diff:
diff['after'] = {}
diff['after']['secontext'] = new_context
try:
if self.check_mode:
return True
rc = selinux.lsetfilecon(to_native(path), ':'.join(new_context))
except OSError as e:
self.fail_json(path=path, msg='invalid selinux context: %s' % to_native(e),
new_context=new_context, cur_context=cur_context, input_was=context)
if rc != 0:
self.fail_json(path=path, msg='set selinux context failed')
changed = True
return changed
def set_owner_if_different(self, path, owner, changed, diff=None, expand=True):
if owner is None:
return changed
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
if self.check_file_absent_if_check_mode(b_path):
return True
orig_uid, orig_gid = self.user_and_group(b_path, expand)
try:
uid = int(owner)
except ValueError:
try:
uid = pwd.getpwnam(owner).pw_uid
except KeyError:
path = to_text(b_path)
self.fail_json(path=path, msg='chown failed: failed to look up user %s' % owner)
if orig_uid != uid:
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['owner'] = orig_uid
if 'after' not in diff:
diff['after'] = {}
diff['after']['owner'] = uid
if self.check_mode:
return True
try:
os.lchown(b_path, uid, -1)
except (IOError, OSError) as e:
path = to_text(b_path)
self.fail_json(path=path, msg='chown failed: %s' % (to_text(e)))
changed = True
return changed
def set_group_if_different(self, path, group, changed, diff=None, expand=True):
if group is None:
return changed
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
if self.check_file_absent_if_check_mode(b_path):
return True
orig_uid, orig_gid = self.user_and_group(b_path, expand)
try:
gid = int(group)
except ValueError:
try:
gid = grp.getgrnam(group).gr_gid
except KeyError:
path = to_text(b_path)
self.fail_json(path=path, msg='chgrp failed: failed to look up group %s' % group)
if orig_gid != gid:
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['group'] = orig_gid
if 'after' not in diff:
diff['after'] = {}
diff['after']['group'] = gid
if self.check_mode:
return True
try:
os.lchown(b_path, -1, gid)
except OSError:
path = to_text(b_path)
self.fail_json(path=path, msg='chgrp failed')
changed = True
return changed
def set_mode_if_different(self, path, mode, changed, diff=None, expand=True):
if mode is None:
return changed
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
if self.check_file_absent_if_check_mode(b_path):
return True
path_stat = os.lstat(b_path)
if not isinstance(mode, int):
try:
mode = int(mode, 8)
except Exception:
try:
mode = self._symbolic_mode_to_octal(path_stat, mode)
except Exception as e:
path = to_text(b_path)
self.fail_json(path=path,
msg="mode must be in octal or symbolic form",
details=to_native(e))
if mode != stat.S_IMODE(mode):
# prevent mode from having extra info or being invalid long number
path = to_text(b_path)
self.fail_json(path=path, msg="Invalid mode supplied, only permission info is allowed", details=mode)
prev_mode = stat.S_IMODE(path_stat.st_mode)
if prev_mode != mode:
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['mode'] = '0%03o' % prev_mode
if 'after' not in diff:
diff['after'] = {}
diff['after']['mode'] = '0%03o' % mode
if self.check_mode:
return True
# FIXME: comparison against string above will cause this to be executed
# every time
try:
if hasattr(os, 'lchmod'):
os.lchmod(b_path, mode)
else:
if not os.path.islink(b_path):
os.chmod(b_path, mode)
else:
# Attempt to set the perms of the symlink but be
# careful not to change the perms of the underlying
# file while trying
underlying_stat = os.stat(b_path)
os.chmod(b_path, mode)
new_underlying_stat = os.stat(b_path)
if underlying_stat.st_mode != new_underlying_stat.st_mode:
os.chmod(b_path, stat.S_IMODE(underlying_stat.st_mode))
except OSError as e:
if os.path.islink(b_path) and e.errno in (
errno.EACCES, # can't access symlink in sticky directory (stat)
errno.EPERM, # can't set mode on symbolic links (chmod)
errno.EROFS, # can't set mode on read-only filesystem
):
pass
elif e.errno in (errno.ENOENT, errno.ELOOP): # Can't set mode on broken symbolic links
pass
else:
raise
except Exception as e:
path = to_text(b_path)
self.fail_json(path=path, msg='chmod failed', details=to_native(e),
exception=traceback.format_exc())
path_stat = os.lstat(b_path)
new_mode = stat.S_IMODE(path_stat.st_mode)
if new_mode != prev_mode:
changed = True
return changed
def set_attributes_if_different(self, path, attributes, changed, diff=None, expand=True):
if attributes is None:
return changed
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
if self.check_file_absent_if_check_mode(b_path):
return True
existing = self.get_file_attributes(b_path, include_version=False)
attr_mod = '='
if attributes.startswith(('-', '+')):
attr_mod = attributes[0]
attributes = attributes[1:]
if existing.get('attr_flags', '') != attributes or attr_mod == '-':
attrcmd = self.get_bin_path('chattr')
if attrcmd:
attrcmd = [attrcmd, '%s%s' % (attr_mod, attributes), b_path]
changed = True
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['attributes'] = existing.get('attr_flags')
if 'after' not in diff:
diff['after'] = {}
diff['after']['attributes'] = '%s%s' % (attr_mod, attributes)
if not self.check_mode:
try:
rc, out, err = self.run_command(attrcmd)
if rc != 0 or err:
raise Exception("Error while setting attributes: %s" % (out + err))
except Exception as e:
self.fail_json(path=to_text(b_path), msg='chattr failed',
details=to_native(e), exception=traceback.format_exc())
return changed
def get_file_attributes(self, path, include_version=True):
output = {}
attrcmd = self.get_bin_path('lsattr', False)
if attrcmd:
flags = '-vd' if include_version else '-d'
attrcmd = [attrcmd, flags, path]
try:
rc, out, err = self.run_command(attrcmd)
if rc == 0:
res = out.split()
attr_flags_idx = 0
if include_version:
attr_flags_idx = 1
output['version'] = res[0].strip()
output['attr_flags'] = res[attr_flags_idx].replace('-', '').strip()
output['attributes'] = format_attributes(output['attr_flags'])
except Exception:
pass
return output
@classmethod
def _symbolic_mode_to_octal(cls, path_stat, symbolic_mode):
"""
This enables symbolic chmod string parsing as stated in the chmod man-page
This includes things like: "u=rw-x+X,g=r-x+X,o=r-x+X"
"""
new_mode = stat.S_IMODE(path_stat.st_mode)
# Now parse all symbolic modes
for mode in symbolic_mode.split(','):
# Per single mode. This always contains a '+', '-' or '='
# Split it on that
permlist = MODE_OPERATOR_RE.split(mode)
# And find all the operators
opers = MODE_OPERATOR_RE.findall(mode)
            # The user(s) the mode applies to is the first element in the
            # 'permlist' list. Take that and remove it from the list.
            # An empty user or 'a' means 'all'.
users = permlist.pop(0)
use_umask = (users == '')
if users == 'a' or users == '':
users = 'ugo'
# Check if there are illegal characters in the user list
# They can end up in 'users' because they are not split
if not USERS_RE.match(users):
raise ValueError("bad symbolic permission for mode: %s" % mode)
            # Now we have two lists of equal length: one contains the requested
            # permissions and the other the corresponding operators.
for idx, perms in enumerate(permlist):
# Check if there are illegal characters in the permissions
if not PERMS_RE.match(perms):
raise ValueError("bad symbolic permission for mode: %s" % mode)
for user in users:
mode_to_apply = cls._get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask, new_mode)
new_mode = cls._apply_operation_to_mode(user, opers[idx], mode_to_apply, new_mode)
return new_mode
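    # Worked examples for the symbolic-mode parser above (results depend on the current
    # mode and, for modes without an explicit user, on the umask): with an existing mode
    # of 0o644, 'u+x' yields 0o744 and 'g=r,o=' yields 0o640.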
@staticmethod
def _apply_operation_to_mode(user, operator, mode_to_apply, current_mode):
if operator == '=':
if user == 'u':
mask = stat.S_IRWXU | stat.S_ISUID
elif user == 'g':
mask = stat.S_IRWXG | stat.S_ISGID
elif user == 'o':
mask = stat.S_IRWXO | stat.S_ISVTX
# mask out u, g, or o permissions from current_mode and apply new permissions
inverse_mask = mask ^ PERM_BITS
new_mode = (current_mode & inverse_mask) | mode_to_apply
elif operator == '+':
new_mode = current_mode | mode_to_apply
elif operator == '-':
new_mode = current_mode - (current_mode & mode_to_apply)
return new_mode
@staticmethod
def _get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask, prev_mode=None):
if prev_mode is None:
prev_mode = stat.S_IMODE(path_stat.st_mode)
is_directory = stat.S_ISDIR(path_stat.st_mode)
has_x_permissions = (prev_mode & EXEC_PERM_BITS) > 0
apply_X_permission = is_directory or has_x_permissions
# Get the umask, if the 'user' part is empty, the effect is as if (a) were
# given, but bits that are set in the umask are not affected.
# We also need the "reversed umask" for masking
umask = os.umask(0)
os.umask(umask)
rev_umask = umask ^ PERM_BITS
# Permission bits constants documented at:
# https://docs.python.org/3/library/stat.html#stat.S_ISUID
if apply_X_permission:
X_perms = {
'u': {'X': stat.S_IXUSR},
'g': {'X': stat.S_IXGRP},
'o': {'X': stat.S_IXOTH},
}
else:
X_perms = {
'u': {'X': 0},
'g': {'X': 0},
'o': {'X': 0},
}
user_perms_to_modes = {
'u': {
'r': rev_umask & stat.S_IRUSR if use_umask else stat.S_IRUSR,
'w': rev_umask & stat.S_IWUSR if use_umask else stat.S_IWUSR,
'x': rev_umask & stat.S_IXUSR if use_umask else stat.S_IXUSR,
's': stat.S_ISUID,
't': 0,
'u': prev_mode & stat.S_IRWXU,
'g': (prev_mode & stat.S_IRWXG) << 3,
'o': (prev_mode & stat.S_IRWXO) << 6},
'g': {
'r': rev_umask & stat.S_IRGRP if use_umask else stat.S_IRGRP,
'w': rev_umask & stat.S_IWGRP if use_umask else stat.S_IWGRP,
'x': rev_umask & stat.S_IXGRP if use_umask else stat.S_IXGRP,
's': stat.S_ISGID,
't': 0,
'u': (prev_mode & stat.S_IRWXU) >> 3,
'g': prev_mode & stat.S_IRWXG,
'o': (prev_mode & stat.S_IRWXO) << 3},
'o': {
'r': rev_umask & stat.S_IROTH if use_umask else stat.S_IROTH,
'w': rev_umask & stat.S_IWOTH if use_umask else stat.S_IWOTH,
'x': rev_umask & stat.S_IXOTH if use_umask else stat.S_IXOTH,
's': 0,
't': stat.S_ISVTX,
'u': (prev_mode & stat.S_IRWXU) >> 6,
'g': (prev_mode & stat.S_IRWXG) >> 3,
'o': prev_mode & stat.S_IRWXO},
}
# Insert X_perms into user_perms_to_modes
for key, value in X_perms.items():
user_perms_to_modes[key].update(value)
def or_reduce(mode, perm):
return mode | user_perms_to_modes[user][perm]
return reduce(or_reduce, perms, 0)
def set_fs_attributes_if_different(self, file_args, changed, diff=None, expand=True):
# set modes owners and context as needed
changed = self.set_context_if_different(
file_args['path'], file_args['secontext'], changed, diff
)
changed = self.set_owner_if_different(
file_args['path'], file_args['owner'], changed, diff, expand
)
changed = self.set_group_if_different(
file_args['path'], file_args['group'], changed, diff, expand
)
changed = self.set_mode_if_different(
file_args['path'], file_args['mode'], changed, diff, expand
)
changed = self.set_attributes_if_different(
file_args['path'], file_args['attributes'], changed, diff, expand
)
return changed
def check_file_absent_if_check_mode(self, file_path):
return self.check_mode and not os.path.exists(file_path)
def set_directory_attributes_if_different(self, file_args, changed, diff=None, expand=True):
return self.set_fs_attributes_if_different(file_args, changed, diff, expand)
def set_file_attributes_if_different(self, file_args, changed, diff=None, expand=True):
return self.set_fs_attributes_if_different(file_args, changed, diff, expand)
def add_path_info(self, kwargs):
'''
for results that are files, supplement the info about the file
in the return path with stats about the file path.
'''
path = kwargs.get('path', kwargs.get('dest', None))
if path is None:
return kwargs
b_path = to_bytes(path, errors='surrogate_or_strict')
if os.path.exists(b_path):
(uid, gid) = self.user_and_group(path)
kwargs['uid'] = uid
kwargs['gid'] = gid
try:
user = pwd.getpwuid(uid)[0]
except KeyError:
user = str(uid)
try:
group = grp.getgrgid(gid)[0]
except KeyError:
group = str(gid)
kwargs['owner'] = user
kwargs['group'] = group
st = os.lstat(b_path)
kwargs['mode'] = '0%03o' % stat.S_IMODE(st[stat.ST_MODE])
# secontext not yet supported
if os.path.islink(b_path):
kwargs['state'] = 'link'
elif os.path.isdir(b_path):
kwargs['state'] = 'directory'
elif os.stat(b_path).st_nlink > 1:
kwargs['state'] = 'hard'
else:
kwargs['state'] = 'file'
if self.selinux_enabled():
kwargs['secontext'] = ':'.join(self.selinux_context(path))
kwargs['size'] = st[stat.ST_SIZE]
return kwargs
def _check_locale(self):
'''
Uses the locale module to test the currently set locale
(per the LANG and LC_CTYPE environment settings)
'''
try:
# setting the locale to '' uses the default locale
# as it would be returned by locale.getdefaultlocale()
locale.setlocale(locale.LC_ALL, '')
except locale.Error:
# fallback to the 'best' locale, per the function
# final fallback is 'C', which may cause unicode issues
# but is preferable to simply failing on unknown locale
best_locale = get_best_parsable_locale(self)
# need to set several since many tools choose to ignore documented precedence and scope
locale.setlocale(locale.LC_ALL, best_locale)
os.environ['LANG'] = best_locale
os.environ['LC_ALL'] = best_locale
os.environ['LC_MESSAGES'] = best_locale
except Exception as e:
self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" %
to_native(e), exception=traceback.format_exc())
def _set_internal_properties(self, argument_spec=None, module_parameters=None):
if argument_spec is None:
argument_spec = self.argument_spec
if module_parameters is None:
module_parameters = self.params
for k in PASS_VARS:
# handle setting internal properties from internal ansible vars
param_key = '_ansible_%s' % k
if param_key in module_parameters:
if k in PASS_BOOLS:
setattr(self, PASS_VARS[k][0], self.boolean(module_parameters[param_key]))
else:
setattr(self, PASS_VARS[k][0], module_parameters[param_key])
# clean up internal top level params:
if param_key in self.params:
del self.params[param_key]
else:
# use defaults if not already set
if not hasattr(self, PASS_VARS[k][0]):
setattr(self, PASS_VARS[k][0], PASS_VARS[k][1])
def safe_eval(self, value, locals=None, include_exceptions=False):
return safe_eval(value, locals, include_exceptions)
def _load_params(self):
''' read the input and set the params attribute.
This method is for backwards compatibility. The guts of the function
were moved out in 2.1 so that custom modules could read the parameters.
'''
# debug overrides to read args from file or cmdline
self.params = _load_params()
def _log_to_syslog(self, msg):
if HAS_SYSLOG:
try:
module = 'ansible-%s' % self._name
facility = getattr(syslog, self._syslog_facility, syslog.LOG_USER)
syslog.openlog(str(module), 0, facility)
syslog.syslog(syslog.LOG_INFO, msg)
except TypeError as e:
self.fail_json(
msg='Failed to log to syslog (%s). To proceed anyway, '
'disable syslog logging by setting no_target_syslog '
'to True in your Ansible config.' % to_native(e),
exception=traceback.format_exc(),
msg_to_log=msg,
)
def debug(self, msg):
if self._debug:
self.log('[debug] %s' % msg)
def log(self, msg, log_args=None):
if not self.no_log:
if log_args is None:
log_args = dict()
module = 'ansible-%s' % self._name
if isinstance(module, binary_type):
module = module.decode('utf-8', 'replace')
# 6655 - allow for accented characters
if not isinstance(msg, (binary_type, text_type)):
raise TypeError("msg should be a string (got %s)" % type(msg))
# We want journal to always take text type
# syslog takes bytes on py2, text type on py3
if isinstance(msg, binary_type):
journal_msg = remove_values(msg.decode('utf-8', 'replace'), self.no_log_values)
else:
# TODO: surrogateescape is a danger here on Py3
journal_msg = remove_values(msg, self.no_log_values)
if PY3:
syslog_msg = journal_msg
else:
syslog_msg = journal_msg.encode('utf-8', 'replace')
if has_journal:
journal_args = [("MODULE", os.path.basename(__file__))]
for arg in log_args:
name, value = (arg.upper(), str(log_args[arg]))
if name in (
'PRIORITY', 'MESSAGE', 'MESSAGE_ID',
'CODE_FILE', 'CODE_LINE', 'CODE_FUNC',
'SYSLOG_FACILITY', 'SYSLOG_IDENTIFIER',
'SYSLOG_PID',
):
name = "_%s" % name
journal_args.append((name, value))
try:
if HAS_SYSLOG:
# If syslog_facility specified, it needs to convert
# from the facility name to the facility code, and
# set it as SYSLOG_FACILITY argument of journal.send()
facility = getattr(syslog,
self._syslog_facility,
syslog.LOG_USER) >> 3
journal.send(MESSAGE=u"%s %s" % (module, journal_msg),
SYSLOG_FACILITY=facility,
**dict(journal_args))
else:
journal.send(MESSAGE=u"%s %s" % (module, journal_msg),
**dict(journal_args))
except IOError:
# fall back to syslog since logging to journal failed
self._log_to_syslog(syslog_msg)
else:
self._log_to_syslog(syslog_msg)
def _log_invocation(self):
''' log that ansible ran the module '''
# TODO: generalize a separate log function and make log_invocation use it
# Sanitize possible password argument when logging.
log_args = dict()
for param in self.params:
canon = self.aliases.get(param, param)
arg_opts = self.argument_spec.get(canon, {})
no_log = arg_opts.get('no_log', None)
# try to proactively capture password/passphrase fields
if no_log is None and PASSWORD_MATCH.search(param):
log_args[param] = 'NOT_LOGGING_PASSWORD'
self.warn('Module did not set no_log for %s' % param)
elif self.boolean(no_log):
log_args[param] = 'NOT_LOGGING_PARAMETER'
else:
param_val = self.params[param]
if not isinstance(param_val, (text_type, binary_type)):
param_val = str(param_val)
elif isinstance(param_val, text_type):
param_val = param_val.encode('utf-8')
log_args[param] = heuristic_log_sanitize(param_val, self.no_log_values)
msg = ['%s=%s' % (to_native(arg), to_native(val)) for arg, val in log_args.items()]
if msg:
msg = 'Invoked with %s' % ' '.join(msg)
else:
msg = 'Invoked'
self.log(msg, log_args=log_args)
def _set_cwd(self):
try:
cwd = os.getcwd()
if not os.access(cwd, os.F_OK | os.R_OK):
raise Exception()
return cwd
except Exception:
# we don't have access to the cwd, probably because of sudo.
# Try and move to a neutral location to prevent errors
for cwd in [self.tmpdir, os.path.expandvars('$HOME'), tempfile.gettempdir()]:
try:
if os.access(cwd, os.F_OK | os.R_OK):
os.chdir(cwd)
return cwd
except Exception:
pass
# we won't error here, as it may *not* be a problem,
# and we don't want to break modules unnecessarily
return None
def get_bin_path(self, arg, required=False, opt_dirs=None):
'''
Find system executable in PATH.
:param arg: The executable to find.
:param required: if executable is not found and required is ``True``, fail_json
:param opt_dirs: optional list of directories to search in addition to ``PATH``
:returns: if found return full path; otherwise return None
'''
bin_path = None
try:
bin_path = get_bin_path(arg=arg, opt_dirs=opt_dirs)
except ValueError as e:
if required:
self.fail_json(msg=to_text(e))
else:
return bin_path
return bin_path
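    # Small usage sketch for get_bin_path (the executable name is illustrative):
    #
    #     git_path = module.get_bin_path('git', required=True)
    #     rc, out, err = module.run_command([git_path, '--version'])
    #
    # With required=True a missing executable calls fail_json instead of returning None.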
def boolean(self, arg):
'''Convert the argument to a boolean'''
if arg is None:
return arg
try:
return boolean(arg)
except TypeError as e:
self.fail_json(msg=to_native(e))
def jsonify(self, data):
try:
return jsonify(data)
except UnicodeError as e:
self.fail_json(msg=to_text(e))
def from_json(self, data):
return json.loads(data)
def add_cleanup_file(self, path):
if path not in self.cleanup_files:
self.cleanup_files.append(path)
def do_cleanup_files(self):
for path in self.cleanup_files:
self.cleanup(path)
def _return_formatted(self, kwargs):
self.add_path_info(kwargs)
if 'invocation' not in kwargs:
kwargs['invocation'] = {'module_args': self.params}
if 'warnings' in kwargs:
if isinstance(kwargs['warnings'], list):
for w in kwargs['warnings']:
self.warn(w)
else:
self.warn(kwargs['warnings'])
warnings = get_warning_messages()
if warnings:
kwargs['warnings'] = warnings
if 'deprecations' in kwargs:
if isinstance(kwargs['deprecations'], list):
for d in kwargs['deprecations']:
if isinstance(d, SEQUENCETYPE) and len(d) == 2:
self.deprecate(d[0], version=d[1])
elif isinstance(d, Mapping):
self.deprecate(d['msg'], version=d.get('version'), date=d.get('date'),
collection_name=d.get('collection_name'))
else:
self.deprecate(d) # pylint: disable=ansible-deprecated-no-version
else:
self.deprecate(kwargs['deprecations']) # pylint: disable=ansible-deprecated-no-version
deprecations = get_deprecation_messages()
if deprecations:
kwargs['deprecations'] = deprecations
kwargs = remove_values(kwargs, self.no_log_values)
print('\n%s' % self.jsonify(kwargs))
def exit_json(self, **kwargs):
''' return from the module, without error '''
self.do_cleanup_files()
self._return_formatted(kwargs)
sys.exit(0)
def fail_json(self, msg, **kwargs):
''' return from the module, with an error message '''
kwargs['failed'] = True
kwargs['msg'] = msg
# Add traceback if debug or high verbosity and it is missing
# NOTE: Badly named as exception, it really always has been a traceback
if 'exception' not in kwargs and sys.exc_info()[2] and (self._debug or self._verbosity >= 3):
if PY2:
# On Python 2 this is the last (stack frame) exception and as such may be unrelated to the failure
kwargs['exception'] = 'WARNING: The below traceback may *not* be related to the actual failure.\n' +\
''.join(traceback.format_tb(sys.exc_info()[2]))
else:
kwargs['exception'] = ''.join(traceback.format_tb(sys.exc_info()[2]))
self.do_cleanup_files()
self._return_formatted(kwargs)
sys.exit(1)
def fail_on_missing_params(self, required_params=None):
if not required_params:
return
try:
check_missing_parameters(self.params, required_params)
except TypeError as e:
self.fail_json(msg=to_native(e))
def digest_from_file(self, filename, algorithm):
''' Return hex digest of local file for a digest_method specified by name, or None if file is not present. '''
b_filename = to_bytes(filename, errors='surrogate_or_strict')
if not os.path.exists(b_filename):
return None
if os.path.isdir(b_filename):
self.fail_json(msg="attempted to take checksum of directory: %s" % filename)
# preserve old behaviour where the third parameter was a hash algorithm object
if hasattr(algorithm, 'hexdigest'):
digest_method = algorithm
else:
try:
digest_method = AVAILABLE_HASH_ALGORITHMS[algorithm]()
except KeyError:
self.fail_json(msg="Could not hash file '%s' with algorithm '%s'. Available algorithms: %s" %
(filename, algorithm, ', '.join(AVAILABLE_HASH_ALGORITHMS)))
blocksize = 64 * 1024
infile = open(os.path.realpath(b_filename), 'rb')
block = infile.read(blocksize)
while block:
digest_method.update(block)
block = infile.read(blocksize)
infile.close()
return digest_method.hexdigest()
def md5(self, filename):
''' Return MD5 hex digest of local file using digest_from_file().
Do not use this function unless you have no other choice for:
1) Optional backwards compatibility
2) Compatibility with a third party protocol
This function will not work on systems complying with FIPS-140-2.
Most uses of this function can use the module.sha1 function instead.
'''
if 'md5' not in AVAILABLE_HASH_ALGORITHMS:
raise ValueError('MD5 not available. Possibly running in FIPS mode')
return self.digest_from_file(filename, 'md5')
def sha1(self, filename):
''' Return SHA1 hex digest of local file using digest_from_file(). '''
return self.digest_from_file(filename, 'sha1')
def sha256(self, filename):
''' Return SHA-256 hex digest of local file using digest_from_file(). '''
return self.digest_from_file(filename, 'sha256')
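    # Hedged example for the checksum helpers above (the path is illustrative):
    #
    #     checksum = module.sha256('/etc/hosts')  # hex digest, or None if the file is absent
    #
    # md5() is unavailable on FIPS-140-2 compliant systems and raises ValueError there.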
def backup_local(self, fn):
        '''make a date-marked backup of the specified file; returns the path to the backup, or an empty string if the file does not exist'''
backupdest = ''
if os.path.exists(fn):
# backups named basename.PID.YYYY-MM-DD@HH:MM:SS~
ext = time.strftime("%Y-%m-%d@%H:%M:%S~", time.localtime(time.time()))
backupdest = '%s.%s.%s' % (fn, os.getpid(), ext)
try:
self.preserved_copy(fn, backupdest)
except (shutil.Error, IOError) as e:
self.fail_json(msg='Could not make backup of %s to %s: %s' % (fn, backupdest, to_native(e)))
return backupdest
def cleanup(self, tmpfile):
if os.path.exists(tmpfile):
try:
os.unlink(tmpfile)
except OSError as e:
sys.stderr.write("could not cleanup %s: %s" % (tmpfile, to_native(e)))
def preserved_copy(self, src, dest):
"""Copy a file with preserved ownership, permissions and context"""
# shutil.copy2(src, dst)
# Similar to shutil.copy(), but metadata is copied as well - in fact,
# this is just shutil.copy() followed by copystat(). This is similar
# to the Unix command cp -p.
#
# shutil.copystat(src, dst)
# Copy the permission bits, last access time, last modification time,
# and flags from src to dst. The file contents, owner, and group are
# unaffected. src and dst are path names given as strings.
shutil.copy2(src, dest)
# Set the context
if self.selinux_enabled():
context = self.selinux_context(src)
self.set_context_if_different(dest, context, False)
# chown it
try:
dest_stat = os.stat(src)
tmp_stat = os.stat(dest)
if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
os.chown(dest, dest_stat.st_uid, dest_stat.st_gid)
except OSError as e:
if e.errno != errno.EPERM:
raise
# Set the attributes
current_attribs = self.get_file_attributes(src, include_version=False)
current_attribs = current_attribs.get('attr_flags', '')
self.set_attributes_if_different(dest, current_attribs, True)
def atomic_move(self, src, dest, unsafe_writes=False):
        '''atomically move src to dest, copying attributes from dest; fails the module on error.
        It uses os.rename, which is an atomic operation; the rest of the function works around
        limitations and corner cases and ensures the selinux context is preserved if possible'''
context = None
dest_stat = None
b_src = to_bytes(src, errors='surrogate_or_strict')
b_dest = to_bytes(dest, errors='surrogate_or_strict')
if os.path.exists(b_dest):
try:
dest_stat = os.stat(b_dest)
# copy mode and ownership
os.chmod(b_src, dest_stat.st_mode & PERM_BITS)
os.chown(b_src, dest_stat.st_uid, dest_stat.st_gid)
# try to copy flags if possible
if hasattr(os, 'chflags') and hasattr(dest_stat, 'st_flags'):
try:
os.chflags(b_src, dest_stat.st_flags)
except OSError as e:
for err in 'EOPNOTSUPP', 'ENOTSUP':
if hasattr(errno, err) and e.errno == getattr(errno, err):
break
else:
raise
except OSError as e:
if e.errno != errno.EPERM:
raise
if self.selinux_enabled():
context = self.selinux_context(dest)
else:
if self.selinux_enabled():
context = self.selinux_default_context(dest)
creating = not os.path.exists(b_dest)
try:
# Optimistically try a rename, solves some corner cases and can avoid useless work, throws exception if not atomic.
os.rename(b_src, b_dest)
except (IOError, OSError) as e:
if e.errno not in [errno.EPERM, errno.EXDEV, errno.EACCES, errno.ETXTBSY, errno.EBUSY]:
# only try workarounds for errno 18 (cross device), 1 (not permitted), 13 (permission denied)
# and 26 (text file busy) which happens on vagrant synced folders and other 'exotic' non posix file systems
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, to_native(e)), exception=traceback.format_exc())
else:
# Use bytes here. In the shippable CI, this fails with
# a UnicodeError with surrogateescape'd strings for an unknown
# reason (doesn't happen in a local Ubuntu16.04 VM)
b_dest_dir = os.path.dirname(b_dest)
b_suffix = os.path.basename(b_dest)
error_msg = None
tmp_dest_name = None
try:
tmp_dest_fd, tmp_dest_name = tempfile.mkstemp(prefix=b'.ansible_tmp', dir=b_dest_dir, suffix=b_suffix)
except (OSError, IOError) as e:
error_msg = 'The destination directory (%s) is not writable by the current user. Error was: %s' % (os.path.dirname(dest), to_native(e))
finally:
if error_msg:
if unsafe_writes:
self._unsafe_writes(b_src, b_dest)
else:
self.fail_json(msg=error_msg, exception=traceback.format_exc())
if tmp_dest_name:
b_tmp_dest_name = to_bytes(tmp_dest_name, errors='surrogate_or_strict')
try:
try:
# close tmp file handle before file operations to prevent text file busy errors on vboxfs synced folders (windows host)
os.close(tmp_dest_fd)
# leaves tmp file behind when sudo and not root
try:
shutil.move(b_src, b_tmp_dest_name)
except OSError:
# cleanup will happen by 'rm' of tmpdir
# copy2 will preserve some metadata
shutil.copy2(b_src, b_tmp_dest_name)
if self.selinux_enabled():
self.set_context_if_different(
b_tmp_dest_name, context, False)
try:
tmp_stat = os.stat(b_tmp_dest_name)
if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
os.chown(b_tmp_dest_name, dest_stat.st_uid, dest_stat.st_gid)
except OSError as e:
if e.errno != errno.EPERM:
raise
try:
os.rename(b_tmp_dest_name, b_dest)
except (shutil.Error, OSError, IOError) as e:
if unsafe_writes and e.errno == errno.EBUSY:
self._unsafe_writes(b_tmp_dest_name, b_dest)
else:
                                    self.fail_json(msg='Unable to move %s to %s, failed final rename from %s: %s' %
(src, dest, b_tmp_dest_name, to_native(e)), exception=traceback.format_exc())
except (shutil.Error, OSError, IOError) as e:
if unsafe_writes:
self._unsafe_writes(b_src, b_dest)
else:
self.fail_json(msg='Failed to replace file: %s to %s: %s' % (src, dest, to_native(e)), exception=traceback.format_exc())
finally:
self.cleanup(b_tmp_dest_name)
if creating:
# make sure the file has the correct permissions
# based on the current value of umask
umask = os.umask(0)
os.umask(umask)
os.chmod(b_dest, DEFAULT_PERM & ~umask)
try:
os.chown(b_dest, os.geteuid(), os.getegid())
except OSError:
# We're okay with trying our best here. If the user is not
# root (or old Unices) they won't be able to chown.
pass
if self.selinux_enabled():
# rename might not preserve context
self.set_context_if_different(dest, context, False)
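    # Typical calling pattern for atomic_move (tmpfile/dest names are illustrative): the
    # module writes its output to a temporary file first and then renames it over dest,
    # so readers never observe a partially written file:
    #
    #     module.atomic_move(tmpfile, dest,
    #                        unsafe_writes=module.params.get('unsafe_writes', False))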
def _unsafe_writes(self, src, dest):
        # Sadly there are some situations where we cannot ensure atomicity; only if
        # the user insists and we get the appropriate error do we update the file unsafely
try:
out_dest = in_src = None
try:
out_dest = open(dest, 'wb')
in_src = open(src, 'rb')
shutil.copyfileobj(in_src, out_dest)
finally: # assuring closed files in 2.4 compatible way
if out_dest:
out_dest.close()
if in_src:
in_src.close()
except (shutil.Error, OSError, IOError) as e:
self.fail_json(msg='Could not write data to file (%s) from (%s): %s' % (dest, src, to_native(e)),
exception=traceback.format_exc())
def _clean_args(self, args):
if not self._clean:
# create a printable version of the command for use in reporting later,
# which strips out things like passwords from the args list
to_clean_args = args
if PY2:
if isinstance(args, text_type):
to_clean_args = to_bytes(args)
else:
if isinstance(args, binary_type):
to_clean_args = to_text(args)
if isinstance(args, (text_type, binary_type)):
to_clean_args = shlex.split(to_clean_args)
clean_args = []
is_passwd = False
for arg in (to_native(a) for a in to_clean_args):
if is_passwd:
is_passwd = False
clean_args.append('********')
continue
if PASSWD_ARG_RE.match(arg):
sep_idx = arg.find('=')
if sep_idx > -1:
clean_args.append('%s=********' % arg[:sep_idx])
continue
else:
is_passwd = True
arg = heuristic_log_sanitize(arg, self.no_log_values)
clean_args.append(arg)
self._clean = ' '.join(shlex_quote(arg) for arg in clean_args)
return self._clean
def _restore_signal_handlers(self):
# Reset SIGPIPE to SIG_DFL, otherwise in Python2.7 it gets ignored in subprocesses.
if PY2 and sys.platform != 'win32':
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None,
use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None, encoding='utf-8', errors='surrogate_or_strict',
expand_user_and_vars=True, pass_fds=None, before_communicate_callback=None, ignore_invalid_cwd=True, handle_exceptions=True):
'''
Execute a command, returns rc, stdout, and stderr.
The mechanism of this method for reading stdout and stderr differs from
that of CPython subprocess.Popen.communicate, in that this method will
stop reading once the spawned command has exited and stdout and stderr
have been consumed, as opposed to waiting until stdout/stderr are
closed. This can be an important distinction, when taken into account
that a forked or backgrounded process may hold stdout or stderr open
for longer than the spawned command.
:arg args: is the command to run
* If args is a list, the command will be run with shell=False.
* If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False
* If args is a string and use_unsafe_shell=True it runs with shell=True.
:kw check_rc: Whether to call fail_json in case of non zero RC.
Default False
:kw close_fds: See documentation for subprocess.Popen(). Default True
:kw executable: See documentation for subprocess.Popen(). Default None
:kw data: If given, information to write to the stdin of the command
:kw binary_data: If False, append a newline to the data. Default False
:kw path_prefix: If given, additional path to find the command in.
This adds to the PATH environment variable so helper commands in
the same directory can also be found
:kw cwd: If given, working directory to run the command inside
:kw use_unsafe_shell: See `args` parameter. Default False
:kw prompt_regex: Regex string (not a compiled regex) which can be
used to detect prompts in the stdout which would otherwise cause
the execution to hang (especially if no input data is specified)
:kw environ_update: dictionary to *update* environ variables with
:kw umask: Umask to be used when running the command. Default None
:kw encoding: Since we return native strings, on python3 we need to
know the encoding to use to transform from bytes to text. If you
want to always get bytes back, use encoding=None. The default is
"utf-8". This does not affect transformation of strings given as
args.
:kw errors: Since we return native strings, on python3 we need to
transform stdout and stderr from bytes to text. If the bytes are
undecodable in the ``encoding`` specified, then use this error
handler to deal with them. The default is ``surrogate_or_strict``
which means that the bytes will be decoded using the
surrogateescape error handler if available (available on all
python3 versions we support) otherwise a UnicodeError traceback
will be raised. This does not affect transformations of strings
given as args.
:kw expand_user_and_vars: When ``use_unsafe_shell=False`` this argument
dictates whether ``~`` is expanded in paths and environment variables
are expanded before running the command. When ``True`` a string such as
``$SHELL`` will be expanded regardless of escaping. When ``False`` and
``use_unsafe_shell=False`` no path or variable expansion will be done.
:kw pass_fds: When running on Python 3 this argument
dictates which file descriptors should be passed
to an underlying ``Popen`` constructor. On Python 2, this will
set ``close_fds`` to False.
:kw before_communicate_callback: This function will be called
after ``Popen`` object will be created
but before communicating to the process.
(``Popen`` object will be passed to callback as a first argument)
:kw ignore_invalid_cwd: This flag indicates whether an invalid ``cwd``
(non-existent or not a directory) should be ignored or should raise
an exception.
:kw handle_exceptions: This flag indicates whether an exception will
be handled inline and issue a failed_json or if the caller should
handle it.
:returns: A 3-tuple of return code (integer), stdout (native string),
and stderr (native string). On python2, stdout and stderr are both
byte strings. On python3, stdout and stderr are text strings converted
according to the encoding and errors parameters. If you want byte
strings on python3, use encoding=None to turn decoding to text off.
'''
# used by clean args later on
self._clean = None
if not isinstance(args, (list, binary_type, text_type)):
msg = "Argument 'args' to run_command must be list or string"
self.fail_json(rc=257, cmd=args, msg=msg)
shell = False
if use_unsafe_shell:
# stringify args for unsafe/direct shell usage
if isinstance(args, list):
args = b" ".join([to_bytes(shlex_quote(x), errors='surrogate_or_strict') for x in args])
else:
args = to_bytes(args, errors='surrogate_or_strict')
# not set explicitly, check if set by controller
if executable:
executable = to_bytes(executable, errors='surrogate_or_strict')
args = [executable, b'-c', args]
elif self._shell not in (None, '/bin/sh'):
args = [to_bytes(self._shell, errors='surrogate_or_strict'), b'-c', args]
else:
shell = True
else:
# ensure args are a list
if isinstance(args, (binary_type, text_type)):
# On python2.6 and below, shlex has problems with text type
# On python3, shlex needs a text type.
if PY2:
args = to_bytes(args, errors='surrogate_or_strict')
elif PY3:
args = to_text(args, errors='surrogateescape')
args = shlex.split(args)
# expand ``~`` in paths, and all environment vars
if expand_user_and_vars:
args = [to_bytes(os.path.expanduser(os.path.expandvars(x)), errors='surrogate_or_strict') for x in args if x is not None]
else:
args = [to_bytes(x, errors='surrogate_or_strict') for x in args if x is not None]
prompt_re = None
if prompt_regex:
if isinstance(prompt_regex, text_type):
if PY3:
prompt_regex = to_bytes(prompt_regex, errors='surrogateescape')
elif PY2:
prompt_regex = to_bytes(prompt_regex, errors='surrogate_or_strict')
try:
prompt_re = re.compile(prompt_regex, re.MULTILINE)
except re.error:
self.fail_json(msg="invalid prompt regular expression given to run_command")
rc = 0
msg = None
st_in = None
env = os.environ.copy()
# We can set this from both an attribute and per call
env.update(self.run_command_environ_update or {})
env.update(environ_update or {})
if path_prefix:
path = env.get('PATH', '')
if path:
env['PATH'] = "%s:%s" % (path_prefix, path)
else:
env['PATH'] = path_prefix
# If using test-module.py and explode, the remote lib path will resemble:
# /tmp/test_module_scratch/debug_dir/ansible/module_utils/basic.py
# If using ansible or ansible-playbook with a remote system:
# /tmp/ansible_vmweLQ/ansible_modlib.zip/ansible/module_utils/basic.py
# Clean out python paths set by ansiballz
if 'PYTHONPATH' in env:
pypaths = [x for x in env['PYTHONPATH'].split(':')
if x and
not x.endswith('/ansible_modlib.zip') and
not x.endswith('/debug_dir')]
if pypaths and any(pypaths):
env['PYTHONPATH'] = ':'.join(pypaths)
if data:
st_in = subprocess.PIPE
def preexec():
self._restore_signal_handlers()
if umask:
os.umask(umask)
kwargs = dict(
executable=executable,
shell=shell,
close_fds=close_fds,
stdin=st_in,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
preexec_fn=preexec,
env=env,
)
if PY3 and pass_fds:
kwargs["pass_fds"] = pass_fds
elif PY2 and pass_fds:
kwargs['close_fds'] = False
# make sure we're in the right working directory
if cwd:
cwd = to_bytes(os.path.abspath(os.path.expanduser(cwd)), errors='surrogate_or_strict')
if os.path.isdir(cwd):
kwargs['cwd'] = cwd
elif not ignore_invalid_cwd:
self.fail_json(msg="Provided cwd is not a valid directory: %s" % cwd)
try:
if self._debug:
self.log('Executing: ' + self._clean_args(args))
cmd = subprocess.Popen(args, **kwargs)
if before_communicate_callback:
before_communicate_callback(cmd)
stdout = b''
stderr = b''
# Mirror the CPython subprocess logic and preference for the selector to use.
# poll/select have the advantage of not requiring any extra file
# descriptor, contrarily to epoll/kqueue (also, they require a single
# syscall).
if hasattr(selectors, 'PollSelector'):
selector = selectors.PollSelector()
else:
selector = selectors.SelectSelector()
if data:
if not binary_data:
data += '\n'
if isinstance(data, text_type):
data = to_bytes(data)
selector.register(cmd.stdout, selectors.EVENT_READ)
selector.register(cmd.stderr, selectors.EVENT_READ)
if os.name == 'posix':
fcntl.fcntl(cmd.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(cmd.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
fcntl.fcntl(cmd.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(cmd.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
if data:
cmd.stdin.write(data)
cmd.stdin.close()
while True:
# A timeout of 1 is both a little short and a little long.
# With None we could deadlock, with a lower value we would
# waste cycles. As it is, this is a mild inconvenience if
# we need to exit, and likely doesn't waste too many cycles
events = selector.select(1)
stdout_changed = False
for key, event in events:
b_chunk = key.fileobj.read(32768)
if not b_chunk:
selector.unregister(key.fileobj)
elif key.fileobj == cmd.stdout:
stdout += b_chunk
stdout_changed = True
elif key.fileobj == cmd.stderr:
stderr += b_chunk
# if we're checking for prompts, do it now, but only if stdout
# actually changed since the last loop
if prompt_re and stdout_changed and prompt_re.search(stdout) and not data:
if encoding:
stdout = to_native(stdout, encoding=encoding, errors=errors)
return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
# break out if no pipes are left to read or the pipes are completely read
# and the process is terminated
if (not events or not selector.get_map()) and cmd.poll() is not None:
break
# No pipes are left to read but process is not yet terminated
# Only then it is safe to wait for the process to be finished
# NOTE: Actually cmd.poll() is always None here if no selectors are left
elif not selector.get_map() and cmd.poll() is None:
cmd.wait()
# The process is terminated. Since no pipes to read from are
# left, there is no need to call select() again.
break
cmd.stdout.close()
cmd.stderr.close()
selector.close()
rc = cmd.returncode
except (OSError, IOError) as e:
self.log("Error Executing CMD:%s Exception:%s" % (self._clean_args(args), to_native(e)))
if handle_exceptions:
self.fail_json(rc=e.errno, stdout=b'', stderr=b'', msg=to_native(e), cmd=self._clean_args(args))
else:
raise e
except Exception as e:
self.log("Error Executing CMD:%s Exception:%s" % (self._clean_args(args), to_native(traceback.format_exc())))
if handle_exceptions:
self.fail_json(rc=257, stdout=b'', stderr=b'', msg=to_native(e), exception=traceback.format_exc(), cmd=self._clean_args(args))
else:
raise e
if rc != 0 and check_rc:
msg = heuristic_log_sanitize(stderr.rstrip(), self.no_log_values)
self.fail_json(cmd=self._clean_args(args), rc=rc, stdout=stdout, stderr=stderr, msg=msg)
if encoding is not None:
return (rc, to_native(stdout, encoding=encoding, errors=errors),
to_native(stderr, encoding=encoding, errors=errors))
return (rc, stdout, stderr)
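    # Minimal run_command sketch (command and options are illustrative):
    #
    #     rc, out, err = module.run_command(['ls', '-l', '/tmp'], check_rc=True)
    #     rc, out, err = module.run_command('echo $HOME', use_unsafe_shell=True)
    #
    # With check_rc=True a non-zero return code fails the module automatically.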
def append_to_file(self, filename, str):
filename = os.path.expandvars(os.path.expanduser(filename))
fh = open(filename, 'a')
fh.write(str)
fh.close()
def bytes_to_human(self, size):
return bytes_to_human(size)
# for backwards compatibility
pretty_bytes = bytes_to_human
def human_to_bytes(self, number, isbits=False):
return human_to_bytes(number, isbits)
#
# Backwards compat
#
# In 2.0, moved from inside the module to the toplevel
is_executable = is_executable
@staticmethod
def get_buffer_size(fd):
try:
# 1032 == F_GETPIPE_SZ
buffer_size = fcntl.fcntl(fd, 1032)
except Exception:
try:
# not as exact as above, but should be good enough for most platforms that fail the previous call
buffer_size = select.PIPE_BUF
except Exception:
buffer_size = 9000 # use sane default JIC
return buffer_size
def get_module_path():
return os.path.dirname(os.path.realpath(__file__))
def __getattr__(importable_name):
"""Inject import-time deprecation warnings.
Specifically, for ``literal_eval()``, ``_literal_eval()``
and ``get_exception()``.
"""
if importable_name == 'get_exception':
deprecate(
msg=f'The `ansible.module_utils.basic.'
f'{importable_name}` function is deprecated.',
version='2.19',
)
from ansible.module_utils.pycompat24 import get_exception
return get_exception
if importable_name in {'literal_eval', '_literal_eval'}:
deprecate(
msg=f'The `ansible.module_utils.basic.'
f'{importable_name}` function is deprecated.',
version='2.19',
)
from ast import literal_eval
return literal_eval
raise AttributeError(
f'cannot import name {importable_name !r} '
f'has no attribute ({__file__ !s})',
)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,659 |
no_log module argument processing can change 'changed' task field
|
##### SUMMARY
no_log module argument processing can change 'changed' task field, which is probably an undesirable side-effect
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
unsure, ansible core processing task responses
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible 2.10.6
ansible 2.7.7
```
##### CONFIGURATION
<!--- Paste verbatim output from "ansible-config dump --only-changed" between quotes -->
```paste below
```
##### OS / ENVIRONMENT
<!--- Provide all relevant information below, e.g. target OS versions, network device firmware, etc. -->
Debian Linux 10 Buster
##### STEPS TO REPRODUCE
<!--- Describe exactly how to reproduce the problem, using a minimal test-case -->
use a simple playbook with a single htpasswd module task
<!--- Paste example playbooks or commands between quotes below -->
```yaml
- hosts: localhost
tasks:
- htpasswd:
path: /tmp/xx
username: a
password: a
```
<!--- HINT: You can paste gist.github.com links for larger files -->
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
* subsequent calls to `ansible-playbook playbook.yml` yield task changed=False
OR
* general notice in ansible documentation of such behavior
##### ACTUAL RESULTS
<!--- Describe what actually happened. If possible run with extra verbosity (-vvvv) -->
* every call of the playbook results in task changed=True
* the module 'changed' return value itself is correct according to runtime debugging
* the error depends on the actual value of the `no_log` module argument
* probably, there's an undocumented side-effect in general no_log processing, where the edge case (value 'a') is found
in several places of the module response and replaced as requested. The issue is that the replacement takes place even in the task.changed field, which should not be touched (imho); see the sketch below
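A hypothetical minimal sketch (not the actual ansible implementation) of how a string-based recursive scrub can clobber non-string fields: since 'a' is a substring of 'False', stringifying values before matching rewrites the boolean 'changed' as well.
```python
# Hypothetical illustration of the suspected side-effect, not the actual ansible code.
def naive_scrub(value, secrets, placeholder="VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"):
    if isinstance(value, dict):
        return {k: naive_scrub(v, secrets, placeholder) for k, v in value.items()}
    if isinstance(value, list):
        return [naive_scrub(v, secrets, placeholder) for v in value]
    # stringifying every value before matching is where the boolean gets clobbered:
    # 'a' is a substring of str(False) == 'False'
    if any(secret in str(value) for secret in secrets):
        return placeholder
    return value

result = {"changed": False, "msg": "a already present"}
print(naive_scrub(result, secrets={"a"}))
# {'changed': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'msg': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'}
```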
<!--- Paste verbatim command output between quotes -->
```paste below
TASK [htpasswd] **************************************************************************************************************************************************************************************************
task path: /a.yml:4
...
<127.0.0.1> EXEC /bin/sh -c '/usr/bin/python3 /root/.ansible/tmp/ansible-tmp-1613725466.4347775-207888894675995/AnsiballZ_htpasswd.py && sleep 0'
...
changed: [localhost] => {
"changed": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", <<<<<<<<<<<<<<<<<<<<<<<<
"invocation": {
"module_args": {
"attributes": null,
"backup": null,
"content": null,
"create": true,
"crypt_scheme": "********pr_md5_crypt",
"delimiter": null,
"directory_mode": null,
"follow": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"force": null,
"group": null,
"mode": null,
"name": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"owner": null,
"password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"path": "/tmp/xx",
"regexp": null,
"remote_src": null,
"selevel": null,
"serole": null,
"setype": null,
"seuser": null,
"src": null,
"state": "present",
"unsafe_writes": null,
"username": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
}
},
"msg": "******** ********lre********dy present"
}
```
module trace
```paste below
> /root/.ansible/tmp/ansible-tmp-1613724872.5574055-221454552474871/debug_dir/__main__.py(266)main()
-> module.exit_json(msg=msg, changed=changed)
(Pdb) l
261 else:
262 module.fail_json(msg="Invalid state: %s" % state)
263
264 check_file_attrs(module, changed, msg)
265 breakpoint()
266 -> module.exit_json(msg=msg, changed=changed)
267 except Exception as e:
268 module.fail_json(msg=to_native(e))
269
270
271 if __name__ == '__main__':
(Pdb) module
<ansible.module_utils.basic.AnsibleModule object at 0x7f50d5a34c50>
(Pdb) changed
False <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
(Pdb) msg
'a already present'
(Pdb) s
```
|
https://github.com/ansible/ansible/issues/73659
|
https://github.com/ansible/ansible/pull/82217
|
f42984eeb36b092678690e39cd74179f96c8d438
|
6e448edc63ecfdaf3f6ebb2e015e2d3c12dd1d95
| 2021-02-19T09:57:15Z |
python
| 2023-11-16T19:04:34Z |
test/integration/targets/module_no_log/library/module_that_has_secret.py
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 73,659 |
no_log module argument processing can change 'changed' task field
|
##### SUMMARY
no_log module argument processing can change 'changed' task field, which is probably an undesirable side-effect
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
unsure, ansible core processing task responses
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible 2.10.6
ansible 2.7.7
```
##### CONFIGURATION
<!--- Paste verbatim output from "ansible-config dump --only-changed" between quotes -->
```paste below
```
##### OS / ENVIRONMENT
<!--- Provide all relevant information below, e.g. target OS versions, network device firmware, etc. -->
Debian Linux 10 Buster
##### STEPS TO REPRODUCE
<!--- Describe exactly how to reproduce the problem, using a minimal test-case -->
use a simple playbook with a single htpasswd module task
<!--- Paste example playbooks or commands between quotes below -->
```yaml
- hosts: localhost
tasks:
- htpasswd:
path: /tmp/xx
username: a
password: a
```
<!--- HINT: You can paste gist.github.com links for larger files -->
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
* subsequent calls to `ansible-playbook playbook.yml` yield task changed=False
OR
* general notice in ansible documentation of such behavior
##### ACTUAL RESULTS
<!--- Describe what actually happened. If possible run with extra verbosity (-vvvv) -->
* every call of the playbook results in task changed=True
* the module 'changed' return value itself is correct according to runtime debugging
* the error depends on the actual value of the `no_log` module argument
* probably, there's an undocumented side-effect in general no_log processing, where the edge case (value 'a') is found
in several places of the module response and replaced as requested. The issue is that the replacement takes place even in the task.changed field, which should not be touched (imho)
<!--- Paste verbatim command output between quotes -->
```paste below
TASK [htpasswd] **************************************************************************************************************************************************************************************************
task path: /a.yml:4
...
<127.0.0.1> EXEC /bin/sh -c '/usr/bin/python3 /root/.ansible/tmp/ansible-tmp-1613725466.4347775-207888894675995/AnsiballZ_htpasswd.py && sleep 0'
...
changed: [localhost] => {
"changed": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", <<<<<<<<<<<<<<<<<<<<<<<<
"invocation": {
"module_args": {
"attributes": null,
"backup": null,
"content": null,
"create": true,
"crypt_scheme": "********pr_md5_crypt",
"delimiter": null,
"directory_mode": null,
"follow": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"force": null,
"group": null,
"mode": null,
"name": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"owner": null,
"password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"path": "/tmp/xx",
"regexp": null,
"remote_src": null,
"selevel": null,
"serole": null,
"setype": null,
"seuser": null,
"src": null,
"state": "present",
"unsafe_writes": null,
"username": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
}
},
"msg": "******** ********lre********dy present"
}
```
module trace
```paste below
> /root/.ansible/tmp/ansible-tmp-1613724872.5574055-221454552474871/debug_dir/__main__.py(266)main()
-> module.exit_json(msg=msg, changed=changed)
(Pdb) l
261 else:
262 module.fail_json(msg="Invalid state: %s" % state)
263
264 check_file_attrs(module, changed, msg)
265 breakpoint()
266 -> module.exit_json(msg=msg, changed=changed)
267 except Exception as e:
268 module.fail_json(msg=to_native(e))
269
270
271 if __name__ == '__main__':
(Pdb) module
<ansible.module_utils.basic.AnsibleModule object at 0x7f50d5a34c50>
(Pdb) changed
False <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
(Pdb) msg
'a already present'
(Pdb) s
```
|
https://github.com/ansible/ansible/issues/73659
|
https://github.com/ansible/ansible/pull/82217
|
f42984eeb36b092678690e39cd74179f96c8d438
|
6e448edc63ecfdaf3f6ebb2e015e2d3c12dd1d95
| 2021-02-19T09:57:15Z |
python
| 2023-11-16T19:04:34Z |
test/integration/targets/module_no_log/tasks/main.yml
|
- name: Detect syslog
stat:
path: /var/log/syslog
register: syslog
- name: Detect journalctl
shell: command -V journalctl
ignore_errors: yes
changed_when: no
register: journalctl
- block:
- name: Skip tests if logs were not found.
debug:
msg: Did not find /var/log/syslog or journalctl. Tests will be skipped.
- meta: end_play
when: journalctl is failed and not syslog.stat.exists
- name: Generate random numbers for unique log entries
set_fact:
good_number: "{{ 999999999999 | random }}"
bad_number: "{{ 999999999999 | random }}"
- name: Generate expected log entry messages
set_fact:
good_message: 'My number is: ({{ good_number }})'
bad_message: 'My number is: ({{ bad_number }})'
- name: Generate log message search patterns
set_fact:
# these search patterns are designed to avoid matching themselves (see the sketch below)
good_search: '{{ good_message.replace(":", "[:]") }}'
bad_search: '{{ bad_message.replace(":", "[:]") }}'
- name: Generate grep command
set_fact:
grep_command: "grep -e '{{ good_search }}' -e '{{ bad_search }}'"
- name: Run a module that logs without no_log
module_that_logs:
number: "{{ good_number }}"
- name: Run a module that logs with no_log
module_that_logs:
number: "{{ bad_number }}"
no_log: yes
- name: Search for expected log messages
# if this fails the tests are probably running on a system which stores logs elsewhere
shell: "({{ grep_command }} /var/log/syslog) || (journalctl | {{ grep_command }})"
changed_when: no
register: grep
- name: Verify the correct log messages were found
assert:
that:
# if the good message is not found then the cause is likely one of:
# 1) the remote system does not write user.info messages to the logs
# 2) the AnsibleModule.log method is not working
- good_message in grep.stdout
- bad_message not in grep.stdout
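A hypothetical standalone illustration (simplified, without the parentheses and grep used above) of why the escaped colon keeps a search pattern from matching itself: the pattern still matches the original message, but not the log line that records the grep invocation containing the pattern.
```python
import re

message = "My number is: 42"
search = message.replace(":", "[:]")                         # "My number is[:] 42"

real_log_entry = "ansible: My number is: 42"                 # the message the module logged
logged_command = f"EXEC grep -e '{search}' /var/log/syslog"  # the grep invocation, also logged

pattern = re.compile(search)
print(bool(pattern.search(real_log_entry)))   # True, the real message is found
print(bool(pattern.search(logged_command)))   # False, the pattern does not match itself
```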
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,226 |
ANSIBLE_LOG_PATH no longer works since #81692 got merged
|
### Summary
If you run `ANSIBLE_LOG_PATH=test ansible localhost -m setup`, `test` is now an empty file. Before #81692 got merged, it contained log output like
```
023-11-16 08:01:52,262 p=313108 u=felix n=ansible | [WARNING]: You are running the development version of Ansible. You should only run Ansible from "devel" if you are modifying the Ansible engine, or trying out features under development.
This is a rapidly changing source of code and can become unstable at any point.
2023-11-16 08:01:52,383 p=313108 u=felix n=ansible | [WARNING]: No inventory was parsed, only implicit localhost is available
2023-11-16 08:01:53,511 p=313108 u=felix n=ansible | localhost | SUCCESS => {
"ansible_facts": {
...
```
### Issue Type
Bug Report
### Component Name
logging
### Ansible Version
```console
latest devel branch
```
### Configuration
```console
-
```
### OS / Environment
-
### Steps to Reproduce
Run `ANSIBLE_LOG_PATH=test ansible localhost -m setup`
### Expected Results
File `test` contains log output.
### Actual Results
```console
File `test` is empty.
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82226
|
https://github.com/ansible/ansible/pull/82227
|
f8cdec632461fbd821050fc584543c1dda6dfc5c
|
f6d7dd0840c079d0d2c2e3d8852b952462423a78
| 2023-11-16T07:03:17Z |
python
| 2023-11-16T19:49:40Z |
lib/ansible/utils/display.py
|
# (c) 2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
try:
import curses
except ImportError:
HAS_CURSES = False
else:
# this will be set to False if curses.setupterm() fails
HAS_CURSES = True
import collections.abc as c
import codecs
import ctypes.util
import fcntl
import getpass
import io
import logging
import os
import random
import subprocess
import sys
import termios
import textwrap
import threading
import time
import tty
import typing as t
from functools import wraps
from struct import unpack, pack
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleAssertionError, AnsiblePromptInterrupt, AnsiblePromptNoninteractive
from ansible.module_utils.common.text.converters import to_bytes, to_text
from ansible.module_utils.six import text_type
from ansible.utils.color import stringc
from ansible.utils.multiprocessing import context as multiprocessing_context
from ansible.utils.singleton import Singleton
from ansible.utils.unsafe_proxy import wrap_var
if t.TYPE_CHECKING:
# avoid circular import at runtime
from ansible.executor.task_queue_manager import FinalQueue
_LIBC = ctypes.cdll.LoadLibrary(ctypes.util.find_library('c'))
# Set argtypes, to avoid segfault if the wrong type is provided,
# restype is assumed to be c_int
_LIBC.wcwidth.argtypes = (ctypes.c_wchar,)
_LIBC.wcswidth.argtypes = (ctypes.c_wchar_p, ctypes.c_int)
# Max for c_int
_MAX_INT = 2 ** (ctypes.sizeof(ctypes.c_int) * 8 - 1) - 1
MOVE_TO_BOL = b'\r'
CLEAR_TO_EOL = b'\x1b[K'
def get_text_width(text: str) -> int:
"""Function that utilizes ``wcswidth`` or ``wcwidth`` to determine the
number of columns used to display a text string.
We try first with ``wcswidth``, and fallback to iterating each
character and using wcwidth individually, falling back to a value of 0
for non-printable wide characters.
"""
if not isinstance(text, text_type):
raise TypeError('get_text_width requires text, not %s' % type(text))
try:
width = _LIBC.wcswidth(text, _MAX_INT)
except ctypes.ArgumentError:
width = -1
if width != -1:
return width
width = 0
counter = 0
for c in text:
counter += 1
if c in (u'\x08', u'\x7f', u'\x94', u'\x1b'):
# A few characters result in a subtraction of length:
# BS, DEL, CCH, ESC
# ESC is slightly different in that it's part of an escape sequence, and
# while ESC is non printable, it's part of an escape sequence, which results
# in a single non printable length
width -= 1
counter -= 1
continue
try:
w = _LIBC.wcwidth(c)
except ctypes.ArgumentError:
w = -1
if w == -1:
# -1 signifies a non-printable character
# use 0 here as a best effort
w = 0
width += w
if width == 0 and counter:
raise EnvironmentError(
'get_text_width could not calculate text width of %r' % text
)
# It doesn't make sense to have a negative printable width
return width if width >= 0 else 0
def proxy_display(method):
def proxyit(self, *args, **kwargs):
if self._final_q:
# If _final_q is set, that means we are in a WorkerProcess
# and instead of displaying messages directly from the fork
# we will proxy them through the queue
return self._final_q.send_display(method.__name__, *args, **kwargs)
else:
return method(self, *args, **kwargs)
return proxyit
class FilterBlackList(logging.Filter):
def __init__(self, blacklist):
self.blacklist = [logging.Filter(name) for name in blacklist]
def filter(self, record):
return not any(f.filter(record) for f in self.blacklist)
class FilterUserInjector(logging.Filter):
"""
This is a filter which injects the current user as the 'user' attribute on each record. We need to add this filter
to all logger handlers so that 3rd party libraries won't print an exception due to user not being defined.
"""
try:
username = getpass.getuser()
except KeyError:
# people like to make containers w/o actual valid passwd/shadow and use host uids
username = 'uid=%s' % os.getuid()
def filter(self, record):
record.user = FilterUserInjector.username
return True
logger = None
# TODO: make this a callback event instead
if getattr(C, 'DEFAULT_LOG_PATH'):
path = C.DEFAULT_LOG_PATH
if path and (os.path.exists(path) and os.access(path, os.W_OK)) or os.access(os.path.dirname(path), os.W_OK):
# NOTE: level is kept at INFO to avoid security disclosures caused by certain libraries when using DEBUG
logging.basicConfig(filename=path, level=logging.INFO, # DO NOT set to logging.DEBUG
format='%(asctime)s p=%(process)d u=%(user)s n=%(name)s | %(message)s')
logger = logging.getLogger('ansible')
for handler in logging.root.handlers:
handler.addFilter(FilterBlackList(getattr(C, 'DEFAULT_LOG_FILTER', [])))
handler.addFilter(FilterUserInjector())
else:
print("[WARNING]: log file at %s is not writeable and we cannot create it, aborting\n" % path, file=sys.stderr)
# map color to log levels
color_to_log_level = {C.COLOR_ERROR: logging.ERROR,
C.COLOR_WARN: logging.WARNING,
C.COLOR_OK: logging.INFO,
C.COLOR_SKIP: logging.WARNING,
C.COLOR_UNREACHABLE: logging.ERROR,
C.COLOR_DEBUG: logging.DEBUG,
C.COLOR_CHANGED: logging.INFO,
C.COLOR_DEPRECATE: logging.WARNING,
C.COLOR_VERBOSE: logging.INFO}
b_COW_PATHS = (
b"/usr/bin/cowsay",
b"/usr/games/cowsay",
b"/usr/local/bin/cowsay", # BSD path for cowsay
b"/opt/local/bin/cowsay", # MacPorts path for cowsay
)
def _synchronize_textiowrapper(tio: t.TextIO, lock: threading.RLock):
# Ensure that a background thread can't hold the internal buffer lock on a file object
# during a fork, which causes forked children to hang. We're using display's existing lock for
# convenience (and entering the lock before a fork).
def _wrap_with_lock(f, lock):
@wraps(f)
def locking_wrapper(*args, **kwargs):
with lock:
return f(*args, **kwargs)
return locking_wrapper
buffer = tio.buffer
# monkeypatching the underlying file-like object isn't great, but likely safer than subclassing
buffer.write = _wrap_with_lock(buffer.write, lock) # type: ignore[method-assign]
buffer.flush = _wrap_with_lock(buffer.flush, lock) # type: ignore[method-assign]
def setraw(fd: int, when: int = termios.TCSAFLUSH) -> None:
"""Put terminal into a raw mode.
Copied from ``tty`` from CPython 3.11.0, and modified to not remove OPOST from OFLAG
OPOST is kept to prevent an issue with multi line prompts from being corrupted now that display
is proxied via the queue from forks. The problem is a race condition, in that we proxy the display
over the fork, but before it can be displayed, this plugin will have continued executing, potentially
setting stdout and stdin to raw which remove output post processing that commonly converts NL to CRLF
"""
mode = termios.tcgetattr(fd)
mode[tty.IFLAG] = mode[tty.IFLAG] & ~(termios.BRKINT | termios.ICRNL | termios.INPCK | termios.ISTRIP | termios.IXON)
mode[tty.OFLAG] = mode[tty.OFLAG] & ~(termios.OPOST)
mode[tty.CFLAG] = mode[tty.CFLAG] & ~(termios.CSIZE | termios.PARENB)
mode[tty.CFLAG] = mode[tty.CFLAG] | termios.CS8
mode[tty.LFLAG] = mode[tty.LFLAG] & ~(termios.ECHO | termios.ICANON | termios.IEXTEN | termios.ISIG)
mode[tty.CC][termios.VMIN] = 1
mode[tty.CC][termios.VTIME] = 0
termios.tcsetattr(fd, when, mode)
def clear_line(stdout: t.BinaryIO) -> None:
stdout.write(b'\x1b[%s' % MOVE_TO_BOL)
stdout.write(b'\x1b[%s' % CLEAR_TO_EOL)
def setup_prompt(stdin_fd: int, stdout_fd: int, seconds: int, echo: bool) -> None:
setraw(stdin_fd)
# Only set stdout to raw mode if it is a TTY. This is needed when redirecting
# stdout to a file since a file cannot be set to raw mode.
if os.isatty(stdout_fd):
setraw(stdout_fd)
if echo:
new_settings = termios.tcgetattr(stdin_fd)
new_settings[3] = new_settings[3] | termios.ECHO
termios.tcsetattr(stdin_fd, termios.TCSANOW, new_settings)
def setupterm() -> None:
# Nest the try except since curses.error is not available if curses did not import
try:
curses.setupterm()
except (curses.error, TypeError, io.UnsupportedOperation):
global HAS_CURSES
HAS_CURSES = False
else:
global MOVE_TO_BOL
global CLEAR_TO_EOL
# curses.tigetstr() returns None in some circumstances
MOVE_TO_BOL = curses.tigetstr('cr') or MOVE_TO_BOL
CLEAR_TO_EOL = curses.tigetstr('el') or CLEAR_TO_EOL
class Display(metaclass=Singleton):
def __init__(self, verbosity: int = 0) -> None:
self._final_q: FinalQueue | None = None
# NB: this lock is used to both prevent intermingled output between threads and to block writes during forks.
# Do not change the type of this lock or upgrade to a shared lock (eg multiprocessing.RLock).
self._lock = threading.RLock()
self.columns = None
self.verbosity = verbosity
if C.LOG_VERBOSITY is None:
self.log_verbosity = verbosity
else:
self.log_verbosity = max(verbosity, C.LOG_VERBOSITY)
# list of all deprecation messages to prevent duplicate display
self._deprecations: dict[str, int] = {}
self._warns: dict[str, int] = {}
self._errors: dict[str, int] = {}
self.b_cowsay: bytes | None = None
self.noncow = C.ANSIBLE_COW_SELECTION
self.set_cowsay_info()
if self.b_cowsay:
try:
cmd = subprocess.Popen([self.b_cowsay, "-l"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = cmd.communicate()
if cmd.returncode:
raise Exception
self.cows_available: set[str] = {to_text(c) for c in out.split()}
if C.ANSIBLE_COW_ACCEPTLIST and any(C.ANSIBLE_COW_ACCEPTLIST):
self.cows_available = set(C.ANSIBLE_COW_ACCEPTLIST).intersection(self.cows_available)
except Exception:
# could not execute cowsay for some reason
self.b_cowsay = None
self._set_column_width()
try:
# NB: we're relying on the display singleton behavior to ensure this only runs once
_synchronize_textiowrapper(sys.stdout, self._lock)
_synchronize_textiowrapper(sys.stderr, self._lock)
except Exception as ex:
self.warning(f"failed to patch stdout/stderr for fork-safety: {ex}")
codecs.register_error('_replacing_warning_handler', self._replacing_warning_handler)
try:
sys.stdout.reconfigure(errors='_replacing_warning_handler')
sys.stderr.reconfigure(errors='_replacing_warning_handler')
except Exception as ex:
self.warning(f"failed to reconfigure stdout/stderr with custom encoding error handler: {ex}")
self.setup_curses = False
def _replacing_warning_handler(self, exception: UnicodeError) -> tuple[str | bytes, int]:
# TODO: This should probably be deferred until after the current display is completed
# this will require some amount of new functionality
self.deprecated(
'Non UTF-8 encoded data replaced with "?" while displaying text to stdout/stderr, this is temporary and will become an error',
version='2.18',
)
return '?', exception.end
def set_queue(self, queue: FinalQueue) -> None:
"""Set the _final_q on Display, so that we know to proxy display over the queue
instead of directly writing to stdout/stderr from forks
This is only needed in ansible.executor.process.worker:WorkerProcess._run
"""
if multiprocessing_context.parent_process() is None:
raise RuntimeError('queue cannot be set in parent process')
self._final_q = queue
def set_cowsay_info(self) -> None:
if C.ANSIBLE_NOCOWS:
return
if C.ANSIBLE_COW_PATH:
self.b_cowsay = C.ANSIBLE_COW_PATH
else:
for b_cow_path in b_COW_PATHS:
if os.path.exists(b_cow_path):
self.b_cowsay = b_cow_path
@proxy_display
def display(
self,
msg: str,
color: str | None = None,
stderr: bool = False,
screen_only: bool = False,
log_only: bool = False,
newline: bool = True,
) -> None:
""" Display a message to the user
Note: msg *must* be a unicode string to prevent UnicodeError tracebacks.
"""
if not isinstance(msg, str):
raise TypeError(f'Display message must be str, not: {msg.__class__.__name__}')
nocolor = msg
if not log_only:
has_newline = msg.endswith(u'\n')
if has_newline:
msg2 = msg[:-1]
else:
msg2 = msg
if color:
msg2 = stringc(msg2, color)
if has_newline or newline:
msg2 = msg2 + u'\n'
# Note: After Display() class is refactored need to update the log capture
# code in 'bin/ansible-connection' (and other relevant places).
if not stderr:
fileobj = sys.stdout
else:
fileobj = sys.stderr
with self._lock:
fileobj.write(msg2)
# With locks, and the fact that we aren't printing from forks
# just write, and let the system flush. Everything should come out peachy
# I've left this code for historical purposes, or in case we need to add this
# back at a later date. For now ``TaskQueueManager.cleanup`` will perform a
# final flush at shutdown.
# try:
# fileobj.flush()
# except IOError as e:
# # Ignore EPIPE in case fileobj has been prematurely closed, eg.
# # when piping to "head -n1"
# if e.errno != errno.EPIPE:
# raise
if logger and not screen_only:
self._log(nocolor, color)
@proxy_display
def _log(self, msg: str, color: str | None = None, caplevel: int = 0):
if self.log_verbosity > caplevel:
msg2 = msg.lstrip('\n')
lvl = logging.INFO
if color:
# set logger level based on color (not great)
try:
lvl = color_to_log_level[color]
except KeyError:
# this should not happen, but JIC
raise AnsibleAssertionError('Invalid color supplied to display: %s' % color)
# actually log
logger.log(lvl, msg2)
def v(self, msg: str, host: str | None = None) -> None:
return self.verbose(msg, host=host, caplevel=0)
def vv(self, msg: str, host: str | None = None) -> None:
return self.verbose(msg, host=host, caplevel=1)
def vvv(self, msg: str, host: str | None = None) -> None:
return self.verbose(msg, host=host, caplevel=2)
def vvvv(self, msg: str, host: str | None = None) -> None:
return self.verbose(msg, host=host, caplevel=3)
def vvvvv(self, msg: str, host: str | None = None) -> None:
return self.verbose(msg, host=host, caplevel=4)
def vvvvvv(self, msg: str, host: str | None = None) -> None:
return self.verbose(msg, host=host, caplevel=5)
def debug(self, msg: str, host: str | None = None) -> None:
if C.DEFAULT_DEBUG:
if host is None:
self.display("%6d %0.5f: %s" % (os.getpid(), time.time(), msg), color=C.COLOR_DEBUG)
else:
self.display("%6d %0.5f [%s]: %s" % (os.getpid(), time.time(), host, msg), color=C.COLOR_DEBUG)
def verbose(self, msg: str, host: str | None = None, caplevel: int = 2) -> None:
to_stderr = C.VERBOSE_TO_STDERR
if self.verbosity > caplevel:
if host is None:
self.display(msg, color=C.COLOR_VERBOSE, stderr=to_stderr)
else:
self.display("<%s> %s" % (host, msg), color=C.COLOR_VERBOSE, stderr=to_stderr)
elif self.log_verbosity > self.verbosity and self.log_verbosity > caplevel:
# we send to log if log was configured with higher verbosity
if host is not None:
msg = "<%s> %s" % (host, msg)
self._log(msg, C.COLOR_VERBOSE, caplevel)
def get_deprecation_message(
self,
msg: str,
version: str | None = None,
removed: bool = False,
date: str | None = None,
collection_name: str | None = None,
) -> str:
''' used to print out a deprecation message.'''
msg = msg.strip()
if msg and msg[-1] not in ['!', '?', '.']:
msg += '.'
if collection_name == 'ansible.builtin':
collection_name = 'ansible-core'
if removed:
header = '[DEPRECATED]: {0}'.format(msg)
removal_fragment = 'This feature was removed'
help_text = 'Please update your playbooks.'
else:
header = '[DEPRECATION WARNING]: {0}'.format(msg)
removal_fragment = 'This feature will be removed'
# FUTURE: make this a standalone warning so it only shows up once?
help_text = 'Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.'
if collection_name:
from_fragment = 'from {0}'.format(collection_name)
else:
from_fragment = ''
if date:
when = 'in a release after {0}.'.format(date)
elif version:
when = 'in version {0}.'.format(version)
else:
when = 'in a future release.'
message_text = ' '.join(f for f in [header, removal_fragment, from_fragment, when, help_text] if f)
return message_text
@proxy_display
def deprecated(
self,
msg: str,
version: str | None = None,
removed: bool = False,
date: str | None = None,
collection_name: str | None = None,
) -> None:
if not removed and not C.DEPRECATION_WARNINGS:
return
message_text = self.get_deprecation_message(msg, version=version, removed=removed, date=date, collection_name=collection_name)
if removed:
raise AnsibleError(message_text)
wrapped = textwrap.wrap(message_text, self.columns, drop_whitespace=False)
message_text = "\n".join(wrapped) + "\n"
if message_text not in self._deprecations:
self.display(message_text.strip(), color=C.COLOR_DEPRECATE, stderr=True)
self._deprecations[message_text] = 1
@proxy_display
def warning(self, msg: str, formatted: bool = False) -> None:
if not formatted:
new_msg = "[WARNING]: %s" % msg
wrapped = textwrap.wrap(new_msg, self.columns)
new_msg = "\n".join(wrapped) + "\n"
else:
new_msg = "\n[WARNING]: \n%s" % msg
if new_msg not in self._warns:
self.display(new_msg, color=C.COLOR_WARN, stderr=True)
self._warns[new_msg] = 1
def system_warning(self, msg: str) -> None:
if C.SYSTEM_WARNINGS:
self.warning(msg)
def banner(self, msg: str, color: str | None = None, cows: bool = True) -> None:
'''
Prints a header-looking line with cowsay or stars with length depending on terminal width (3 minimum)
'''
msg = to_text(msg)
if self.b_cowsay and cows:
try:
self.banner_cowsay(msg)
return
except OSError:
self.warning("somebody cleverly deleted cowsay or something during the PB run. heh.")
msg = msg.strip()
try:
star_len = self.columns - get_text_width(msg)
except EnvironmentError:
star_len = self.columns - len(msg)
if star_len <= 3:
star_len = 3
stars = u"*" * star_len
self.display(u"\n%s %s" % (msg, stars), color=color)
def banner_cowsay(self, msg: str, color: str | None = None) -> None:
if u": [" in msg:
msg = msg.replace(u"[", u"")
if msg.endswith(u"]"):
msg = msg[:-1]
runcmd = [self.b_cowsay, b"-W", b"60"]
if self.noncow:
thecow = self.noncow
if thecow == 'random':
thecow = random.choice(list(self.cows_available))
runcmd.append(b'-f')
runcmd.append(to_bytes(thecow))
runcmd.append(to_bytes(msg))
cmd = subprocess.Popen(runcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = cmd.communicate()
self.display(u"%s\n" % to_text(out), color=color)
def error(self, msg: str, wrap_text: bool = True) -> None:
if wrap_text:
new_msg = u"\n[ERROR]: %s" % msg
wrapped = textwrap.wrap(new_msg, self.columns)
new_msg = u"\n".join(wrapped) + u"\n"
else:
new_msg = u"ERROR! %s" % msg
if new_msg not in self._errors:
self.display(new_msg, color=C.COLOR_ERROR, stderr=True)
self._errors[new_msg] = 1
@staticmethod
def prompt(msg: str, private: bool = False) -> str:
if private:
return getpass.getpass(msg)
else:
return input(msg)
def do_var_prompt(
self,
varname: str,
private: bool = True,
prompt: str | None = None,
encrypt: str | None = None,
confirm: bool = False,
salt_size: int | None = None,
salt: str | None = None,
default: str | None = None,
unsafe: bool = False,
) -> str:
result = None
if sys.__stdin__.isatty():
do_prompt = self.prompt
if prompt and default is not None:
msg = "%s [%s]: " % (prompt, default)
elif prompt:
msg = "%s: " % prompt
else:
msg = 'input for %s: ' % varname
if confirm:
while True:
result = do_prompt(msg, private)
second = do_prompt("confirm " + msg, private)
if result == second:
break
self.display("***** VALUES ENTERED DO NOT MATCH ****")
else:
result = do_prompt(msg, private)
else:
result = None
self.warning("Not prompting as we are not in interactive mode")
# if result is false and default is not None
if not result and default is not None:
result = default
if encrypt:
# Circular import because encrypt needs a display class
from ansible.utils.encrypt import do_encrypt
result = do_encrypt(result, encrypt, salt_size=salt_size, salt=salt)
# handle utf-8 chars
result = to_text(result, errors='surrogate_or_strict')
if unsafe:
result = wrap_var(result)
return result
def _set_column_width(self) -> None:
if os.isatty(1):
tty_size = unpack('HHHH', fcntl.ioctl(1, termios.TIOCGWINSZ, pack('HHHH', 0, 0, 0, 0)))[1]
else:
tty_size = 0
self.columns = max(79, tty_size - 1)
def prompt_until(
self,
msg: str,
private: bool = False,
seconds: int | None = None,
interrupt_input: c.Container[bytes] | None = None,
complete_input: c.Container[bytes] | None = None,
) -> bytes:
if self._final_q:
from ansible.executor.process.worker import current_worker
self._final_q.send_prompt(
worker_id=current_worker.worker_id, prompt=msg, private=private, seconds=seconds,
interrupt_input=interrupt_input, complete_input=complete_input
)
return current_worker.worker_queue.get()
if HAS_CURSES and not self.setup_curses:
setupterm()
self.setup_curses = True
if (
self._stdin_fd is None
or not os.isatty(self._stdin_fd)
# Compare the current process group to the process group associated
# with terminal of the given file descriptor to determine if the process
# is running in the background.
or os.getpgrp() != os.tcgetpgrp(self._stdin_fd)
):
raise AnsiblePromptNoninteractive('stdin is not interactive')
# When seconds/interrupt_input/complete_input are all None, this does mostly the same thing as input/getpass,
# but self.prompt may raise a KeyboardInterrupt, which must be caught in the main thread.
# If the main thread handled this, it would also need to send a newline to the tty of any hanging pids.
# if seconds is None and interrupt_input is None and complete_input is None:
# try:
# return self.prompt(msg, private=private)
# except KeyboardInterrupt:
# # can't catch in the results_thread_main daemon thread
# raise AnsiblePromptInterrupt('user interrupt')
self.display(msg)
result = b''
with self._lock:
original_stdin_settings = termios.tcgetattr(self._stdin_fd)
try:
setup_prompt(self._stdin_fd, self._stdout_fd, seconds, not private)
# flush the buffer to make sure no previous key presses
# are read in below
termios.tcflush(self._stdin, termios.TCIFLUSH)
# read input 1 char at a time until the optional timeout or complete/interrupt condition is met
return self._read_non_blocking_stdin(echo=not private, seconds=seconds, interrupt_input=interrupt_input, complete_input=complete_input)
finally:
# restore the old settings for the duped stdin stdin_fd
termios.tcsetattr(self._stdin_fd, termios.TCSADRAIN, original_stdin_settings)
def _read_non_blocking_stdin(
self,
echo: bool = False,
seconds: int | None = None,
interrupt_input: c.Container[bytes] | None = None,
complete_input: c.Container[bytes] | None = None,
) -> bytes:
if self._final_q:
raise NotImplementedError
if seconds is not None:
start = time.time()
if interrupt_input is None:
try:
interrupt = termios.tcgetattr(sys.stdin.buffer.fileno())[6][termios.VINTR]
except Exception:
interrupt = b'\x03' # value for Ctrl+C
try:
backspace_sequences = [termios.tcgetattr(self._stdin_fd)[6][termios.VERASE]]
except Exception:
# unsupported/not present, use default
backspace_sequences = [b'\x7f', b'\x08']
result_string = b''
while seconds is None or (time.time() - start < seconds):
key_pressed = None
try:
os.set_blocking(self._stdin_fd, False)
while key_pressed is None and (seconds is None or (time.time() - start < seconds)):
key_pressed = self._stdin.read(1)
# throttle to prevent excess CPU consumption
time.sleep(C.DEFAULT_INTERNAL_POLL_INTERVAL)
finally:
os.set_blocking(self._stdin_fd, True)
if key_pressed is None:
key_pressed = b''
if (interrupt_input is None and key_pressed == interrupt) or (interrupt_input is not None and key_pressed.lower() in interrupt_input):
clear_line(self._stdout)
raise AnsiblePromptInterrupt('user interrupt')
if (complete_input is None and key_pressed in (b'\r', b'\n')) or (complete_input is not None and key_pressed.lower() in complete_input):
clear_line(self._stdout)
break
elif key_pressed in backspace_sequences:
clear_line(self._stdout)
result_string = result_string[:-1]
if echo:
self._stdout.write(result_string)
self._stdout.flush()
else:
result_string += key_pressed
return result_string
@property
def _stdin(self) -> t.BinaryIO | None:
if self._final_q:
raise NotImplementedError
try:
return sys.stdin.buffer
except AttributeError:
return None
@property
def _stdin_fd(self) -> int | None:
try:
return self._stdin.fileno()
except (ValueError, AttributeError):
return None
@property
def _stdout(self) -> t.BinaryIO:
if self._final_q:
raise NotImplementedError
return sys.stdout.buffer
@property
def _stdout_fd(self) -> int | None:
try:
return self._stdout.fileno()
except (ValueError, AttributeError):
return None
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,226 |
ANSIBLE_LOG_PATH no longer works since #81692 got merged
|
### Summary
If you run `ANSIBLE_LOG_PATH=test ansible localhost -m setup`, `test` is now an empty file. Before #81692 got merged, it contained log output like
```
023-11-16 08:01:52,262 p=313108 u=felix n=ansible | [WARNING]: You are running the development version of Ansible. You should only run Ansible from "devel" if you are modifying the Ansible engine, or trying out features under development.
This is a rapidly changing source of code and can become unstable at any point.
2023-11-16 08:01:52,383 p=313108 u=felix n=ansible | [WARNING]: No inventory was parsed, only implicit localhost is available
2023-11-16 08:01:53,511 p=313108 u=felix n=ansible | localhost | SUCCESS => {
"ansible_facts": {
...
```
### Issue Type
Bug Report
### Component Name
logging
### Ansible Version
```console
latest devel branch
```
### Configuration
```console
-
```
### OS / Environment
-
### Steps to Reproduce
Run `ANSIBLE_LOG_PATH=test ansible localhost -m setup`
### Expected Results
File `test` contains log output.
### Actual Results
```console
File `test` is empty.
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82226
|
https://github.com/ansible/ansible/pull/82227
|
f8cdec632461fbd821050fc584543c1dda6dfc5c
|
f6d7dd0840c079d0d2c2e3d8852b952462423a78
| 2023-11-16T07:03:17Z |
python
| 2023-11-16T19:49:40Z |
test/integration/targets/ansible_log/aliases
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,226 |
ANSIBLE_LOG_PATH no longer works since #81692 got merged
|
### Summary
If you run `ANSIBLE_LOG_PATH=test ansible localhost -m setup`, `test` is now an empty file. Before #81692 got merged, it contained log output like
```
023-11-16 08:01:52,262 p=313108 u=felix n=ansible | [WARNING]: You are running the development version of Ansible. You should only run Ansible from "devel" if you are modifying the Ansible engine, or trying out features under development.
This is a rapidly changing source of code and can become unstable at any point.
2023-11-16 08:01:52,383 p=313108 u=felix n=ansible | [WARNING]: No inventory was parsed, only implicit localhost is available
2023-11-16 08:01:53,511 p=313108 u=felix n=ansible | localhost | SUCCESS => {
"ansible_facts": {
...
```
### Issue Type
Bug Report
### Component Name
logging
### Ansible Version
```console
latest devel branch
```
### Configuration
```console
-
```
### OS / Environment
-
### Steps to Reproduce
Run `ANSIBLE_LOG_PATH=test ansible localhost -m setup`
### Expected Results
File `test` contains log output.
### Actual Results
```console
File `test` is empty.
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82226
|
https://github.com/ansible/ansible/pull/82227
|
f8cdec632461fbd821050fc584543c1dda6dfc5c
|
f6d7dd0840c079d0d2c2e3d8852b952462423a78
| 2023-11-16T07:03:17Z |
python
| 2023-11-16T19:49:40Z |
test/integration/targets/ansible_log/logit.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,226 |
ANSIBLE_LOG_PATH no longer works since #81692 got merged
|
### Summary
If you run `ANSIBLE_LOG_PATH=test ansible localhost -m setup`, `test` is now an empty file. Before #81692 got merged, it contained log output like
```
023-11-16 08:01:52,262 p=313108 u=felix n=ansible | [WARNING]: You are running the development version of Ansible. You should only run Ansible from "devel" if you are modifying the Ansible engine, or trying out features under development.
This is a rapidly changing source of code and can become unstable at any point.
2023-11-16 08:01:52,383 p=313108 u=felix n=ansible | [WARNING]: No inventory was parsed, only implicit localhost is available
2023-11-16 08:01:53,511 p=313108 u=felix n=ansible | localhost | SUCCESS => {
"ansible_facts": {
...
```
### Issue Type
Bug Report
### Component Name
logging
### Ansible Version
```console
latest devel branch
```
### Configuration
```console
-
```
### OS / Environment
-
### Steps to Reproduce
Run `ANSIBLE_LOG_PATH=test ansible localhost -m setup`
### Expected Results
File `test` contains log output.
### Actual Results
```console
File `test` is empty.
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82226
|
https://github.com/ansible/ansible/pull/82227
|
f8cdec632461fbd821050fc584543c1dda6dfc5c
|
f6d7dd0840c079d0d2c2e3d8852b952462423a78
| 2023-11-16T07:03:17Z |
python
| 2023-11-16T19:49:40Z |
test/integration/targets/ansible_log/runme.sh
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 79,683 |
wait_for no longer works for files in e.g. sysfs because it uses mmap() instead of read()
|
### Summary
We have an ansible task using the `wait_for` module to wait for a network device carrier to be up:
```
wait_for:
path: "/sys/class/net/{{ device }}/carrier"
search_regex: "1"
timeout: 60
```
This no longer works since https://github.com/ansible/ansible/commit/9d4ced1237380051334b54379ff64e45c0341a6d, because `mmap()` is now used instead of `read()`, and it isn't supported by `sysfs` in this case.
Do we really need to use `mmap()`? I imagine it might cause issues with other pseudofilesystems too.
In the meantime, we use a workaround: grep in a loop instead (a plain read()-based sketch follows below)
```
command:
cmd: "grep 1 /sys/class/net/{{ device }}/carrier"
timeout: 1
retries: 60
register: carrier
until: carrier is successful
```
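For reference, a minimal hand-rolled sketch (not the wait_for module itself) of a plain read()-based poll that does work on sysfs files; the path and pattern mirror the example above:
```python
# Hypothetical read()-based poll; sysfs files generally cannot be mmap()ed but can be read().
import re
import time

def wait_for_regex(path, pattern, timeout=60, sleep=1):
    deadline = time.monotonic() + timeout
    regex = re.compile(pattern)
    while time.monotonic() < deadline:
        try:
            with open(path) as f:
                if regex.search(f.read()):
                    return True
        except OSError:
            pass  # the file may not exist yet
        time.sleep(sleep)
    return False

# e.g. wait_for_regex("/sys/class/net/eth0/carrier", "1")
```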
### Issue Type
Bug Report
### Component Name
wait_for
### Ansible Version
```console
$ ansible --version
ansible [core 2.13.7]
config file = None
configured module search path = ['/Users/rob.muir/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /Users/rob.muir/workspace/trash/build/.env/lib/python3.10/site-packages/ansible
ansible collection location = /Users/rob.muir/.ansible/collections:/usr/share/ansible/collections
executable location = /Users/rob.muir/workspace/trash/build/.env/bin/ansible
python version = 3.10.7 (main, Sep 15 2022, 01:50:27) [Clang 12.0.0 (clang-1200.0.32.29)]
jinja version = 3.0.3
libyaml = False
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
```
### OS / Environment
Mac OS controller, Centos Stream 9 target
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```
# set device to say, eth0 or whatever is on your computer
wait_for:
path: "/sys/class/net/{{ device }}/carrier"
search_regex: "1"
timeout: 60
```
### Expected Results
Expect wait_for to still work with files in /sys
### Actual Results
```console
The wait_for will fail, only with recent ansible versions, due to the use of mmap()
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/79683
|
https://github.com/ansible/ansible/pull/82064
|
f6d7dd0840c079d0d2c2e3d8852b952462423a78
|
8b102dca4a236efcf645e8c17721da549cda61c5
| 2023-01-06T15:47:11Z |
python
| 2023-11-16T20:20:52Z |
changelogs/fragments/wait_for_mmap.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 79,683 |
wait_for no longer works for files in e.g. sysfs because it uses mmap() instead of read()
|
### Summary
We have an ansible task using the `wait_for` module to wait for a network device carrier to be up:
```
wait_for:
path: "/sys/class/net/{{ device }}/carrier"
search_regex: "1"
timeout: 60
```
This no longer works since https://github.com/ansible/ansible/commit/9d4ced1237380051334b54379ff64e45c0341a6d, because `mmap()` is now used instead of `read()`, and it isn't supported by `sysfs` in this case.
Do we really need to use `mmap()`? I imagine it might cause issues with other pseudofilesystems too.
In the meantime, we use a workaround: use grep in a loop instead
```
command:
cmd: "grep 1 /sys/class/net/{{ device }}/carrier"
timeout: 1
retries: 60
register: carrier
until: carrier is successful
```
### Issue Type
Bug Report
### Component Name
wait_for
### Ansible Version
```console
$ ansible --version
ansible [core 2.13.7]
config file = None
configured module search path = ['/Users/rob.muir/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /Users/rob.muir/workspace/trash/build/.env/lib/python3.10/site-packages/ansible
ansible collection location = /Users/rob.muir/.ansible/collections:/usr/share/ansible/collections
executable location = /Users/rob.muir/workspace/trash/build/.env/bin/ansible
python version = 3.10.7 (main, Sep 15 2022, 01:50:27) [Clang 12.0.0 (clang-1200.0.32.29)]
jinja version = 3.0.3
libyaml = False
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
```
### OS / Environment
Mac OS controller, Centos Stream 9 target
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```
# set device to say, eth0 or whatever is on your computer
wait_for:
path: "/sys/class/net/{{ device }}/carrier"
search_regex: "1"
timeout: 60
```
### Expected Results
Expect wait_for to still work with files in /sys
### Actual Results
```console
The wait_for will fail, only with recent ansible versions, due to the use of mmap()
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/79683
|
https://github.com/ansible/ansible/pull/82064
|
f6d7dd0840c079d0d2c2e3d8852b952462423a78
|
8b102dca4a236efcf645e8c17721da549cda61c5
| 2023-01-06T15:47:11Z |
python
| 2023-11-16T20:20:52Z |
lib/ansible/modules/wait_for.py
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Jeroen Hoekx <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = r'''
---
module: wait_for
short_description: Waits for a condition before continuing
description:
- You can wait for a set amount of time O(timeout), this is the default if nothing is specified or just O(timeout) is specified.
This does not produce an error.
- Waiting for a port to become available is useful for when services are not immediately available after their init scripts return
which is true of certain Java application servers.
- It is also useful when starting guests with the M(community.libvirt.virt) module and needing to pause until they are ready.
- This module can also be used to wait for a regex match on a string to be present in a file.
- In Ansible 1.6 and later, this module can also be used to wait for a file to be available or
absent on the filesystem.
- In Ansible 1.8 and later, this module can also be used to wait for active connections to be closed before continuing, useful if a node
is being rotated out of a load balancer pool.
- For Windows targets, use the M(ansible.windows.win_wait_for) module instead.
version_added: "0.7"
options:
host:
description:
- A resolvable hostname or IP address to wait for.
type: str
default: 127.0.0.1
timeout:
description:
- Maximum number of seconds to wait for, when used with another condition it will force an error.
- When used without other conditions it is the equivalent of just sleeping.
type: int
default: 300
connect_timeout:
description:
- Maximum number of seconds to wait for a connection to happen before closing and retrying.
type: int
default: 5
delay:
description:
- Number of seconds to wait before starting to poll.
type: int
default: 0
port:
description:
- Port number to poll.
- O(path) and O(port) are mutually exclusive parameters.
type: int
active_connection_states:
description:
- The list of TCP connection states which are counted as active connections.
type: list
elements: str
default: [ ESTABLISHED, FIN_WAIT1, FIN_WAIT2, SYN_RECV, SYN_SENT, TIME_WAIT ]
version_added: "2.3"
state:
description:
- Either V(present), V(started), V(stopped), V(absent), or V(drained).
- When checking a port V(started) will ensure the port is open, V(stopped) will check that it is closed, V(drained) will check for active connections.
- When checking for a file or a search string V(present) or V(started) will ensure that the file or string is present before continuing,
V(absent) will check that file is absent or removed.
type: str
choices: [ absent, drained, present, started, stopped ]
default: started
path:
description:
- Path to a file on the filesystem that must exist before continuing.
- O(path) and O(port) are mutually exclusive parameters.
type: path
version_added: "1.4"
search_regex:
description:
- Can be used to match a string in either a file or a socket connection.
- Defaults to a multiline regex.
type: str
version_added: "1.4"
exclude_hosts:
description:
- List of hosts or IPs to ignore when looking for active TCP connections for V(drained) state.
type: list
elements: str
version_added: "1.8"
sleep:
description:
- Number of seconds to sleep between checks.
- Before Ansible 2.3 this was hardcoded to 1 second.
type: int
default: 1
version_added: "2.3"
msg:
description:
- This overrides the normal error message from a failure to meet the required conditions.
type: str
version_added: "2.4"
extends_documentation_fragment: action_common_attributes
attributes:
check_mode:
support: none
diff_mode:
support: none
platform:
platforms: posix
notes:
- The ability to use search_regex with a port connection was added in Ansible 1.7.
- Prior to Ansible 2.4, testing for the absence of a directory or UNIX socket did not work correctly.
- Prior to Ansible 2.4, testing for the presence of a file did not work correctly if the remote user did not have read access to that file.
- Under some circumstances when using mandatory access control, a path may always be treated as being absent even if it exists, but
can't be modified or created by the remote user either.
- When waiting for a path, symbolic links will be followed. Many other modules that manipulate files do not follow symbolic links,
so operations on the path using other modules may not work exactly as expected.
seealso:
- module: ansible.builtin.wait_for_connection
- module: ansible.windows.win_wait_for
- module: community.windows.win_wait_for_process
author:
- Jeroen Hoekx (@jhoekx)
- John Jarvis (@jarv)
- Andrii Radyk (@AnderEnder)
'''
EXAMPLES = r'''
- name: Sleep for 300 seconds and continue with play
ansible.builtin.wait_for:
timeout: 300
delegate_to: localhost
- name: Wait for port 8000 to become open on the host, don't start checking for 10 seconds
ansible.builtin.wait_for:
port: 8000
delay: 10
- name: Waits for port 8000 of any IP to close active connections, don't start checking for 10 seconds
ansible.builtin.wait_for:
host: 0.0.0.0
port: 8000
delay: 10
state: drained
- name: Wait for port 8000 of any IP to close active connections, ignoring connections for specified hosts
ansible.builtin.wait_for:
host: 0.0.0.0
port: 8000
state: drained
exclude_hosts: 10.2.1.2,10.2.1.3
- name: Wait until the file /tmp/foo is present before continuing
ansible.builtin.wait_for:
path: /tmp/foo
- name: Wait until the string "completed" is in the file /tmp/foo before continuing
ansible.builtin.wait_for:
path: /tmp/foo
search_regex: completed
- name: Wait until regex pattern matches in the file /tmp/foo and print the matched group
ansible.builtin.wait_for:
path: /tmp/foo
search_regex: completed (?P<task>\w+)
register: waitfor
- ansible.builtin.debug:
msg: Completed {{ waitfor['match_groupdict']['task'] }}
- name: Wait until the lock file is removed
ansible.builtin.wait_for:
path: /var/lock/file.lock
state: absent
- name: Wait until the process is finished and pid was destroyed
ansible.builtin.wait_for:
path: /proc/3466/status
state: absent
- name: Output customized message when failed
ansible.builtin.wait_for:
path: /tmp/foo
state: present
msg: Timeout to find file /tmp/foo
# Do not assume the inventory_hostname is resolvable and delay 10 seconds at start
- name: Wait 300 seconds for port 22 to become open and contain "OpenSSH"
ansible.builtin.wait_for:
port: 22
host: '{{ (ansible_ssh_host|default(ansible_host))|default(inventory_hostname) }}'
search_regex: OpenSSH
delay: 10
connection: local
# Same as above but you normally have ansible_connection set in inventory, which overrides 'connection'
- name: Wait 300 seconds for port 22 to become open and contain "OpenSSH"
ansible.builtin.wait_for:
port: 22
host: '{{ (ansible_ssh_host|default(ansible_host))|default(inventory_hostname) }}'
search_regex: OpenSSH
delay: 10
vars:
ansible_connection: local
'''
RETURN = r'''
elapsed:
description: The number of seconds that elapsed while waiting
returned: always
type: int
sample: 23
match_groups:
description: Tuple containing all the subgroups of the match as returned by U(https://docs.python.org/3/library/re.html#re.MatchObject.groups)
returned: always
type: list
sample: ['match 1', 'match 2']
match_groupdict:
description: Dictionary containing all the named subgroups of the match, keyed by the subgroup name,
as returned by U(https://docs.python.org/3/library/re.html#re.MatchObject.groupdict)
returned: always
type: dict
sample:
{
'group': 'match'
}
'''
import binascii
import contextlib
import datetime
import errno
import math
import mmap
import os
import re
import select
import socket
import time
import traceback
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.common.sys_info import get_platform_subclass
from ansible.module_utils.common.text.converters import to_bytes
from ansible.module_utils.compat.datetime import utcnow
HAS_PSUTIL = False
PSUTIL_IMP_ERR = None
try:
import psutil
HAS_PSUTIL = True
# just because we can import it on Linux doesn't mean we will use it
except ImportError:
PSUTIL_IMP_ERR = traceback.format_exc()
class TCPConnectionInfo(object):
"""
This is a generic TCP Connection Info strategy class that relies
on the psutil module, which is not ideal for targets, but necessary
for cross platform support.
A subclass may wish to override some or all of these methods.
- _get_exclude_ips()
- get_active_connections()
All subclasses MUST define platform and distribution (which may be None).
"""
platform = 'Generic'
distribution = None
match_all_ips = {
socket.AF_INET: '0.0.0.0',
socket.AF_INET6: '::',
}
ipv4_mapped_ipv6_address = {
'prefix': '::ffff',
'match_all': '::ffff:0.0.0.0'
}
def __new__(cls, *args, **kwargs):
new_cls = get_platform_subclass(TCPConnectionInfo)
return super(cls, new_cls).__new__(new_cls)
def __init__(self, module):
self.module = module
self.ips = _convert_host_to_ip(module.params['host'])
self.port = int(self.module.params['port'])
self.exclude_ips = self._get_exclude_ips()
if not HAS_PSUTIL:
module.fail_json(msg=missing_required_lib('psutil'), exception=PSUTIL_IMP_ERR)
def _get_exclude_ips(self):
exclude_hosts = self.module.params['exclude_hosts']
exclude_ips = []
if exclude_hosts is not None:
for host in exclude_hosts:
exclude_ips.extend(_convert_host_to_ip(host))
return exclude_ips
def get_active_connections_count(self):
active_connections = 0
for p in psutil.process_iter():
try:
if hasattr(p, 'get_connections'):
connections = p.get_connections(kind='inet')
else:
connections = p.connections(kind='inet')
except psutil.Error:
# Process is Zombie or other error state
continue
for conn in connections:
if conn.status not in self.module.params['active_connection_states']:
continue
if hasattr(conn, 'local_address'):
(local_ip, local_port) = conn.local_address
else:
(local_ip, local_port) = conn.laddr
if self.port != local_port:
continue
if hasattr(conn, 'remote_address'):
(remote_ip, remote_port) = conn.remote_address
else:
(remote_ip, remote_port) = conn.raddr
if (conn.family, remote_ip) in self.exclude_ips:
continue
if any((
(conn.family, local_ip) in self.ips,
(conn.family, self.match_all_ips[conn.family]) in self.ips,
local_ip.startswith(self.ipv4_mapped_ipv6_address['prefix']) and
(conn.family, self.ipv4_mapped_ipv6_address['match_all']) in self.ips,
)):
active_connections += 1
return active_connections
# ===========================================
# Subclass: Linux
class LinuxTCPConnectionInfo(TCPConnectionInfo):
"""
This is a TCP Connection Info evaluation strategy class
that utilizes information from Linux's procfs. While less universal,
does allow Linux targets to not require an additional library.
"""
platform = 'Linux'
distribution = None
source_file = {
socket.AF_INET: '/proc/net/tcp',
socket.AF_INET6: '/proc/net/tcp6'
}
match_all_ips = {
socket.AF_INET: '00000000',
socket.AF_INET6: '00000000000000000000000000000000',
}
ipv4_mapped_ipv6_address = {
'prefix': '0000000000000000FFFF0000',
'match_all': '0000000000000000FFFF000000000000'
}
local_address_field = 1
remote_address_field = 2
connection_state_field = 3
def __init__(self, module):
self.module = module
self.ips = _convert_host_to_hex(module.params['host'])
self.port = "%0.4X" % int(module.params['port'])
self.exclude_ips = self._get_exclude_ips()
def _get_exclude_ips(self):
exclude_hosts = self.module.params['exclude_hosts']
exclude_ips = []
if exclude_hosts is not None:
for host in exclude_hosts:
exclude_ips.extend(_convert_host_to_hex(host))
return exclude_ips
def get_active_connections_count(self):
active_connections = 0
for family in self.source_file.keys():
if not os.path.isfile(self.source_file[family]):
continue
            try:
                f = open(self.source_file[family])
            except IOError:
                continue
            try:
                for tcp_connection in f.readlines():
tcp_connection = tcp_connection.strip().split()
if tcp_connection[self.local_address_field] == 'local_address':
continue
if (tcp_connection[self.connection_state_field] not in
[get_connection_state_id(_connection_state) for _connection_state in self.module.params['active_connection_states']]):
continue
(local_ip, local_port) = tcp_connection[self.local_address_field].split(':')
if self.port != local_port:
continue
(remote_ip, remote_port) = tcp_connection[self.remote_address_field].split(':')
if (family, remote_ip) in self.exclude_ips:
continue
if any((
(family, local_ip) in self.ips,
(family, self.match_all_ips[family]) in self.ips,
local_ip.startswith(self.ipv4_mapped_ipv6_address['prefix']) and
(family, self.ipv4_mapped_ipv6_address['match_all']) in self.ips,
)):
active_connections += 1
            except IOError:
                pass
finally:
f.close()
return active_connections
def _convert_host_to_ip(host):
"""
    Perform forward DNS resolution on host; passing an IP address returns that same IP
Args:
host: String with either hostname, IPv4, or IPv6 address
Returns:
List of tuples containing address family and IP
"""
addrinfo = socket.getaddrinfo(host, 80, 0, 0, socket.SOL_TCP)
ips = []
for family, socktype, proto, canonname, sockaddr in addrinfo:
ip = sockaddr[0]
ips.append((family, ip))
if family == socket.AF_INET:
ips.append((socket.AF_INET6, "::ffff:" + ip))
return ips
def _convert_host_to_hex(host):
"""
Convert the provided host to the format in /proc/net/tcp*
/proc/net/tcp uses little-endian four byte hex for ipv4
/proc/net/tcp6 uses little-endian per 4B word for ipv6
Args:
host: String with either hostname, IPv4, or IPv6 address
Returns:
List of tuples containing address family and the
little-endian converted host
"""
ips = []
if host is not None:
for family, ip in _convert_host_to_ip(host):
hexip_nf = binascii.b2a_hex(socket.inet_pton(family, ip))
hexip_hf = ""
for i in range(0, len(hexip_nf), 8):
ipgroup_nf = hexip_nf[i:i + 8]
ipgroup_hf = socket.ntohl(int(ipgroup_nf, base=16))
hexip_hf = "%s%08X" % (hexip_hf, ipgroup_hf)
ips.append((family, hexip_hf))
return ips
def _timedelta_total_seconds(timedelta):
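    # Equivalent to timedelta.total_seconds(); presumably kept as a local helper for
    # compatibility with very old Python versions that lacked that method.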
return (
timedelta.microseconds + 0.0 +
(timedelta.seconds + timedelta.days * 24 * 3600) * 10 ** 6) / 10 ** 6
def get_connection_state_id(state):
connection_state_id = {
'ESTABLISHED': '01',
'SYN_SENT': '02',
'SYN_RECV': '03',
'FIN_WAIT1': '04',
'FIN_WAIT2': '05',
'TIME_WAIT': '06',
}
return connection_state_id[state]
def main():
module = AnsibleModule(
argument_spec=dict(
host=dict(type='str', default='127.0.0.1'),
timeout=dict(type='int', default=300),
connect_timeout=dict(type='int', default=5),
delay=dict(type='int', default=0),
port=dict(type='int'),
active_connection_states=dict(type='list', elements='str', default=['ESTABLISHED', 'FIN_WAIT1', 'FIN_WAIT2', 'SYN_RECV', 'SYN_SENT', 'TIME_WAIT']),
path=dict(type='path'),
search_regex=dict(type='str'),
state=dict(type='str', default='started', choices=['absent', 'drained', 'present', 'started', 'stopped']),
exclude_hosts=dict(type='list', elements='str'),
sleep=dict(type='int', default=1),
msg=dict(type='str'),
),
)
host = module.params['host']
timeout = module.params['timeout']
connect_timeout = module.params['connect_timeout']
delay = module.params['delay']
port = module.params['port']
state = module.params['state']
path = module.params['path']
b_path = to_bytes(path, errors='surrogate_or_strict', nonstring='passthru')
search_regex = module.params['search_regex']
b_search_regex = to_bytes(search_regex, errors='surrogate_or_strict', nonstring='passthru')
msg = module.params['msg']
if search_regex is not None:
try:
b_compiled_search_re = re.compile(b_search_regex, re.MULTILINE)
except re.error as e:
module.fail_json(msg="Invalid regular expression: %s" % e)
else:
b_compiled_search_re = None
match_groupdict = {}
match_groups = ()
if port and path:
module.fail_json(msg="port and path parameter can not both be passed to wait_for", elapsed=0)
if path and state == 'stopped':
module.fail_json(msg="state=stopped should only be used for checking a port in the wait_for module", elapsed=0)
if path and state == 'drained':
module.fail_json(msg="state=drained should only be used for checking a port in the wait_for module", elapsed=0)
if module.params['exclude_hosts'] is not None and state != 'drained':
module.fail_json(msg="exclude_hosts should only be with state=drained", elapsed=0)
for _connection_state in module.params['active_connection_states']:
try:
get_connection_state_id(_connection_state)
except Exception:
module.fail_json(msg="unknown active_connection_state (%s) defined" % _connection_state, elapsed=0)
start = utcnow()
if delay:
time.sleep(delay)
if not port and not path and state != 'drained':
time.sleep(timeout)
elif state in ['absent', 'stopped']:
# first wait for the stop condition
end = start + datetime.timedelta(seconds=timeout)
while utcnow() < end:
if path:
try:
if not os.access(b_path, os.F_OK):
break
except IOError:
break
elif port:
try:
s = socket.create_connection((host, port), connect_timeout)
s.shutdown(socket.SHUT_RDWR)
s.close()
except Exception:
break
# Conditions not yet met, wait and try again
time.sleep(module.params['sleep'])
else:
elapsed = utcnow() - start
if port:
module.fail_json(msg=msg or "Timeout when waiting for %s:%s to stop." % (host, port), elapsed=elapsed.seconds)
elif path:
module.fail_json(msg=msg or "Timeout when waiting for %s to be absent." % (path), elapsed=elapsed.seconds)
elif state in ['started', 'present']:
# wait for start condition
end = start + datetime.timedelta(seconds=timeout)
while utcnow() < end:
if path:
try:
os.stat(b_path)
except OSError as e:
# If anything except file not present, throw an error
if e.errno != 2:
elapsed = utcnow() - start
module.fail_json(msg=msg or "Failed to stat %s, %s" % (path, e.strerror), elapsed=elapsed.seconds)
# file doesn't exist yet, so continue
else:
# File exists. Are there additional things to check?
if not b_compiled_search_re:
# nope, succeed!
break
try:
with open(b_path, 'rb') as f:
with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as mm:
search = b_compiled_search_re.search(mm)
if search:
if search.groupdict():
match_groupdict = search.groupdict()
if search.groups():
match_groups = search.groups()
break
except IOError:
pass
elif port:
alt_connect_timeout = math.ceil(_timedelta_total_seconds(end - utcnow()))
try:
s = socket.create_connection((host, port), min(connect_timeout, alt_connect_timeout))
except Exception:
# Failed to connect by connect_timeout. wait and try again
pass
else:
# Connected -- are there additional conditions?
if b_compiled_search_re:
b_data = b''
matched = False
while utcnow() < end:
max_timeout = math.ceil(_timedelta_total_seconds(end - utcnow()))
readable = select.select([s], [], [], max_timeout)[0]
if not readable:
# No new data. Probably means our timeout
# expired
continue
response = s.recv(1024)
if not response:
# Server shutdown
break
b_data += response
if b_compiled_search_re.search(b_data):
matched = True
break
# Shutdown the client socket
try:
s.shutdown(socket.SHUT_RDWR)
except socket.error as e:
if e.errno != errno.ENOTCONN:
raise
# else, the server broke the connection on its end, assume it's not ready
else:
s.close()
if matched:
# Found our string, success!
break
else:
# Connection established, success!
try:
s.shutdown(socket.SHUT_RDWR)
except socket.error as e:
if e.errno != errno.ENOTCONN:
raise
# else, the server broke the connection on its end, assume it's not ready
else:
s.close()
break
# Conditions not yet met, wait and try again
time.sleep(module.params['sleep'])
else: # while-else
# Timeout expired
elapsed = utcnow() - start
if port:
if search_regex:
module.fail_json(msg=msg or "Timeout when waiting for search string %s in %s:%s" % (search_regex, host, port), elapsed=elapsed.seconds)
else:
module.fail_json(msg=msg or "Timeout when waiting for %s:%s" % (host, port), elapsed=elapsed.seconds)
elif path:
if search_regex:
module.fail_json(msg=msg or "Timeout when waiting for search string %s in %s" % (search_regex, path), elapsed=elapsed.seconds)
else:
module.fail_json(msg=msg or "Timeout when waiting for file %s" % (path), elapsed=elapsed.seconds)
elif state == 'drained':
# wait until all active connections are gone
end = start + datetime.timedelta(seconds=timeout)
tcpconns = TCPConnectionInfo(module)
while utcnow() < end:
if tcpconns.get_active_connections_count() == 0:
break
# Conditions not yet met, wait and try again
time.sleep(module.params['sleep'])
else:
elapsed = utcnow() - start
module.fail_json(msg=msg or "Timeout when waiting for %s:%s to drain" % (host, port), elapsed=elapsed.seconds)
elapsed = utcnow() - start
module.exit_json(state=state, port=port, search_regex=search_regex, match_groups=match_groups, match_groupdict=match_groupdict, path=path,
elapsed=elapsed.seconds)
if __name__ == '__main__':
main()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 79,683 |
wait_for no longer works for files in e.g. sysfs because it uses mmap() instead of read()
|
### Summary
We have an ansible task using `wait_for` module to wait for network device carrier to be up:
```
wait_for:
path: "/sys/class/net/{{ device }}/carrier"
search_regex: "1"
timeout: 60
```
This no longer works since https://github.com/ansible/ansible/commit/9d4ced1237380051334b54379ff64e45c0341a6d, because `mmap()` is now used instead of `read()`, and it isn't supported by `sysfs` in this case.
Do we really need to use `mmap()`? I imagine it might cause issues with other pseudofilesystems too.
In the meantime, we use a workaround: use grep in a loop instead
```
command:
cmd: "grep 1 /sys/class/net/{{ device }}/carrier"
timeout: 1
retries: 60
register: carrier
until: carrier is successful
```
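For reference, a minimal sketch of the fallback idea (illustration only, not necessarily the merged fix; `b_path` and `b_compiled_search_re` mirror names the module already uses): treat an mmap() failure as non-fatal and search a plain read() of the file instead.
```python
import contextlib
import mmap


def _search_file(b_path, b_compiled_search_re):
    """Return (match_groupdict, match_groups, found) for the pattern in b_path."""
    with open(b_path, 'rb') as f:
        try:
            with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as mm:
                search = b_compiled_search_re.search(mm)
                # pull the results out while the mmap is still open
                if search:
                    return search.groupdict(), search.groups(), True
        except (ValueError, OSError):
            # mmap() is refused for empty files and for pseudo-filesystems such as
            # sysfs, so fall back to reading the (typically tiny) file into memory
            search = b_compiled_search_re.search(f.read())
            if search:
                return search.groupdict(), search.groups(), True
    return {}, (), False
```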
### Issue Type
Bug Report
### Component Name
wait_for
### Ansible Version
```console
$ ansible --version
ansible [core 2.13.7]
config file = None
configured module search path = ['/Users/rob.muir/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /Users/rob.muir/workspace/trash/build/.env/lib/python3.10/site-packages/ansible
ansible collection location = /Users/rob.muir/.ansible/collections:/usr/share/ansible/collections
executable location = /Users/rob.muir/workspace/trash/build/.env/bin/ansible
python version = 3.10.7 (main, Sep 15 2022, 01:50:27) [Clang 12.0.0 (clang-1200.0.32.29)]
jinja version = 3.0.3
libyaml = False
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
```
### OS / Environment
Mac OS controller, Centos Stream 9 target
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```
# set device to say, eth0 or whatever is on your computer
wait_for:
path: "/sys/class/net/{{ device }}/carrier"
search_regex: "1"
timeout: 60
```
### Expected Results
Expect wait_for to still work with files in /sys
### Actual Results
```console
The wait_for will fail, only with recent ansible versions, due to the use of mmap()
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/79683
|
https://github.com/ansible/ansible/pull/82064
|
f6d7dd0840c079d0d2c2e3d8852b952462423a78
|
8b102dca4a236efcf645e8c17721da549cda61c5
| 2023-01-06T15:47:11Z |
python
| 2023-11-16T20:20:52Z |
test/integration/targets/wait_for/tasks/main.yml
|
---
- name: test wait_for with delegate_to
wait_for:
timeout: 2
delegate_to: localhost
register: waitfor
- assert:
that:
- waitfor is successful
- waitfor.elapsed >= 2
- name: setup create a directory to serve files from
file:
dest: "{{ files_dir }}"
state: directory
- name: setup webserver
copy:
src: "testserver.py"
dest: "{{ remote_tmp_dir }}/testserver.py"
- name: setup a path
file:
path: "{{ remote_tmp_dir }}/wait_for_file"
state: touch
- name: setup remove a file after 3s
shell: sleep 3 && rm {{ remote_tmp_dir }}/wait_for_file
async: 20
poll: 0
- name: test for absent path
wait_for:
path: "{{ remote_tmp_dir }}/wait_for_file"
state: absent
timeout: 20
register: waitfor
- name: verify test for absent path
assert:
that:
- waitfor is successful
- waitfor.path == "{{ remote_tmp_dir | expanduser }}/wait_for_file"
- waitfor.elapsed >= 2
- waitfor.elapsed <= 15
- name: setup create a file after 3s
shell: sleep 3 && touch {{ remote_tmp_dir }}/wait_for_file
async: 20
poll: 0
- name: test for present path
wait_for:
path: "{{ remote_tmp_dir }}/wait_for_file"
timeout: 5
register: waitfor
- name: verify test for present path
assert:
that:
- waitfor is successful
- waitfor.path == "{{ remote_tmp_dir | expanduser }}/wait_for_file"
- waitfor.elapsed >= 2
- waitfor.elapsed <= 15
- name: setup write keyword to file after 3s
shell: sleep 3 && echo completed > {{remote_tmp_dir}}/wait_for_keyword
async: 20
poll: 0
- name: test wait for keyword in file
wait_for:
path: "{{remote_tmp_dir}}/wait_for_keyword"
search_regex: completed
timeout: 5
register: waitfor
- name: verify test wait for keyword in file
assert:
that:
- waitfor is successful
- "waitfor.search_regex == 'completed'"
- waitfor.elapsed >= 2
- waitfor.elapsed <= 15
- name: setup write keyword to file after 3s
shell: sleep 3 && echo "completed data 123" > {{remote_tmp_dir}}/wait_for_keyword
async: 20
poll: 0
- name: test wait for keyword in file with match groups
wait_for:
path: "{{remote_tmp_dir}}/wait_for_keyword"
search_regex: completed (?P<foo>\w+) ([0-9]+)
timeout: 5
register: waitfor
- name: verify test wait for keyword in file with match groups
assert:
that:
- waitfor is successful
- waitfor.elapsed >= 2
- waitfor.elapsed <= 15
- waitfor['match_groupdict'] | length == 1
- waitfor['match_groupdict']['foo'] == 'data'
- waitfor['match_groups'] == ['data', '123']
- name: write non-ascii file
script: write_utf16.py "{{remote_tmp_dir}}/utf16.txt"
args:
executable: '{{ ansible_facts.python.executable }}'
- name: test non-ascii file
wait_for:
path: "{{remote_tmp_dir}}/utf16.txt"
search_regex: completed
- name: test wait for port timeout
wait_for:
port: 12121
timeout: 3
register: waitfor
ignore_errors: true
- name: verify test wait for port timeout
assert:
that:
- waitfor is failed
- waitfor.elapsed == 3
- "waitfor.msg == 'Timeout when waiting for 127.0.0.1:12121'"
- name: test fail with custom msg
wait_for:
port: 12121
msg: fail with custom message
timeout: 3
register: waitfor
ignore_errors: true
- name: verify test fail with custom msg
assert:
that:
- waitfor is failed
- waitfor.elapsed == 3
- "waitfor.msg == 'fail with custom message'"
- name: setup start SimpleHTTPServer
shell: sleep 3 && cd {{ files_dir }} && {{ ansible_python.executable }} {{ remote_tmp_dir}}/testserver.py {{ http_port }}
async: 120 # this test set can take ~1m to run on FreeBSD (via Shippable)
poll: 0
- name: test wait for port with sleep
wait_for:
port: "{{ http_port }}"
sleep: 3
register: waitfor
- name: verify test wait for port sleep
assert:
that:
- waitfor is successful
- waitfor is not changed
- "waitfor.port == {{ http_port }}"
- name: install psutil using pip (non-Linux only)
pip:
name: psutil==5.8.0
when: ansible_system != 'Linux'
- name: Copy zombie.py
copy:
src: zombie.py
dest: "{{ remote_tmp_dir }}"
- name: Create zombie process
  shell: "{{ ansible_python.executable }} {{ remote_tmp_dir }}/zombie.py"
async: 90
poll: 0
- name: test wait for port drained
wait_for:
port: "{{ http_port }}"
state: drained
register: waitfor
- name: verify test wait for port
assert:
that:
- waitfor is successful
- waitfor is not changed
- "waitfor.port == {{ http_port }}"
- name: test wait_for with delay
wait_for:
timeout: 2
delay: 2
register: waitfor
- name: verify test wait_for with delay
assert:
that:
- waitfor is successful
- waitfor.elapsed >= 4
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,018 |
Add Fedora 39 to ansible-test
|
### Summary
This is a remote VM and container addition.
### Issue Type
Feature Idea
### Component Name
`ansible-test`
|
https://github.com/ansible/ansible/issues/82018
|
https://github.com/ansible/ansible/pull/82218
|
8fd1aa0d2e205ed9836fa2d4ea566faed8b857de
|
fbdb666411f0d2c833e2a74cbf35593b22abb69f
| 2023-10-18T19:38:26Z |
python
| 2023-11-17T02:30:13Z |
.azure-pipelines/azure-pipelines.yml
|
trigger:
batch: true
branches:
include:
- devel
- stable-*
pr:
autoCancel: true
branches:
include:
- devel
- stable-*
schedules:
- cron: 0 7 * * *
displayName: Nightly
always: true
branches:
include:
- devel
- stable-*
variables:
- name: checkoutPath
value: ansible
- name: coverageBranches
value: devel
- name: entryPoint
value: .azure-pipelines/commands/entry-point.sh
- name: fetchDepth
value: 500
- name: defaultContainer
value: quay.io/ansible/azure-pipelines-test-container:4.0.1
pool: Standard
stages:
- stage: Sanity
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Test {0}
testFormat: sanity/{0}
targets:
- test: 1
- test: 2
- stage: Units
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Python {0}
testFormat: units/{0}
targets:
- test: 3.7
- test: 3.8
- test: 3.9
- test: '3.10'
- test: 3.11
- test: 3.12
- stage: Windows
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Server {0}
testFormat: windows/{0}/1
targets:
- test: 2016
- test: 2019
- test: 2022
- stage: Remote
dependsOn: []
jobs:
- template: templates/matrix.yml # context/target
parameters:
targets:
- name: macOS 13.2
test: macos/13.2
- name: RHEL 9.3 py39
test: rhel/[email protected]
- name: RHEL 9.3 py311
test: rhel/[email protected]
- name: FreeBSD 13.2
test: freebsd/13.2
groups:
- 1
- 2
- template: templates/matrix.yml # context/controller
parameters:
targets:
- name: macOS 13.2
test: macos/13.2
- name: RHEL 9.3
test: rhel/9.3
- name: FreeBSD 13.2
test: freebsd/13.2
groups:
- 3
- 4
- 5
- template: templates/matrix.yml # context/controller (ansible-test container management)
parameters:
targets:
- name: Alpine 3.18
test: alpine/3.18
- name: Fedora 38
test: fedora/38
- name: RHEL 9.3
test: rhel/9.3
- name: Ubuntu 22.04
test: ubuntu/22.04
groups:
- 6
- stage: Docker
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: linux/{0}
targets:
- name: Alpine 3
test: alpine3
- name: Fedora 38
test: fedora38
- name: Ubuntu 20.04
test: ubuntu2004
- name: Ubuntu 22.04
test: ubuntu2204
groups:
- 1
- 2
- template: templates/matrix.yml
parameters:
testFormat: linux/{0}
targets:
- name: Alpine 3
test: alpine3
- name: Fedora 38
test: fedora38
- name: Ubuntu 22.04
test: ubuntu2204
groups:
- 3
- 4
- 5
- stage: Galaxy
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Python {0}
testFormat: galaxy/{0}/1
targets:
- test: '3.10'
- test: 3.11
- test: 3.12
- stage: Generic
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Python {0}
testFormat: generic/{0}/1
targets:
- test: '3.10'
- test: 3.11
- test: 3.12
- stage: Incidental_Windows
displayName: Incidental Windows
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Server {0}
testFormat: i/windows/{0}
targets:
- test: 2016
- test: 2019
- test: 2022
- stage: Incidental
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: i/{0}/1
targets:
- name: IOS Python
test: ios/csr1000v/
- name: VyOS Python
test: vyos/1.1.8/
- stage: Summary
condition: succeededOrFailed()
dependsOn:
- Sanity
- Units
- Windows
- Remote
- Docker
- Galaxy
- Generic
- Incidental_Windows
- Incidental
jobs:
- template: templates/coverage.yml
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,018 |
Add Fedora 39 to ansible-test
|
### Summary
This is a remote VM and container addition.
### Issue Type
Feature Idea
### Component Name
`ansible-test`
|
https://github.com/ansible/ansible/issues/82018
|
https://github.com/ansible/ansible/pull/82218
|
8fd1aa0d2e205ed9836fa2d4ea566faed8b857de
|
fbdb666411f0d2c833e2a74cbf35593b22abb69f
| 2023-10-18T19:38:26Z |
python
| 2023-11-17T02:30:13Z |
changelogs/fragments/ansible-test-added-fedora-39.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,018 |
Add Fedora 39 to ansible-test
|
### Summary
This is a remote VM and container addition.
### Issue Type
Feature Idea
### Component Name
`ansible-test`
|
https://github.com/ansible/ansible/issues/82018
|
https://github.com/ansible/ansible/pull/82218
|
8fd1aa0d2e205ed9836fa2d4ea566faed8b857de
|
fbdb666411f0d2c833e2a74cbf35593b22abb69f
| 2023-10-18T19:38:26Z |
python
| 2023-11-17T02:30:13Z |
test/lib/ansible_test/_data/completion/docker.txt
|
base image=quay.io/ansible/base-test-container:6.0.0 python=3.12,3.7,3.8,3.9,3.10,3.11
default image=quay.io/ansible/default-test-container:9.3.0 python=3.12,3.7,3.8,3.9,3.10,3.11 context=collection
default image=quay.io/ansible/ansible-core-test-container:9.3.0 python=3.12,3.7,3.8,3.9,3.10,3.11 context=ansible-core
alpine3 image=quay.io/ansible/alpine3-test-container:6.3.0 python=3.11 cgroup=none audit=none
fedora38 image=quay.io/ansible/fedora38-test-container:6.3.0 python=3.11
ubuntu2004 image=quay.io/ansible/ubuntu2004-test-container:6.3.0 python=3.8
ubuntu2204 image=quay.io/ansible/ubuntu2204-test-container:6.3.0 python=3.10
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,018 |
Add Fedora 39 to ansible-test
|
### Summary
This is a remote VM and container addition.
### Issue Type
Feature Idea
### Component Name
`ansible-test`
|
https://github.com/ansible/ansible/issues/82018
|
https://github.com/ansible/ansible/pull/82218
|
8fd1aa0d2e205ed9836fa2d4ea566faed8b857de
|
fbdb666411f0d2c833e2a74cbf35593b22abb69f
| 2023-10-18T19:38:26Z |
python
| 2023-11-17T02:30:13Z |
test/lib/ansible_test/_data/completion/remote.txt
|
alpine/3.18 python=3.11 become=doas_sudo provider=aws arch=x86_64
alpine become=doas_sudo provider=aws arch=x86_64
fedora/38 python=3.11 become=sudo provider=aws arch=x86_64
fedora become=sudo provider=aws arch=x86_64
freebsd/13.2 python=3.9,3.11 python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
freebsd python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
macos/13.2 python=3.11 python_dir=/usr/local/bin become=sudo provider=parallels arch=x86_64
macos python_dir=/usr/local/bin become=sudo provider=parallels arch=x86_64
rhel/9.2 python=3.9,3.11 become=sudo provider=aws arch=x86_64
rhel/9.3 python=3.9,3.11 become=sudo provider=aws arch=x86_64
rhel become=sudo provider=aws arch=x86_64
ubuntu/22.04 python=3.10 become=sudo provider=aws arch=x86_64
ubuntu become=sudo provider=aws arch=x86_64
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,699 |
unarchive skipping valid archives due to pcs check
|
### Summary
The unarchive module is skipping valid archives due to the pcs check found [here](https://github.com/ansible/ansible/blob/6f65397871d089681fec5380b9ac17b62fb4e8e1/lib/ansible/modules/unarchive.py#L502C1-L504C25):
```
# Check first and seventh field in order to skip header/footer
if len(pcs[0]) != 7 and len(pcs[0]) != 10:
continue
```
The zipinfo output of the zip in use by this playbook:
```
zipinfo -T -s /tmp/t/1.zip
Archive: /tmp/t/1.zip
Archive size: 14848 bytes; Members: 4
-rw-a--- 2.0 fat 2538 t- defN 20230913.162426 deployment/scripts/setup.sh
-rw-a--- 2.0 fat 743 t- defN 20230522.135636 1.ansible.vault
-rw-a--- 2.0 fat 873 t- defN 20230911.104146 2.ansible.vault
-rw-a--- 2.0 fat 12816 b- stor 20230913.162542 scripts.zip
Members: 4; Bytes uncompressed: 16970, compressed: 14362, 15.4%
Directories: 0, Files: 4, Links: 0
```
Notice the first field (pcs[0]) is 8 characters (not 7 or 10 as expected). This zip extracts just fine using unzip.
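As a sketch of one possible direction (illustration only, not necessarily the merged change), the fixed-length test could be replaced by a shape check on the permission field, which also accepts the 8-character MS-DOS style strings shown above:
```python
import re

# optional type character followed by 6-9 permission/attribute characters
PERM_FIELD_RE = re.compile(r'^[dl?-]?[rwxstahST-]{6,9}$')


def looks_like_member_entry(pcs):
    """Heuristic test that a 'zipinfo -T -s' line describes an archive member."""
    return (
        len(pcs) == 8
        and PERM_FIELD_RE.match(pcs[0]) is not None
        and len(pcs[6]) == 15  # timestamp field, e.g. 20230913.162426
    )
```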
### Issue Type
Bug Report
### Component Name
unarchive
### Ansible Version
```console
$ ansible --version
ansible [core 2.13.3]
config file = /root/.ansible.cfg
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /bin/ansible
python version = 3.9.16 (main, May 31 2023, 12:21:58) [GCC 8.5.0 20210514 (Red Hat 8.5.0-18)]
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CACHE_PLUGIN(/root/.ansible.cfg) = jsonfile
CACHE_PLUGIN_CONNECTION(/root/.ansible.cfg) = /tmp/facts_cache
CACHE_PLUGIN_TIMEOUT(/root/.ansible.cfg) = 7200
DEFAULT_FORKS(/root/.ansible.cfg) = 50
DEFAULT_GATHERING(/root/.ansible.cfg) = smart
DEFAULT_REMOTE_USER(/root/.ansible.cfg) = ansible
HOST_KEY_CHECKING(/root/.ansible.cfg) = False
CACHE:
=====
jsonfile:
________
_timeout(/root/.ansible.cfg) = 7200
_uri(/root/.ansible.cfg) = /tmp/facts_cache
CONNECTION:
==========
paramiko_ssh:
____________
host_key_checking(/root/.ansible.cfg) = False
remote_user(/root/.ansible.cfg) = ansible
ssh:
___
host_key_checking(/root/.ansible.cfg) = False
pipelining(/root/.ansible.cfg) = True
remote_user(/root/.ansible.cfg) = ansible
ssh_args(/root/.ansible.cfg) = -o ControlMaster=auto -o ControlPersist=60s -o PreferredAuthentications=publickey
```
### OS / Environment
RHEL 8
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
- name: unarchive
ansible.builtin.unarchive:
src: /tmp/t/1.zip
dest: /tmp/t
    remote_src: yes
```
### Expected Results
changed: [HOSTNAME]
### Actual Results
```console
ok: [HOSTNAME]
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81699
|
https://github.com/ansible/ansible/pull/81705
|
ce9d268ab88eee1e69dfdd6bf853d021d2b7d13d
|
7dde4901d42e4c043adbd980c941b97cd3237bb6
| 2023-09-14T19:20:51Z |
python
| 2023-11-22T00:48:31Z |
changelogs/fragments/81699-zip-permission.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,699 |
unarchive skipping valid archives due to pcs check
|
### Summary
The unarchive module is skipping valid archives due to the pcs check found [here](https://github.com/ansible/ansible/blob/6f65397871d089681fec5380b9ac17b62fb4e8e1/lib/ansible/modules/unarchive.py#L502C1-L504C25):
```
# Check first and seventh field in order to skip header/footer
if len(pcs[0]) != 7 and len(pcs[0]) != 10:
continue
```
The zipinfo output of the zip in use by this playbook:
```
zipinfo -T -s /tmp/t/1.zip
Archive: /tmp/t/1.zip
Archive size: 14848 bytes; Members: 4
-rw-a--- 2.0 fat 2538 t- defN 20230913.162426 deployment/scripts/setup.sh
-rw-a--- 2.0 fat 743 t- defN 20230522.135636 1.ansible.vault
-rw-a--- 2.0 fat 873 t- defN 20230911.104146 2.ansible.vault
-rw-a--- 2.0 fat 12816 b- stor 20230913.162542 scripts.zip
Members: 4; Bytes uncompressed: 16970, compressed: 14362, 15.4%
Directories: 0, Files: 4, Links: 0
```
Notice the first field (pcs[0]) is 8 characters (not 7 or 10 as expected). This zip extracts just fine using unzip.
### Issue Type
Bug Report
### Component Name
unarchive
### Ansible Version
```console
$ ansible --version
ansible [core 2.13.3]
config file = /root/.ansible.cfg
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /bin/ansible
python version = 3.9.16 (main, May 31 2023, 12:21:58) [GCC 8.5.0 20210514 (Red Hat 8.5.0-18)]
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CACHE_PLUGIN(/root/.ansible.cfg) = jsonfile
CACHE_PLUGIN_CONNECTION(/root/.ansible.cfg) = /tmp/facts_cache
CACHE_PLUGIN_TIMEOUT(/root/.ansible.cfg) = 7200
DEFAULT_FORKS(/root/.ansible.cfg) = 50
DEFAULT_GATHERING(/root/.ansible.cfg) = smart
DEFAULT_REMOTE_USER(/root/.ansible.cfg) = ansible
HOST_KEY_CHECKING(/root/.ansible.cfg) = False
CACHE:
=====
jsonfile:
________
_timeout(/root/.ansible.cfg) = 7200
_uri(/root/.ansible.cfg) = /tmp/facts_cache
CONNECTION:
==========
paramiko_ssh:
____________
host_key_checking(/root/.ansible.cfg) = False
remote_user(/root/.ansible.cfg) = ansible
ssh:
___
host_key_checking(/root/.ansible.cfg) = False
pipelining(/root/.ansible.cfg) = True
remote_user(/root/.ansible.cfg) = ansible
ssh_args(/root/.ansible.cfg) = -o ControlMaster=auto -o ControlPersist=60s -o PreferredAuthentications=publickey
```
### OS / Environment
RHEL 8
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
- name: unarchive
ansible.builtin.unarchive:
src: /tmp/t/1.zip
dest: /tmp/t
    remote_src: yes
```
### Expected Results
changed: [HOSTNAME]
### Actual Results
```console
ok: [HOSTNAME]
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81699
|
https://github.com/ansible/ansible/pull/81705
|
ce9d268ab88eee1e69dfdd6bf853d021d2b7d13d
|
7dde4901d42e4c043adbd980c941b97cd3237bb6
| 2023-09-14T19:20:51Z |
python
| 2023-11-22T00:48:31Z |
lib/ansible/modules/unarchive.py
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Michael DeHaan <[email protected]>
# Copyright: (c) 2013, Dylan Martin <[email protected]>
# Copyright: (c) 2015, Toshio Kuratomi <[email protected]>
# Copyright: (c) 2016, Dag Wieers <[email protected]>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = r'''
---
module: unarchive
version_added: '1.4'
short_description: Unpacks an archive after (optionally) copying it from the local machine
description:
- The M(ansible.builtin.unarchive) module unpacks an archive. It will not unpack a compressed file that does not contain an archive.
- By default, it will copy the source file from the local system to the target before unpacking.
- Set O(remote_src=yes) to unpack an archive which already exists on the target.
- If checksum validation is desired, use M(ansible.builtin.get_url) or M(ansible.builtin.uri) instead to fetch the file and set O(remote_src=yes).
- For Windows targets, use the M(community.windows.win_unzip) module instead.
options:
src:
description:
- If O(remote_src=no) (default), local path to archive file to copy to the target server; can be absolute or relative. If O(remote_src=yes), path on the
target server to existing archive file to unpack.
- If O(remote_src=yes) and O(src) contains V(://), the remote machine will download the file from the URL first. (version_added 2.0). This is only for
simple cases, for full download support use the M(ansible.builtin.get_url) module.
type: path
required: true
dest:
description:
- Remote absolute path where the archive should be unpacked.
- The given path must exist. Base directory is not created by this module.
type: path
required: true
copy:
description:
      - If true, the file is copied from the local controller to the managed (remote) node; otherwise, the plugin will look for the src archive on the managed machine.
- This option has been deprecated in favor of O(remote_src).
- This option is mutually exclusive with O(remote_src).
type: bool
default: yes
creates:
description:
- If the specified absolute path (file or directory) already exists, this step will B(not) be run.
- The specified absolute path (file or directory) must be below the base path given with O(dest).
type: path
version_added: "1.6"
io_buffer_size:
description:
- Size of the volatile memory buffer that is used for extracting files from the archive in bytes.
type: int
default: 65536
version_added: "2.12"
list_files:
description:
- If set to True, return the list of files that are contained in the tarball.
type: bool
default: no
version_added: "2.0"
exclude:
description:
- List the directory and file entries that you would like to exclude from the unarchive action.
- Mutually exclusive with O(include).
type: list
default: []
elements: str
version_added: "2.1"
include:
description:
- List of directory and file entries that you would like to extract from the archive. If O(include)
is not empty, only files listed here will be extracted.
- Mutually exclusive with O(exclude).
type: list
default: []
elements: str
version_added: "2.11"
keep_newer:
description:
- Do not replace existing files that are newer than files from the archive.
type: bool
default: no
version_added: "2.1"
extra_opts:
description:
- Specify additional options by passing in an array.
- Each space-separated command-line option should be a new element of the array. See examples.
- Command-line options with multiple elements must use multiple lines in the array, one for each element.
type: list
elements: str
default: []
version_added: "2.1"
remote_src:
description:
- Set to V(true) to indicate the archived file is already on the remote system and not local to the Ansible controller.
- This option is mutually exclusive with O(copy).
type: bool
default: no
version_added: "2.2"
validate_certs:
description:
- This only applies if using a https URL as the source of the file.
      - This should only be set to V(false) when used on personally controlled sites using a self-signed certificate.
- Prior to 2.2 the code worked as if this was set to V(true).
type: bool
default: yes
version_added: "2.2"
extends_documentation_fragment:
- action_common_attributes
- action_common_attributes.flow
- action_common_attributes.files
- decrypt
- files
attributes:
action:
support: full
async:
support: none
bypass_host_loop:
support: none
check_mode:
support: partial
details: Not supported for gzipped tar files.
diff_mode:
support: partial
details: Uses gtar's C(--diff) arg to calculate if changed or not. If this C(arg) is not supported, it will always unpack the archive.
platform:
platforms: posix
safe_file_operations:
support: none
vault:
support: full
todo:
- Re-implement tar support using native tarfile module.
- Re-implement zip support using native zipfile module.
notes:
- Requires C(zipinfo) and C(gtar)/C(unzip) command on target host.
- Requires C(zstd) command on target host to expand I(.tar.zst) files.
- Can handle I(.zip) files using C(unzip) as well as I(.tar), I(.tar.gz), I(.tar.bz2), I(.tar.xz), and I(.tar.zst) files using C(gtar).
- Does not handle I(.gz) files, I(.bz2) files, I(.xz), or I(.zst) files that do not contain a I(.tar) archive.
- Existing files/directories in the destination which are not in the archive
are not touched. This is the same behavior as a normal archive extraction.
- Existing files/directories in the destination which are not in the archive
are ignored for purposes of deciding if the archive should be unpacked or not.
seealso:
- module: community.general.archive
- module: community.general.iso_extract
- module: community.windows.win_unzip
author: Michael DeHaan
'''
EXAMPLES = r'''
- name: Extract foo.tgz into /var/lib/foo
ansible.builtin.unarchive:
src: foo.tgz
dest: /var/lib/foo
- name: Unarchive a file that is already on the remote machine
ansible.builtin.unarchive:
src: /tmp/foo.zip
dest: /usr/local/bin
remote_src: yes
- name: Unarchive a file that needs to be downloaded (added in 2.0)
ansible.builtin.unarchive:
src: https://example.com/example.zip
dest: /usr/local/bin
remote_src: yes
- name: Unarchive a file with extra options
ansible.builtin.unarchive:
src: /tmp/foo.zip
dest: /usr/local/bin
extra_opts:
- --transform
- s/^xxx/yyy/
'''
RETURN = r'''
dest:
description: Path to the destination directory.
returned: always
type: str
sample: /opt/software
files:
description: List of all the files in the archive.
returned: When O(list_files) is V(True)
type: list
sample: '["file1", "file2"]'
gid:
description: Numerical ID of the group that owns the destination directory.
returned: always
type: int
sample: 1000
group:
description: Name of the group that owns the destination directory.
returned: always
type: str
sample: "librarians"
handler:
description: Archive software handler used to extract and decompress the archive.
returned: always
type: str
sample: "TgzArchive"
mode:
description: String that represents the octal permissions of the destination directory.
returned: always
type: str
sample: "0755"
owner:
description: Name of the user that owns the destination directory.
returned: always
type: str
sample: "paul"
size:
description: The size of destination directory in bytes. Does not include the size of files or subdirectories contained within.
returned: always
type: int
sample: 36
src:
description:
- The source archive's path.
- If O(src) was a remote web URL, or from the local ansible controller, this shows the temporary location where the download was stored.
returned: always
type: str
sample: "/home/paul/test.tar.gz"
state:
description: State of the destination. Effectively always "directory".
returned: always
type: str
sample: "directory"
uid:
description: Numerical ID of the user that owns the destination directory.
returned: always
type: int
sample: 1000
'''
import binascii
import codecs
import datetime
import fnmatch
import grp
import os
import platform
import pwd
import re
import stat
import time
import traceback
from functools import partial
from zipfile import ZipFile
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.process import get_bin_path
from ansible.module_utils.common.locale import get_best_parsable_locale
from ansible.module_utils.urls import fetch_file
try: # python 3.3+
from shlex import quote # type: ignore[attr-defined]
except ImportError: # older python
from pipes import quote
try: # python 3.2+
from zipfile import BadZipFile # type: ignore[attr-defined]
except ImportError: # older python
from zipfile import BadZipfile as BadZipFile
# String from tar that shows the tar contents are different from the
# filesystem
OWNER_DIFF_RE = re.compile(r': Uid differs$')
GROUP_DIFF_RE = re.compile(r': Gid differs$')
MODE_DIFF_RE = re.compile(r': Mode differs$')
MOD_TIME_DIFF_RE = re.compile(r': Mod time differs$')
# NEWER_DIFF_RE = re.compile(r' is newer or same age.$')
EMPTY_FILE_RE = re.compile(r': : Warning: Cannot stat: No such file or directory$')
MISSING_FILE_RE = re.compile(r': Warning: Cannot stat: No such file or directory$')
ZIP_FILE_MODE_RE = re.compile(r'([r-][w-][SsTtx-]){3}')
INVALID_OWNER_RE = re.compile(r': Invalid owner')
INVALID_GROUP_RE = re.compile(r': Invalid group')
SYMLINK_DIFF_RE = re.compile(r': Symlink differs$')
def crc32(path, buffer_size):
''' Return a CRC32 checksum of a file '''
crc = binascii.crc32(b'')
with open(path, 'rb') as f:
for b_block in iter(partial(f.read, buffer_size), b''):
crc = binascii.crc32(b_block, crc)
return crc & 0xffffffff
def shell_escape(string):
''' Quote meta-characters in the args for the unix shell '''
return re.sub(r'([^A-Za-z0-9_])', r'\\\1', string)
class UnarchiveError(Exception):
pass
class ZipArchive(object):
def __init__(self, src, b_dest, file_args, module):
self.src = src
self.b_dest = b_dest
self.file_args = file_args
self.opts = module.params['extra_opts']
self.module = module
self.io_buffer_size = module.params["io_buffer_size"]
self.excludes = module.params['exclude']
self.includes = []
self.include_files = self.module.params['include']
self.cmd_path = None
self.zipinfo_cmd_path = None
self._files_in_archive = []
self._infodict = dict()
self.zipinfoflag = ''
self.binaries = (
('unzip', 'cmd_path'),
('zipinfo', 'zipinfo_cmd_path'),
)
def _permstr_to_octal(self, modestr, umask):
''' Convert a Unix permission string (rw-r--r--) into a mode (0644) '''
revstr = modestr[::-1]
mode = 0
for j in range(0, 3):
for i in range(0, 3):
if revstr[i + 3 * j] in ['r', 'w', 'x', 's', 't']:
mode += 2 ** (i + 3 * j)
# The unzip utility does not support setting the stST bits
# if revstr[i + 3 * j] in ['s', 't', 'S', 'T' ]:
# mode += 2 ** (9 + j)
return (mode & ~umask)
def _legacy_file_list(self):
rc, out, err = self.module.run_command([self.cmd_path, '-v', self.src])
if rc:
self.module.debug(err)
raise UnarchiveError('Neither python zipfile nor unzip can read %s' % self.src)
for line in out.splitlines()[3:-2]:
fields = line.split(None, 7)
self._files_in_archive.append(fields[7])
self._infodict[fields[7]] = int(fields[6])
def _crc32(self, path):
if self._infodict:
return self._infodict[path]
try:
archive = ZipFile(self.src)
except BadZipFile as e:
if e.args[0].lower().startswith('bad magic number'):
# Python2.4 can't handle zipfiles with > 64K files. Try using
# /usr/bin/unzip instead
self._legacy_file_list()
else:
raise
else:
try:
for item in archive.infolist():
self._infodict[item.filename] = int(item.CRC)
except Exception:
archive.close()
raise UnarchiveError('Unable to list files in the archive')
return self._infodict[path]
@property
def files_in_archive(self):
if self._files_in_archive:
return self._files_in_archive
self._files_in_archive = []
try:
archive = ZipFile(self.src)
except BadZipFile as e:
if e.args[0].lower().startswith('bad magic number'):
# Python2.4 can't handle zipfiles with > 64K files. Try using
# /usr/bin/unzip instead
self._legacy_file_list()
else:
raise
else:
try:
for member in archive.namelist():
if self.include_files:
for include in self.include_files:
if fnmatch.fnmatch(member, include):
self._files_in_archive.append(to_native(member))
else:
exclude_flag = False
if self.excludes:
for exclude in self.excludes:
if fnmatch.fnmatch(member, exclude):
exclude_flag = True
break
if not exclude_flag:
self._files_in_archive.append(to_native(member))
except Exception as e:
archive.close()
raise UnarchiveError('Unable to list files in the archive: %s' % to_native(e))
archive.close()
return self._files_in_archive
def is_unarchived(self):
# BSD unzip doesn't support zipinfo listings with timestamp.
if self.zipinfoflag:
cmd = [self.zipinfo_cmd_path, self.zipinfoflag, '-T', '-s', self.src]
else:
cmd = [self.zipinfo_cmd_path, '-T', '-s', self.src]
if self.excludes:
cmd.extend(['-x', ] + self.excludes)
if self.include_files:
cmd.extend(self.include_files)
rc, out, err = self.module.run_command(cmd)
self.module.debug(err)
old_out = out
diff = ''
out = ''
if rc == 0:
unarchived = True
else:
unarchived = False
# Get some information related to user/group ownership
umask = os.umask(0)
os.umask(umask)
systemtype = platform.system()
# Get current user and group information
groups = os.getgroups()
run_uid = os.getuid()
run_gid = os.getgid()
try:
run_owner = pwd.getpwuid(run_uid).pw_name
except (TypeError, KeyError):
run_owner = run_uid
try:
run_group = grp.getgrgid(run_gid).gr_name
except (KeyError, ValueError, OverflowError):
run_group = run_gid
# Get future user ownership
fut_owner = fut_uid = None
if self.file_args['owner']:
try:
tpw = pwd.getpwnam(self.file_args['owner'])
except KeyError:
try:
tpw = pwd.getpwuid(int(self.file_args['owner']))
except (TypeError, KeyError, ValueError):
tpw = pwd.getpwuid(run_uid)
fut_owner = tpw.pw_name
fut_uid = tpw.pw_uid
else:
try:
fut_owner = run_owner
except Exception:
pass
fut_uid = run_uid
# Get future group ownership
fut_group = fut_gid = None
if self.file_args['group']:
try:
tgr = grp.getgrnam(self.file_args['group'])
except (ValueError, KeyError):
try:
# no need to check isdigit() explicitly here, if we fail to
# parse, the ValueError will be caught.
tgr = grp.getgrgid(int(self.file_args['group']))
except (KeyError, ValueError, OverflowError):
tgr = grp.getgrgid(run_gid)
fut_group = tgr.gr_name
fut_gid = tgr.gr_gid
else:
try:
fut_group = run_group
except Exception:
pass
fut_gid = run_gid
for line in old_out.splitlines():
change = False
pcs = line.split(None, 7)
if len(pcs) != 8:
# Too few fields... probably a piece of the header or footer
continue
# Check first and seventh field in order to skip header/footer
if len(pcs[0]) != 7 and len(pcs[0]) != 10:
continue
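            # NOTE: some zipinfo builds emit other field widths here (for example the
            # 8-character '-rw-a---' entries shown in the issue above), which this
            # fixed-length check silently skips.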
if len(pcs[6]) != 15:
continue
# Possible entries:
# -rw-rws--- 1.9 unx 2802 t- defX 11-Aug-91 13:48 perms.2660
# -rw-a-- 1.0 hpf 5358 Tl i4:3 4-Dec-91 11:33 longfilename.hpfs
# -r--ahs 1.1 fat 4096 b- i4:2 14-Jul-91 12:58 EA DATA. SF
# --w------- 1.0 mac 17357 bx i8:2 4-May-92 04:02 unzip.macr
if pcs[0][0] not in 'dl-?' or not frozenset(pcs[0][1:]).issubset('rwxstah-'):
continue
ztype = pcs[0][0]
permstr = pcs[0][1:]
version = pcs[1]
ostype = pcs[2]
size = int(pcs[3])
path = to_text(pcs[7], errors='surrogate_or_strict')
# Skip excluded files
if path in self.excludes:
out += 'Path %s is excluded on request\n' % path
continue
# Itemized change requires L for symlink
if path[-1] == '/':
if ztype != 'd':
err += 'Path %s incorrectly tagged as "%s", but is a directory.\n' % (path, ztype)
ftype = 'd'
elif ztype == 'l':
ftype = 'L'
elif ztype == '-':
ftype = 'f'
elif ztype == '?':
ftype = 'f'
# Some files may be storing FAT permissions, not Unix permissions
# For FAT permissions, we will use a base permissions set of 777 if the item is a directory or has the execute bit set. Otherwise, 666.
# This permission will then be modified by the system UMask.
# BSD always applies the Umask, even to Unix permissions.
# For Unix style permissions on Linux or Mac, we want to use them directly.
# So we set the UMask for this file to zero. That permission set will then be unchanged when calling _permstr_to_octal
if len(permstr) == 6:
if path[-1] == '/':
permstr = 'rwxrwxrwx'
elif permstr == 'rwx---':
permstr = 'rwxrwxrwx'
else:
permstr = 'rw-rw-rw-'
file_umask = umask
elif 'bsd' in systemtype.lower():
file_umask = umask
else:
file_umask = 0
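            # Worked example: a FAT entry '-rw-a--' for a regular file yields permstr
            # 'rw-a--' (6 chars), so the base permissions become 'rw-rw-rw-' (0666);
            # with a process umask of 022, _permstr_to_octal() then produces 0644.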
# Test string conformity
if len(permstr) != 9 or not ZIP_FILE_MODE_RE.match(permstr):
raise UnarchiveError('ZIP info perm format incorrect, %s' % permstr)
# DEBUG
# err += "%s%s %10d %s\n" % (ztype, permstr, size, path)
b_dest = os.path.join(self.b_dest, to_bytes(path, errors='surrogate_or_strict'))
try:
st = os.lstat(b_dest)
except Exception:
change = True
self.includes.append(path)
err += 'Path %s is missing\n' % path
diff += '>%s++++++.?? %s\n' % (ftype, path)
continue
# Compare file types
if ftype == 'd' and not stat.S_ISDIR(st.st_mode):
change = True
self.includes.append(path)
err += 'File %s already exists, but not as a directory\n' % path
diff += 'c%s++++++.?? %s\n' % (ftype, path)
continue
if ftype == 'f' and not stat.S_ISREG(st.st_mode):
change = True
unarchived = False
self.includes.append(path)
err += 'Directory %s already exists, but not as a regular file\n' % path
diff += 'c%s++++++.?? %s\n' % (ftype, path)
continue
if ftype == 'L' and not stat.S_ISLNK(st.st_mode):
change = True
self.includes.append(path)
err += 'Directory %s already exists, but not as a symlink\n' % path
diff += 'c%s++++++.?? %s\n' % (ftype, path)
continue
itemized = list('.%s.......??' % ftype)
# Note: this timestamp calculation has a rounding error
# somewhere... unzip and this timestamp can be one second off
# When that happens, we report a change and re-unzip the file
dt_object = datetime.datetime(*(time.strptime(pcs[6], '%Y%m%d.%H%M%S')[0:6]))
timestamp = time.mktime(dt_object.timetuple())
# Compare file timestamps
if stat.S_ISREG(st.st_mode):
if self.module.params['keep_newer']:
if timestamp > st.st_mtime:
change = True
self.includes.append(path)
err += 'File %s is older, replacing file\n' % path
itemized[4] = 't'
elif stat.S_ISREG(st.st_mode) and timestamp < st.st_mtime:
# Add to excluded files, ignore other changes
out += 'File %s is newer, excluding file\n' % path
self.excludes.append(path)
continue
else:
if timestamp != st.st_mtime:
change = True
self.includes.append(path)
err += 'File %s differs in mtime (%f vs %f)\n' % (path, timestamp, st.st_mtime)
itemized[4] = 't'
# Compare file sizes
if stat.S_ISREG(st.st_mode) and size != st.st_size:
change = True
err += 'File %s differs in size (%d vs %d)\n' % (path, size, st.st_size)
itemized[3] = 's'
# Compare file checksums
if stat.S_ISREG(st.st_mode):
crc = crc32(b_dest, self.io_buffer_size)
if crc != self._crc32(path):
change = True
err += 'File %s differs in CRC32 checksum (0x%08x vs 0x%08x)\n' % (path, self._crc32(path), crc)
itemized[2] = 'c'
# Compare file permissions
# Do not handle permissions of symlinks
if ftype != 'L':
# Use the new mode provided with the action, if there is one
if self.file_args['mode']:
if isinstance(self.file_args['mode'], int):
mode = self.file_args['mode']
else:
try:
mode = int(self.file_args['mode'], 8)
except Exception as e:
try:
mode = AnsibleModule._symbolic_mode_to_octal(st, self.file_args['mode'])
except ValueError as e:
self.module.fail_json(path=path, msg="%s" % to_native(e), exception=traceback.format_exc())
# Only special files require no umask-handling
elif ztype == '?':
mode = self._permstr_to_octal(permstr, 0)
else:
mode = self._permstr_to_octal(permstr, file_umask)
if mode != stat.S_IMODE(st.st_mode):
change = True
itemized[5] = 'p'
err += 'Path %s differs in permissions (%o vs %o)\n' % (path, mode, stat.S_IMODE(st.st_mode))
# Compare file user ownership
owner = uid = None
try:
owner = pwd.getpwuid(st.st_uid).pw_name
except (TypeError, KeyError):
uid = st.st_uid
# If we are not root and requested owner is not our user, fail
if run_uid != 0 and (fut_owner != run_owner or fut_uid != run_uid):
raise UnarchiveError('Cannot change ownership of %s to %s, as user %s' % (path, fut_owner, run_owner))
if owner and owner != fut_owner:
change = True
err += 'Path %s is owned by user %s, not by user %s as expected\n' % (path, owner, fut_owner)
itemized[6] = 'o'
elif uid and uid != fut_uid:
change = True
err += 'Path %s is owned by uid %s, not by uid %s as expected\n' % (path, uid, fut_uid)
itemized[6] = 'o'
# Compare file group ownership
group = gid = None
try:
group = grp.getgrgid(st.st_gid).gr_name
except (KeyError, ValueError, OverflowError):
gid = st.st_gid
if run_uid != 0 and (fut_group != run_group or fut_gid != run_gid) and fut_gid not in groups:
raise UnarchiveError('Cannot change group ownership of %s to %s, as user %s' % (path, fut_group, run_owner))
if group and group != fut_group:
change = True
err += 'Path %s is owned by group %s, not by group %s as expected\n' % (path, group, fut_group)
itemized[6] = 'g'
elif gid and gid != fut_gid:
change = True
err += 'Path %s is owned by gid %s, not by gid %s as expected\n' % (path, gid, fut_gid)
itemized[6] = 'g'
# Register changed files and finalize diff output
if change:
if path not in self.includes:
self.includes.append(path)
diff += '%s %s\n' % (''.join(itemized), path)
if self.includes:
unarchived = False
# DEBUG
# out = old_out + out
return dict(unarchived=unarchived, rc=rc, out=out, err=err, cmd=cmd, diff=diff)
def unarchive(self):
cmd = [self.cmd_path, '-o']
if self.opts:
cmd.extend(self.opts)
cmd.append(self.src)
# NOTE: Including (changed) files as arguments is problematic (limits on command line/arguments)
# if self.includes:
# NOTE: Command unzip has this strange behaviour where it expects quoted filenames to also be escaped
# cmd.extend(map(shell_escape, self.includes))
if self.excludes:
cmd.extend(['-x'] + self.excludes)
if self.include_files:
cmd.extend(self.include_files)
cmd.extend(['-d', self.b_dest])
rc, out, err = self.module.run_command(cmd)
return dict(cmd=cmd, rc=rc, out=out, err=err)
def can_handle_archive(self):
missing = []
for b in self.binaries:
try:
setattr(self, b[1], get_bin_path(b[0]))
except ValueError:
missing.append(b[0])
if missing:
return False, "Unable to find required '{missing}' binary in the path.".format(missing="' or '".join(missing))
cmd = [self.cmd_path, '-l', self.src]
rc, out, err = self.module.run_command(cmd)
if rc == 0:
return True, None
self.module.debug(err)
return False, 'Command "%s" could not handle archive: %s' % (self.cmd_path, err)
class TgzArchive(object):
def __init__(self, src, b_dest, file_args, module):
self.src = src
self.b_dest = b_dest
self.file_args = file_args
self.opts = module.params['extra_opts']
self.module = module
if self.module.check_mode:
self.module.exit_json(skipped=True, msg="remote module (%s) does not support check mode when using gtar" % self.module._name)
self.excludes = [path.rstrip('/') for path in self.module.params['exclude']]
self.include_files = self.module.params['include']
self.cmd_path = None
self.tar_type = None
self.zipflag = '-z'
self._files_in_archive = []
def _get_tar_type(self):
cmd = [self.cmd_path, '--version']
(rc, out, err) = self.module.run_command(cmd)
tar_type = None
if out.startswith('bsdtar'):
tar_type = 'bsd'
elif out.startswith('tar') and 'GNU' in out:
tar_type = 'gnu'
return tar_type
@property
def files_in_archive(self):
if self._files_in_archive:
return self._files_in_archive
cmd = [self.cmd_path, '--list', '-C', self.b_dest]
if self.zipflag:
cmd.append(self.zipflag)
if self.opts:
cmd.extend(['--show-transformed-names'] + self.opts)
if self.excludes:
cmd.extend(['--exclude=' + f for f in self.excludes])
cmd.extend(['-f', self.src])
if self.include_files:
cmd.extend(self.include_files)
locale = get_best_parsable_locale(self.module)
rc, out, err = self.module.run_command(cmd, cwd=self.b_dest, environ_update=dict(LANG=locale, LC_ALL=locale, LC_MESSAGES=locale, LANGUAGE=locale))
if rc != 0:
self.module.debug(err)
raise UnarchiveError('Unable to list files in the archive: %s' % err)
for filename in out.splitlines():
# Compensate for locale-related problems in gtar output (octal unicode representation) #11348
# filename = filename.decode('string_escape')
filename = to_native(codecs.escape_decode(filename)[0])
# We don't allow absolute filenames. If the user wants to unarchive rooted in "/"
# they need to use "dest: '/'". This follows the defaults for gtar, pax, etc.
# Allowing absolute filenames here also causes bugs: https://github.com/ansible/ansible/issues/21397
if filename.startswith('/'):
filename = filename[1:]
exclude_flag = False
if self.excludes:
for exclude in self.excludes:
if fnmatch.fnmatch(filename, exclude):
exclude_flag = True
break
if not exclude_flag:
self._files_in_archive.append(to_native(filename))
return self._files_in_archive
def is_unarchived(self):
cmd = [self.cmd_path, '--diff', '-C', self.b_dest]
if self.zipflag:
cmd.append(self.zipflag)
if self.opts:
cmd.extend(['--show-transformed-names'] + self.opts)
if self.file_args['owner']:
cmd.append('--owner=' + quote(self.file_args['owner']))
if self.file_args['group']:
cmd.append('--group=' + quote(self.file_args['group']))
if self.module.params['keep_newer']:
cmd.append('--keep-newer-files')
if self.excludes:
cmd.extend(['--exclude=' + f for f in self.excludes])
cmd.extend(['-f', self.src])
if self.include_files:
cmd.extend(self.include_files)
locale = get_best_parsable_locale(self.module)
rc, out, err = self.module.run_command(cmd, cwd=self.b_dest, environ_update=dict(LANG=locale, LC_ALL=locale, LC_MESSAGES=locale, LANGUAGE=locale))
# Check whether the differences are in something that we're
# setting anyway
# What is different
unarchived = True
old_out = out
out = ''
run_uid = os.getuid()
# When unarchiving as a user, or when owner/group/mode is supplied, --diff is insufficient.
# The only way to be sure is to check the request against what is on disk (as we do for zip).
# Leave this up to set_fs_attributes_if_different() instead of inducing a (false) change
for line in old_out.splitlines() + err.splitlines():
# FIXME: Remove the bogus lines from error-output as well !
# Ignore bogus errors on empty filenames (when using --split-component)
if EMPTY_FILE_RE.search(line):
continue
if run_uid == 0 and not self.file_args['owner'] and OWNER_DIFF_RE.search(line):
out += line + '\n'
if run_uid == 0 and not self.file_args['group'] and GROUP_DIFF_RE.search(line):
out += line + '\n'
if not self.file_args['mode'] and MODE_DIFF_RE.search(line):
out += line + '\n'
if MOD_TIME_DIFF_RE.search(line):
out += line + '\n'
if MISSING_FILE_RE.search(line):
out += line + '\n'
if INVALID_OWNER_RE.search(line):
out += line + '\n'
if INVALID_GROUP_RE.search(line):
out += line + '\n'
if SYMLINK_DIFF_RE.search(line):
out += line + '\n'
if out:
unarchived = False
return dict(unarchived=unarchived, rc=rc, out=out, err=err, cmd=cmd)
def unarchive(self):
cmd = [self.cmd_path, '--extract', '-C', self.b_dest]
if self.zipflag:
cmd.append(self.zipflag)
if self.opts:
cmd.extend(['--show-transformed-names'] + self.opts)
if self.file_args['owner']:
cmd.append('--owner=' + quote(self.file_args['owner']))
if self.file_args['group']:
cmd.append('--group=' + quote(self.file_args['group']))
if self.module.params['keep_newer']:
cmd.append('--keep-newer-files')
if self.excludes:
cmd.extend(['--exclude=' + f for f in self.excludes])
cmd.extend(['-f', self.src])
if self.include_files:
cmd.extend(self.include_files)
locale = get_best_parsable_locale(self.module)
rc, out, err = self.module.run_command(cmd, cwd=self.b_dest, environ_update=dict(LANG=locale, LC_ALL=locale, LC_MESSAGES=locale, LANGUAGE=locale))
return dict(cmd=cmd, rc=rc, out=out, err=err)
def can_handle_archive(self):
# Prefer gtar (GNU tar) as it supports the compression options -z, -j and -J
try:
self.cmd_path = get_bin_path('gtar')
except ValueError:
# Fallback to tar
try:
self.cmd_path = get_bin_path('tar')
except ValueError:
return False, "Unable to find required 'gtar' or 'tar' binary in the path"
self.tar_type = self._get_tar_type()
if self.tar_type != 'gnu':
return False, 'Command "%s" detected as tar type %s. GNU tar required.' % (self.cmd_path, self.tar_type)
try:
if self.files_in_archive:
return True, None
except UnarchiveError as e:
return False, 'Command "%s" could not handle archive: %s' % (self.cmd_path, to_native(e))
# No errors were raised but no files were found either, so assume that we
# were not able to properly handle this archive
return False, 'Command "%s" found no files in archive. Empty archive files are not supported.' % self.cmd_path
# Class to handle tar files that aren't compressed
class TarArchive(TgzArchive):
def __init__(self, src, b_dest, file_args, module):
super(TarArchive, self).__init__(src, b_dest, file_args, module)
# argument to tar
self.zipflag = ''
# Class to handle bzip2 compressed tar files
class TarBzipArchive(TgzArchive):
def __init__(self, src, b_dest, file_args, module):
super(TarBzipArchive, self).__init__(src, b_dest, file_args, module)
self.zipflag = '-j'
# Class to handle xz compressed tar files
class TarXzArchive(TgzArchive):
def __init__(self, src, b_dest, file_args, module):
super(TarXzArchive, self).__init__(src, b_dest, file_args, module)
self.zipflag = '-J'
# Class to handle zstd compressed tar files
class TarZstdArchive(TgzArchive):
def __init__(self, src, b_dest, file_args, module):
super(TarZstdArchive, self).__init__(src, b_dest, file_args, module)
# GNU Tar supports the --use-compress-program option to
# specify which executable to use for
# compression/decompression.
#
# Note: some flavors of BSD tar support --zstd (e.g., FreeBSD
# 12.2), but the TgzArchive class only supports GNU Tar.
self.zipflag = '--use-compress-program=zstd'
class ZipZArchive(ZipArchive):
def __init__(self, src, b_dest, file_args, module):
super(ZipZArchive, self).__init__(src, b_dest, file_args, module)
self.zipinfoflag = '-Z'
self.binaries = (
('unzip', 'cmd_path'),
('unzip', 'zipinfo_cmd_path'),
)
def can_handle_archive(self):
unzip_available, error_msg = super(ZipZArchive, self).can_handle_archive()
if not unzip_available:
return unzip_available, error_msg
# Ensure unzip -Z is available before we use it in is_unarchive
cmd = [self.zipinfo_cmd_path, self.zipinfoflag]
rc, out, err = self.module.run_command(cmd)
if 'zipinfo' in out.lower():
return True, None
return False, 'Command "unzip -Z" could not handle archive: %s' % err
# try handlers in order and return the one that works or bail if none work
def pick_handler(src, dest, file_args, module):
handlers = [ZipArchive, ZipZArchive, TgzArchive, TarArchive, TarBzipArchive, TarXzArchive, TarZstdArchive]
reasons = set()
for handler in handlers:
obj = handler(src, dest, file_args, module)
(can_handle, reason) = obj.can_handle_archive()
if can_handle:
return obj
reasons.add(reason)
reason_msg = '\n'.join(reasons)
module.fail_json(msg='Failed to find handler for "%s". Make sure the required command to extract the file is installed.\n%s' % (src, reason_msg))
def main():
module = AnsibleModule(
# not checking because of daisy chain to file module
argument_spec=dict(
src=dict(type='path', required=True),
dest=dict(type='path', required=True),
remote_src=dict(type='bool', default=False),
creates=dict(type='path'),
list_files=dict(type='bool', default=False),
keep_newer=dict(type='bool', default=False),
exclude=dict(type='list', elements='str', default=[]),
include=dict(type='list', elements='str', default=[]),
extra_opts=dict(type='list', elements='str', default=[]),
validate_certs=dict(type='bool', default=True),
io_buffer_size=dict(type='int', default=64 * 1024),
# Options that are for the action plugin, but ignored by the module itself.
# We have them here so that the sanity tests pass without ignores, which
# reduces the likelihood of further bugs being added.
copy=dict(type='bool', default=True),
decrypt=dict(type='bool', default=True),
),
add_file_common_args=True,
# check-mode only works for zip files, we cover that later
supports_check_mode=True,
mutually_exclusive=[('include', 'exclude')],
)
src = module.params['src']
dest = module.params['dest']
abs_dest = os.path.abspath(dest)
b_dest = to_bytes(abs_dest, errors='surrogate_or_strict')
if not os.path.isabs(dest):
module.warn("Relative destination path '{dest}' was resolved to absolute path '{abs_dest}'.".format(dest=dest, abs_dest=abs_dest))
remote_src = module.params['remote_src']
file_args = module.load_file_common_arguments(module.params)
# did tar file arrive?
if not os.path.exists(src):
if not remote_src:
module.fail_json(msg="Source '%s' failed to transfer" % src)
# If remote_src=true and src contains '://', try to download the file to a temp directory.
elif '://' in src:
src = fetch_file(module, src)
else:
module.fail_json(msg="Source '%s' does not exist" % src)
if not os.access(src, os.R_OK):
module.fail_json(msg="Source '%s' not readable" % src)
# ensure src is an absolute path before picking handlers
src = os.path.abspath(src)
# skip working with 0 size archives
try:
if os.path.getsize(src) == 0:
module.fail_json(msg="Invalid archive '%s', the file is 0 bytes" % src)
except Exception as e:
module.fail_json(msg="Source '%s' not readable, %s" % (src, to_native(e)))
# is dest OK to receive tar file?
if not os.path.isdir(b_dest):
module.fail_json(msg="Destination '%s' is not a directory" % dest)
handler = pick_handler(src, b_dest, file_args, module)
res_args = dict(handler=handler.__class__.__name__, dest=dest, src=src)
# do we need to do unpack?
check_results = handler.is_unarchived()
# DEBUG
# res_args['check_results'] = check_results
if module.check_mode:
res_args['changed'] = not check_results['unarchived']
elif check_results['unarchived']:
res_args['changed'] = False
else:
# do the unpack
try:
res_args['extract_results'] = handler.unarchive()
if res_args['extract_results']['rc'] != 0:
module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
except IOError:
module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
else:
res_args['changed'] = True
# Get diff if required
if check_results.get('diff', False):
res_args['diff'] = {'prepared': check_results['diff']}
# Run only if we found differences (idempotence) or diff was missing
if res_args.get('diff', True) and not module.check_mode:
# do we need to change perms?
top_folders = []
for filename in handler.files_in_archive:
file_args['path'] = os.path.join(b_dest, to_bytes(filename, errors='surrogate_or_strict'))
try:
res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'], expand=False)
except (IOError, OSError) as e:
module.fail_json(msg="Unexpected error when accessing exploded file: %s" % to_native(e), **res_args)
if '/' in filename:
top_folder_path = filename.split('/')[0]
if top_folder_path not in top_folders:
top_folders.append(top_folder_path)
# make sure top folders have the right permissions
# https://github.com/ansible/ansible/issues/35426
if top_folders:
for f in top_folders:
file_args['path'] = "%s/%s" % (dest, f)
try:
res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'], expand=False)
except (IOError, OSError) as e:
module.fail_json(msg="Unexpected error when accessing exploded file: %s" % to_native(e), **res_args)
if module.params['list_files']:
res_args['files'] = handler.files_in_archive
module.exit_json(**res_args)
if __name__ == '__main__':
main()
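
The pick_handler() helper above tries each archive handler in order, returns the first one whose can_handle_archive() succeeds, and collects the rejection reasons for the final error message. A minimal, self-contained sketch of that pattern (illustrative class and names only, not part of the module):

```python
class DemoHandler:
    # Illustrative stand-in for the archive classes above.
    def __init__(self, name, ok):
        self.name = name
        self.ok = ok

    def can_handle_archive(self):
        if self.ok:
            return True, None
        return False, "%s cannot handle this archive" % self.name


def demo_pick_handler(handlers):
    # Try handlers in order; return the first that works, otherwise collect
    # every rejection reason so the caller can report why nothing matched.
    reasons = set()
    for handler in handlers:
        can_handle, reason = handler.can_handle_archive()
        if can_handle:
            return handler
        reasons.add(reason)
    raise RuntimeError("no suitable handler:\n" + "\n".join(reasons))


print(demo_pick_handler([DemoHandler('zip', False), DemoHandler('gtar', True)]).name)  # -> gtar
```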
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 70,339 |
known_hosts module breaks when using @cert-authority keys
|
<!--- Verify first that your issue is not already reported on GitHub -->
<!--- Also test if the latest release and devel branch are affected too -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
If I attempt to add a key for `host.example.com` ansible removes my `@cert-authority *.example.com`
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
known_hosts
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible-playbook 2.9.10
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.7/dist-packages/ansible
executable location = /usr/local/bin/ansible-playbook
python version = 3.7.3 (default, Dec 20 2019, 18:57:59) [GCC 8.3.0]
```
##### CONFIGURATION
Completely default
##### OS / ENVIRONMENT
Docker image on amd64 running debian:buster-slim image id: 43e3995ee54a
Installed via pip
##### STEPS TO REPRODUCE
play.yml
- hosts: localhost
gather_facts: no
vars:
known_hosts_path:
example_com_ed25519_key: >
host.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFp8VtD59XAcxkj1qbfCtB1in9nm5WiipORjtVJUBA6I
tasks:
- name: Create a copy of known_hosts
copy:
src: existing_known_hosts
dest: test_known_hosts
- name: add the ed25519 host key
known_hosts:
name: host.example.com
key: "{{ example_com_ed25519_key }}"
state: present
path: "test_known_hosts"
register: result
- name: get the file content
command: "cat test_known_hosts"
register: known_hosts_v1
- name: assert that the key was added and ordering preserved
assert:
that:
- 'result is changed'
- 'known_hosts_v1.stdout_lines[-1].strip() == example_com_ed25519_key.strip()'
- 'known_hosts_v1.stdout_lines[4].startswith("@cert-authority")'
existing_known_hosts
example.com ssh-dss AAAAB3NzaC1kc3MAAACBALT8YHxZ59d8yX4oQNPbpdK9AMPRQGKFY9X13S2fp4UMPijiB3ETxU1bAyVTjTbsoag065naFt13aIVl+u0MDPfMuYgVJFEorAZkDlBixvT25zpKyQhI4CtHhZ9Y9YWug4xLqSaFUYEPO31Bie7k8xRfDwsHtzTRPp/0zRURwARHAAAAFQDLx2DZMm3cR8cZtbq4zdSvkXLh0wAAAIAalkQYziu2b5dDRQMiFpDLpPdbymyVhDMmRKnXwAB1+dhGyJLGvfe0xO+ibqGXMp1aZ1iC3a/vHTpYKDVqKIIpFg5r0fxAcAZkJR0aRC8RDxW/IclbIliETD71osIT8I47OFc7vAVCWP8JbV3ZYzR+i98WUkmZ4/ZUzsDl2gi7WAAAAIAsdTGwAo4Fs784TdP2tIHCqxAIz2k4tWmZyeRmXkH5K/P1o9XSh3RNxvFKK7BY6dQK+h9jLunMBs0SCzhMoTcXaJq331kmLJltjq5peo0PnLGnQz5pas0PD7p7gb+soklmHoVp7J2oMC/U4N1Rxr6g9sv8Rpsf1PTPDT3sEbze6A== root@freezer
@cert-authority *.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDXh1gk2xgS2MekPvo7ZEKiOT7HoyvOAzai2GqoLXGHO
# example.net ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM6OSqweGdPdQ/metQaf738AdN3P+itYp1AypOTgXkyj root@localhost
Output
# ansible-playbook play.yml --diff
[WARNING]: No inventory was parsed, only implicit localhost is available
[WARNING]: provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all'
PLAY [localhost] *************************************************************************************************************
...
TASK [add the ed25519 host key] **********************************************************************************************
--- before: test_known_hosts
+++ after: test_known_hosts
@@ -1,3 +1,3 @@
example.com ssh-dss AAAAB3NzaC1kc3MAAACBALT8YHxZ59d8yX4oQNPbpdK9AMPRQGKFY9X13S2fp4UMPijiB3ETxU1bAyVTjTbsoag065naFt13aIVl+u0MDPfMuYgVJFEorAZkDlBixvT25zpKyQhI4CtHhZ9Y9YWug4xLqSaFUYEPO31Bie7k8xRfDwsHtzTRPp/0zRURwARHAAAAFQDLx2DZMm3cR8cZtbq4zdSvkXLh0wAAAIAalkQYziu2b5dDRQMiFpDLpPdbymyVhDMmRKnXwAB1+dhGyJLGvfe0xO+ibqGXMp1aZ1iC3a/vHTpYKDVqKIIpFg5r0fxAcAZkJR0aRC8RDxW/IclbIliETD71osIT8I47OFc7vAVCWP8JbV3ZYzR+i98WUkmZ4/ZUzsDl2gi7WAAAAIAsdTGwAo4Fs784TdP2tIHCqxAIz2k4tWmZyeRmXkH5K/P1o9XSh3RNxvFKK7BY6dQK+h9jLunMBs0SCzhMoTcXaJq331kmLJltjq5peo0PnLGnQz5pas0PD7p7gb+soklmHoVp7J2oMC/U4N1Rxr6g9sv8Rpsf1PTPDT3sEbze6A== root@freezer
-@cert-authority *.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDXh1gk2xgS2MekPvo7ZEKiOT7HoyvOAzai2GqoLXGHO
# example.net ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM6OSqweGdPdQ/metQaf738AdN3P+itYp1AypOTgXkyj root@localhost
+host.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFp8VtD59XAcxkj1qbfCtB1in9nm5WiipORjtVJUBA6I
changed: [localhost]
TASK [get the file content] **************************************************************************************************
changed: [localhost]
TASK [assert that the key was added and ordering preserved] ******************************************************************
fatal: [localhost]: FAILED! => {"msg": "The conditional check 'known_hosts_v1.stdout_lines[4].startswith(\"@cert-authority\")' failed. The error was: error while evaluating conditional (known_hosts_v1.stdout_lines[4].startswith(\"@cert-authority\")): list object has no element 4"}
PLAY RECAP *******************************************************************************************************************
localhost : ok=3 changed=3 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
I expected adding `hosts.example.com` not to have the side effect of removing `@cert-authority *.example.com ...`
##### ACTUAL RESULTS
See Above
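
A key detail for a fix is that known_hosts lines may start with an optional marker field such as `@cert-authority` or `@revoked`, which must be kept separate from the hostname rather than treated as part of it. A minimal sketch of marker-aware parsing, mirroring the normalize_known_hosts_key() helper in the module source included later in this document (the function name here is illustrative):

```python
def parse_known_hosts_line(line):
    # Keep the optional "@cert-authority" / "@revoked" marker as its own
    # field instead of mistaking it for the hostname.
    fields = line.strip().split()
    entry = {}
    if fields[0].startswith('@'):
        entry['options'] = fields[0]
        fields = fields[1:]
    entry['host'], entry['type'], entry['key'] = fields[0], fields[1], fields[2]
    return entry


print(parse_known_hosts_line('@cert-authority *.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDXh...'))
# -> {'options': '@cert-authority', 'host': '*.example.com', 'type': 'ssh-ed25519', 'key': 'AAAAC3NzaC1lZDI1NTE5AAAAIDXh...'}
```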
|
https://github.com/ansible/ansible/issues/70339
|
https://github.com/ansible/ansible/pull/70340
|
7dde4901d42e4c043adbd980c941b97cd3237bb6
|
a4b00793be46f703e32ee4c440f303d19d2c652d
| 2020-06-27T04:20:12Z |
python
| 2023-11-22T00:55:01Z |
changelogs/fragments/known_hosts_cert-authority_keys.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 70,339 |
known_hosts module breaks when using @cert-authority keys
|
<!--- Verify first that your issue is not already reported on GitHub -->
<!--- Also test if the latest release and devel branch are affected too -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
If I attempt to add a key for `host.example.com` ansible removes my `@cert-authority *.example.com`
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
known_hosts
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible-playbook 2.9.10
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.7/dist-packages/ansible
executable location = /usr/local/bin/ansible-playbook
python version = 3.7.3 (default, Dec 20 2019, 18:57:59) [GCC 8.3.0]
```
##### CONFIGURATION
Completely default
##### OS / ENVIRONMENT
Docker image on amd64 running debian:buster-slim image id: 43e3995ee54a
Installed via pip
##### STEPS TO REPRODUCE
play.yml
- hosts: localhost
gather_facts: no
vars:
known_hosts_path:
example_com_ed25519_key: >
host.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFp8VtD59XAcxkj1qbfCtB1in9nm5WiipORjtVJUBA6I
tasks:
- name: Create a copy of known_hosts
copy:
src: existing_known_hosts
dest: test_known_hosts
- name: add the ed25519 host key
known_hosts:
name: host.example.com
key: "{{ example_com_ed25519_key }}"
state: present
path: "test_known_hosts"
register: result
- name: get the file content
command: "cat test_known_hosts"
register: known_hosts_v1
- name: assert that the key was added and ordering preserved
assert:
that:
- 'result is changed'
- 'known_hosts_v1.stdout_lines[-1].strip() == example_com_ed25519_key.strip()'
- 'known_hosts_v1.stdout_lines[4].startswith("@cert-authority")'
existing_known_hosts
example.com ssh-dss AAAAB3NzaC1kc3MAAACBALT8YHxZ59d8yX4oQNPbpdK9AMPRQGKFY9X13S2fp4UMPijiB3ETxU1bAyVTjTbsoag065naFt13aIVl+u0MDPfMuYgVJFEorAZkDlBixvT25zpKyQhI4CtHhZ9Y9YWug4xLqSaFUYEPO31Bie7k8xRfDwsHtzTRPp/0zRURwARHAAAAFQDLx2DZMm3cR8cZtbq4zdSvkXLh0wAAAIAalkQYziu2b5dDRQMiFpDLpPdbymyVhDMmRKnXwAB1+dhGyJLGvfe0xO+ibqGXMp1aZ1iC3a/vHTpYKDVqKIIpFg5r0fxAcAZkJR0aRC8RDxW/IclbIliETD71osIT8I47OFc7vAVCWP8JbV3ZYzR+i98WUkmZ4/ZUzsDl2gi7WAAAAIAsdTGwAo4Fs784TdP2tIHCqxAIz2k4tWmZyeRmXkH5K/P1o9XSh3RNxvFKK7BY6dQK+h9jLunMBs0SCzhMoTcXaJq331kmLJltjq5peo0PnLGnQz5pas0PD7p7gb+soklmHoVp7J2oMC/U4N1Rxr6g9sv8Rpsf1PTPDT3sEbze6A== root@freezer
@cert-authority *.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDXh1gk2xgS2MekPvo7ZEKiOT7HoyvOAzai2GqoLXGHO
# example.net ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM6OSqweGdPdQ/metQaf738AdN3P+itYp1AypOTgXkyj root@localhost
Output
# ansible-playbook play.yml --diff
[WARNING]: No inventory was parsed, only implicit localhost is available
[WARNING]: provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all'
PLAY [localhost] *************************************************************************************************************
...
TASK [add the ed25519 host key] **********************************************************************************************
--- before: test_known_hosts
+++ after: test_known_hosts
@@ -1,3 +1,3 @@
example.com ssh-dss AAAAB3NzaC1kc3MAAACBALT8YHxZ59d8yX4oQNPbpdK9AMPRQGKFY9X13S2fp4UMPijiB3ETxU1bAyVTjTbsoag065naFt13aIVl+u0MDPfMuYgVJFEorAZkDlBixvT25zpKyQhI4CtHhZ9Y9YWug4xLqSaFUYEPO31Bie7k8xRfDwsHtzTRPp/0zRURwARHAAAAFQDLx2DZMm3cR8cZtbq4zdSvkXLh0wAAAIAalkQYziu2b5dDRQMiFpDLpPdbymyVhDMmRKnXwAB1+dhGyJLGvfe0xO+ibqGXMp1aZ1iC3a/vHTpYKDVqKIIpFg5r0fxAcAZkJR0aRC8RDxW/IclbIliETD71osIT8I47OFc7vAVCWP8JbV3ZYzR+i98WUkmZ4/ZUzsDl2gi7WAAAAIAsdTGwAo4Fs784TdP2tIHCqxAIz2k4tWmZyeRmXkH5K/P1o9XSh3RNxvFKK7BY6dQK+h9jLunMBs0SCzhMoTcXaJq331kmLJltjq5peo0PnLGnQz5pas0PD7p7gb+soklmHoVp7J2oMC/U4N1Rxr6g9sv8Rpsf1PTPDT3sEbze6A== root@freezer
-@cert-authority *.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDXh1gk2xgS2MekPvo7ZEKiOT7HoyvOAzai2GqoLXGHO
# example.net ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM6OSqweGdPdQ/metQaf738AdN3P+itYp1AypOTgXkyj root@localhost
+host.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFp8VtD59XAcxkj1qbfCtB1in9nm5WiipORjtVJUBA6I
changed: [localhost]
TASK [get the file content] **************************************************************************************************
changed: [localhost]
TASK [assert that the key was added and ordering preserved] ******************************************************************
fatal: [localhost]: FAILED! => {"msg": "The conditional check 'known_hosts_v1.stdout_lines[4].startswith(\"@cert-authority\")' failed. The error was: error while evaluating conditional (known_hosts_v1.stdout_lines[4].startswith(\"@cert-authority\")): list object has no element 4"}
PLAY RECAP *******************************************************************************************************************
localhost : ok=3 changed=3 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
I expected adding `hosts.example.com` not to have the side effect of removing `@cert-authority *.example.com ...`
##### ACTUAL RESULTS
See Above
|
https://github.com/ansible/ansible/issues/70339
|
https://github.com/ansible/ansible/pull/70340
|
7dde4901d42e4c043adbd980c941b97cd3237bb6
|
a4b00793be46f703e32ee4c440f303d19d2c652d
| 2020-06-27T04:20:12Z |
python
| 2023-11-22T00:55:01Z |
lib/ansible/modules/known_hosts.py
|
# Copyright: (c) 2014, Matthew Vernon <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = r'''
---
module: known_hosts
short_description: Add or remove a host from the C(known_hosts) file
description:
- The M(ansible.builtin.known_hosts) module lets you add or remove host keys from the C(known_hosts) file.
- Starting with Ansible 2.2, multiple entries per host are allowed, but only one for each key type supported by ssh.
This is useful if you want to use the M(ansible.builtin.git) module over ssh, for example.
- If you have a very large number of host keys to manage, you will find the M(ansible.builtin.template) module more useful.
version_added: "1.9"
options:
name:
aliases: [ 'host' ]
description:
- The host to add or remove (must match a host specified in key). It will be converted to lowercase so that ssh-keygen can find it.
- Must match with <hostname> or <ip> present in key attribute.
- For custom SSH port, O(name) needs to specify port as well. See example section.
type: str
required: true
key:
description:
- The SSH public host key, as a string.
- Required if O(state=present), optional when O(state=absent), in which case all keys for the host are removed.
- The key must be in the right format for SSH (see sshd(8), section "SSH_KNOWN_HOSTS FILE FORMAT").
- Specifically, the key should not match the format that is found in an SSH pubkey file, but should rather have the hostname prepended to a
line that includes the pubkey, the same way that it would appear in the known_hosts file. The value prepended to the line must also match
the value of the name parameter.
- Should be of format C(<hostname[,IP]> ssh-rsa <pubkey>).
- For custom SSH port, O(key) needs to specify port as well. See example section.
type: str
path:
description:
- The known_hosts file to edit.
- The known_hosts file will be created if needed. The rest of the path must exist prior to running the module.
default: "~/.ssh/known_hosts"
type: path
hash_host:
description:
- Hash the hostname in the known_hosts file.
type: bool
default: "no"
version_added: "2.3"
state:
description:
- V(present) to add the host key.
- V(absent) to remove it.
choices: [ "absent", "present" ]
default: "present"
type: str
attributes:
check_mode:
support: full
diff_mode:
support: full
platform:
platforms: posix
extends_documentation_fragment:
- action_common_attributes
author:
- Matthew Vernon (@mcv21)
'''
EXAMPLES = r'''
- name: Tell the host about our servers it might want to ssh to
ansible.builtin.known_hosts:
path: /etc/ssh/ssh_known_hosts
name: foo.com.invalid
key: "{{ lookup('ansible.builtin.file', 'pubkeys/foo.com.invalid') }}"
- name: Another way to call known_hosts
ansible.builtin.known_hosts:
name: host1.example.com # or 10.9.8.77
key: host1.example.com,10.9.8.77 ssh-rsa ASDeararAIUHI324324 # some key gibberish
path: /etc/ssh/ssh_known_hosts
state: present
- name: Add host with custom SSH port
ansible.builtin.known_hosts:
name: '[host1.example.com]:2222'
key: '[host1.example.com]:2222 ssh-rsa ASDeararAIUHI324324' # some key gibberish
path: /etc/ssh/ssh_known_hosts
state: present
'''
# Makes sure public host keys are present or absent in the given known_hosts
# file.
#
# Arguments
# =========
# name = hostname whose key should be added (alias: host)
# key = line(s) to add to known_hosts file
# path = the known_hosts file to edit (default: ~/.ssh/known_hosts)
# hash_host = yes|no (default: no) hash the hostname in the known_hosts file
# state = absent|present (default: present)
import base64
import errno
import hashlib
import hmac
import os
import os.path
import re
import tempfile
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.text.converters import to_bytes, to_native
def enforce_state(module, params):
"""
Add or remove key.
"""
host = params["name"].lower()
key = params.get("key", None)
path = params.get("path")
hash_host = params.get("hash_host")
state = params.get("state")
# Find the ssh-keygen binary
sshkeygen = module.get_bin_path("ssh-keygen", True)
if not key and state != "absent":
module.fail_json(msg="No key specified when adding a host")
if key and hash_host:
key = hash_host_key(host, key)
# Trailing newline in files gets lost, so re-add if necessary
if key and not key.endswith('\n'):
key += '\n'
sanity_check(module, host, key, sshkeygen)
found, replace_or_add, found_line = search_for_host_key(module, host, key, path, sshkeygen)
params['diff'] = compute_diff(path, found_line, replace_or_add, state, key)
# Check if we are trying to remove a non-matching key;
# in that case, return with no change to the host
if state == 'absent' and not found_line and key:
params['changed'] = False
return params
# We will change state if found==True & state!="present"
# or found==False & state=="present"
# i.e found XOR (state=="present")
# Alternatively, if replace is true (i.e. key present, and we must change
# it)
if module.check_mode:
module.exit_json(changed=replace_or_add or (state == "present") != found,
diff=params['diff'])
# Now do the work.
# Only remove whole host if found and no key provided
if found and not key and state == "absent":
module.run_command([sshkeygen, '-R', host, '-f', path], check_rc=True)
params['changed'] = True
# Next, add a new (or replacing) entry
if replace_or_add or found != (state == "present"):
try:
inf = open(path, "r")
except IOError as e:
if e.errno == errno.ENOENT:
inf = None
else:
module.fail_json(msg="Failed to read %s: %s" % (path, str(e)))
try:
with tempfile.NamedTemporaryFile(mode='w+', dir=os.path.dirname(path), delete=False) as outf:
if inf is not None:
for line_number, line in enumerate(inf):
if found_line == (line_number + 1) and (replace_or_add or state == 'absent'):
continue # skip this line to replace its key
outf.write(line)
inf.close()
if state == 'present':
outf.write(key)
except (IOError, OSError) as e:
module.fail_json(msg="Failed to write to file %s: %s" % (path, to_native(e)))
else:
module.atomic_move(outf.name, path)
params['changed'] = True
return params
def sanity_check(module, host, key, sshkeygen):
'''Check supplied key is sensible
host and key are parameters provided by the user; If the host
provided is inconsistent with the key supplied, then this function
quits, providing an error to the user.
sshkeygen is the path to ssh-keygen, found earlier with get_bin_path
'''
# If no key supplied, we're doing a removal, and have nothing to check here.
if not key:
return
# Rather than parsing the key ourselves, get ssh-keygen to do it
# (this is essential for hashed keys, but otherwise useful, as the
# key question is whether ssh-keygen thinks the key matches the host).
# The approach is to write the key to a temporary file,
# and then attempt to look up the specified host in that file.
if re.search(r'\S+(\s+)?,(\s+)?', host):
module.fail_json(msg="Comma separated list of names is not supported. "
"Please pass a single name to lookup in the known_hosts file.")
with tempfile.NamedTemporaryFile(mode='w+') as outf:
try:
outf.write(key)
outf.flush()
except IOError as e:
module.fail_json(msg="Failed to write to temporary file %s: %s" %
(outf.name, to_native(e)))
sshkeygen_command = [sshkeygen, '-F', host, '-f', outf.name]
rc, stdout, stderr = module.run_command(sshkeygen_command)
if stdout == '': # host not found
module.fail_json(msg="Host parameter does not match hashed host field in supplied key")
def search_for_host_key(module, host, key, path, sshkeygen):
'''search_for_host_key(module,host,key,path,sshkeygen) -> (found,replace_or_add,found_line)
Looks up host and keytype in the known_hosts file path; if it's there, looks to see
if one of those entries matches key. Returns:
found (Boolean): is host found in path?
replace_or_add (Boolean): is the key in path different to that supplied by user?
found_line (int or None): the line where a key of the same type was found
if found=False, then replace is always False.
sshkeygen is the path to ssh-keygen, found earlier with get_bin_path
'''
if os.path.exists(path) is False:
return False, False, None
sshkeygen_command = [sshkeygen, '-F', host, '-f', path]
# openssh >=6.4 has changed ssh-keygen behaviour such that it returns
# 1 if no host is found, whereas previously it returned 0
rc, stdout, stderr = module.run_command(sshkeygen_command, check_rc=False)
if stdout == '' and stderr == '' and (rc == 0 or rc == 1):
return False, False, None # host not found, no other errors
if rc != 0: # something went wrong
module.fail_json(msg="ssh-keygen failed (rc=%d, stdout='%s',stderr='%s')" % (rc, stdout, stderr))
# If user supplied no key, we don't want to try and replace anything with it
if not key:
return True, False, None
lines = stdout.split('\n')
new_key = normalize_known_hosts_key(key)
for lnum, l in enumerate(lines):
if l == '':
continue
elif l[0] == '#': # info output from ssh-keygen; contains the line number where key was found
try:
# This output format has been hardcoded in ssh-keygen since at least OpenSSH 4.0
# It always outputs the non-localized comment before the found key
found_line = int(re.search(r'found: line (\d+)', l).group(1))
except IndexError:
module.fail_json(msg="failed to parse output of ssh-keygen for line number: '%s'" % l)
else:
found_key = normalize_known_hosts_key(l)
if new_key['host'][:3] == '|1|' and found_key['host'][:3] == '|1|': # do not change host hash if already hashed
new_key['host'] = found_key['host']
if new_key == found_key: # found a match
return True, False, found_line # found exactly the same key, don't replace
elif new_key['type'] == found_key['type']: # found a different key for the same key type
return True, True, found_line
# No match found, return found and replace, but no line
return True, True, None
def hash_host_key(host, key):
hmac_key = os.urandom(20)
hashed_host = hmac.new(hmac_key, to_bytes(host), hashlib.sha1).digest()
parts = key.strip().split()
# @ indicates the optional marker field used for @cert-authority or @revoked
i = 1 if parts[0][0] == '@' else 0
parts[i] = '|1|%s|%s' % (to_native(base64.b64encode(hmac_key)), to_native(base64.b64encode(hashed_host)))
return ' '.join(parts)
def normalize_known_hosts_key(key):
'''
Transform a key, either taken from a known_host file or provided by the
user, into a normalized form.
The host part (which might include multiple hostnames or be hashed) gets
replaced by the provided host. Also, any spurious information gets removed
from the end (like the username@host tag usually present in hostkeys, but
absent in known_hosts files)
'''
key = key.strip() # trim trailing newline
k = key.split()
d = dict()
# The optional "marker" field, used for @cert-authority or @revoked
if k[0][0] == '@':
d['options'] = k[0]
d['host'] = k[1]
d['type'] = k[2]
d['key'] = k[3]
else:
d['host'] = k[0]
d['type'] = k[1]
d['key'] = k[2]
return d
def compute_diff(path, found_line, replace_or_add, state, key):
diff = {
'before_header': path,
'after_header': path,
'before': '',
'after': '',
}
try:
inf = open(path, "r")
except IOError as e:
if e.errno == errno.ENOENT:
diff['before_header'] = '/dev/null'
else:
diff['before'] = inf.read()
inf.close()
lines = diff['before'].splitlines(1)
if (replace_or_add or state == 'absent') and found_line is not None and 1 <= found_line <= len(lines):
del lines[found_line - 1]
if state == 'present' and (replace_or_add or found_line is None):
lines.append(key)
diff['after'] = ''.join(lines)
return diff
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True, type='str', aliases=['host']),
key=dict(required=False, type='str', no_log=False),
path=dict(default="~/.ssh/known_hosts", type='path'),
hash_host=dict(required=False, type='bool', default=False),
state=dict(default='present', choices=['absent', 'present']),
),
supports_check_mode=True
)
results = enforce_state(module, module.params)
module.exit_json(**results)
if __name__ == '__main__':
main()
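
hash_host_key() above builds the hashed host field used by HashKnownHosts-style entries: a random 20-byte salt and an HMAC-SHA1 of the hostname, both base64-encoded. A standalone sketch of the same construction (the function name is illustrative, not the module API):

```python
import base64
import hashlib
import hmac
import os


def demo_hash_host(host):
    # Produces the |1|base64(salt)|base64(HMAC-SHA1(salt, hostname)) form
    # that hash_host_key() above writes into the known_hosts file.
    salt = os.urandom(20)
    digest = hmac.new(salt, host.encode(), hashlib.sha1).digest()
    return '|1|%s|%s' % (base64.b64encode(salt).decode(), base64.b64encode(digest).decode())


print(demo_hash_host('example.org'))
```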
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 70,339 |
known_hosts module breaks when using @cert-authority keys
|
<!--- Verify first that your issue is not already reported on GitHub -->
<!--- Also test if the latest release and devel branch are affected too -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
If I attempt to add a key for `host.example.com` ansible removes my `@cert-authority *.example.com`
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
known_hosts
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible-playbook 2.9.10
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.7/dist-packages/ansible
executable location = /usr/local/bin/ansible-playbook
python version = 3.7.3 (default, Dec 20 2019, 18:57:59) [GCC 8.3.0]
```
##### CONFIGURATION
Completely default
##### OS / ENVIRONMENT
Docker image on amd64 running debian:buster-slim image id: 43e3995ee54a
Installed via pip
##### STEPS TO REPRODUCE
play.yml
- hosts: localhost
gather_facts: no
vars:
known_hosts_path:
example_com_ed25519_key: >
host.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFp8VtD59XAcxkj1qbfCtB1in9nm5WiipORjtVJUBA6I
tasks:
- name: Create a copy of known_hosts
copy:
src: existing_known_hosts
dest: test_known_hosts
- name: add the ed25519 host key
known_hosts:
name: host.example.com
key: "{{ example_com_ed25519_key }}"
state: present
path: "test_known_hosts"
register: result
- name: get the file content
command: "cat test_known_hosts"
register: known_hosts_v1
- name: assert that the key was added and ordering preserved
assert:
that:
- 'result is changed'
- 'known_hosts_v1.stdout_lines[-1].strip() == example_com_ed25519_key.strip()'
- 'known_hosts_v1.stdout_lines[4].startswith("@cert-authority")'
existing_known_hosts
example.com ssh-dss AAAAB3NzaC1kc3MAAACBALT8YHxZ59d8yX4oQNPbpdK9AMPRQGKFY9X13S2fp4UMPijiB3ETxU1bAyVTjTbsoag065naFt13aIVl+u0MDPfMuYgVJFEorAZkDlBixvT25zpKyQhI4CtHhZ9Y9YWug4xLqSaFUYEPO31Bie7k8xRfDwsHtzTRPp/0zRURwARHAAAAFQDLx2DZMm3cR8cZtbq4zdSvkXLh0wAAAIAalkQYziu2b5dDRQMiFpDLpPdbymyVhDMmRKnXwAB1+dhGyJLGvfe0xO+ibqGXMp1aZ1iC3a/vHTpYKDVqKIIpFg5r0fxAcAZkJR0aRC8RDxW/IclbIliETD71osIT8I47OFc7vAVCWP8JbV3ZYzR+i98WUkmZ4/ZUzsDl2gi7WAAAAIAsdTGwAo4Fs784TdP2tIHCqxAIz2k4tWmZyeRmXkH5K/P1o9XSh3RNxvFKK7BY6dQK+h9jLunMBs0SCzhMoTcXaJq331kmLJltjq5peo0PnLGnQz5pas0PD7p7gb+soklmHoVp7J2oMC/U4N1Rxr6g9sv8Rpsf1PTPDT3sEbze6A== root@freezer
@cert-authority *.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDXh1gk2xgS2MekPvo7ZEKiOT7HoyvOAzai2GqoLXGHO
# example.net ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM6OSqweGdPdQ/metQaf738AdN3P+itYp1AypOTgXkyj root@localhost
Output
# ansible-playbook play.yml --diff
[WARNING]: No inventory was parsed, only implicit localhost is available
[WARNING]: provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all'
PLAY [localhost] *************************************************************************************************************
...
TASK [add the ed25519 host key] **********************************************************************************************
--- before: test_known_hosts
+++ after: test_known_hosts
@@ -1,3 +1,3 @@
example.com ssh-dss AAAAB3NzaC1kc3MAAACBALT8YHxZ59d8yX4oQNPbpdK9AMPRQGKFY9X13S2fp4UMPijiB3ETxU1bAyVTjTbsoag065naFt13aIVl+u0MDPfMuYgVJFEorAZkDlBixvT25zpKyQhI4CtHhZ9Y9YWug4xLqSaFUYEPO31Bie7k8xRfDwsHtzTRPp/0zRURwARHAAAAFQDLx2DZMm3cR8cZtbq4zdSvkXLh0wAAAIAalkQYziu2b5dDRQMiFpDLpPdbymyVhDMmRKnXwAB1+dhGyJLGvfe0xO+ibqGXMp1aZ1iC3a/vHTpYKDVqKIIpFg5r0fxAcAZkJR0aRC8RDxW/IclbIliETD71osIT8I47OFc7vAVCWP8JbV3ZYzR+i98WUkmZ4/ZUzsDl2gi7WAAAAIAsdTGwAo4Fs784TdP2tIHCqxAIz2k4tWmZyeRmXkH5K/P1o9XSh3RNxvFKK7BY6dQK+h9jLunMBs0SCzhMoTcXaJq331kmLJltjq5peo0PnLGnQz5pas0PD7p7gb+soklmHoVp7J2oMC/U4N1Rxr6g9sv8Rpsf1PTPDT3sEbze6A== root@freezer
-@cert-authority *.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDXh1gk2xgS2MekPvo7ZEKiOT7HoyvOAzai2GqoLXGHO
# example.net ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM6OSqweGdPdQ/metQaf738AdN3P+itYp1AypOTgXkyj root@localhost
+host.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFp8VtD59XAcxkj1qbfCtB1in9nm5WiipORjtVJUBA6I
changed: [localhost]
TASK [get the file content] **************************************************************************************************
changed: [localhost]
TASK [assert that the key was added and ordering preserved] ******************************************************************
fatal: [localhost]: FAILED! => {"msg": "The conditional check 'known_hosts_v1.stdout_lines[4].startswith(\"@cert-authority\")' failed. The error was: error while evaluating conditional (known_hosts_v1.stdout_lines[4].startswith(\"@cert-authority\")): list object has no element 4"}
PLAY RECAP *******************************************************************************************************************
localhost : ok=3 changed=3 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
I expected adding `hosts.example.com` not to have the side effect of removing `@cert-authority *.example.com ...`
##### ACTUAL RESULTS
See Above
|
https://github.com/ansible/ansible/issues/70339
|
https://github.com/ansible/ansible/pull/70340
|
7dde4901d42e4c043adbd980c941b97cd3237bb6
|
a4b00793be46f703e32ee4c440f303d19d2c652d
| 2020-06-27T04:20:12Z |
python
| 2023-11-22T00:55:01Z |
test/integration/targets/known_hosts/defaults/main.yml
|
---
example_org_rsa_key: >
example.org ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAglyZmHHWskQ9wkh8LYbIqzvg99/oloneH7BaZ02ripJUy/2Zynv4tgUfm9fdXvAb1XXCEuTRnts9FBer87+voU0FPRgx3CfY9Sgr0FspUjnm4lqs53FIab1psddAaS7/F7lrnjl6VqBtPwMRQZG7qlml5uogGJwYJHxX0PGtsdoTJsM=
example_org_ed25519_key: >
example.org ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIzlnSq5ESxLgW0avvPk3j7zLV59hcAPkxrMNdnZMKP2
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 70,339 |
known_hosts module breaks when using @cert-authority keys
|
<!--- Verify first that your issue is not already reported on GitHub -->
<!--- Also test if the latest release and devel branch are affected too -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
If I attempt to add a key for `host.example.com` ansible removes my `@cert-authority *.example.com`
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
known_hosts
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible-playbook 2.9.10
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.7/dist-packages/ansible
executable location = /usr/local/bin/ansible-playbook
python version = 3.7.3 (default, Dec 20 2019, 18:57:59) [GCC 8.3.0]
```
##### CONFIGURATION
Completely default
##### OS / ENVIRONMENT
Docker image on amd64 running debian:buster-slim image id: 43e3995ee54a
Installed via pip
##### STEPS TO REPRODUCE
play.yml
- hosts: localhost
gather_facts: no
vars:
known_hosts_path:
example_com_ed25519_key: >
host.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFp8VtD59XAcxkj1qbfCtB1in9nm5WiipORjtVJUBA6I
tasks:
- name: Create a copy of known_hosts
copy:
src: existing_known_hosts
dest: test_known_hosts
- name: add the ed25519 host key
known_hosts:
name: host.example.com
key: "{{ example_com_ed25519_key }}"
state: present
path: "test_known_hosts"
register: result
- name: get the file content
command: "cat test_known_hosts"
register: known_hosts_v1
- name: assert that the key was added and ordering preserved
assert:
that:
- 'result is changed'
- 'known_hosts_v1.stdout_lines[-1].strip() == example_com_ed25519_key.strip()'
- 'known_hosts_v1.stdout_lines[4].startswith("@cert-authority")'
existing_known_hosts
example.com ssh-dss AAAAB3NzaC1kc3MAAACBALT8YHxZ59d8yX4oQNPbpdK9AMPRQGKFY9X13S2fp4UMPijiB3ETxU1bAyVTjTbsoag065naFt13aIVl+u0MDPfMuYgVJFEorAZkDlBixvT25zpKyQhI4CtHhZ9Y9YWug4xLqSaFUYEPO31Bie7k8xRfDwsHtzTRPp/0zRURwARHAAAAFQDLx2DZMm3cR8cZtbq4zdSvkXLh0wAAAIAalkQYziu2b5dDRQMiFpDLpPdbymyVhDMmRKnXwAB1+dhGyJLGvfe0xO+ibqGXMp1aZ1iC3a/vHTpYKDVqKIIpFg5r0fxAcAZkJR0aRC8RDxW/IclbIliETD71osIT8I47OFc7vAVCWP8JbV3ZYzR+i98WUkmZ4/ZUzsDl2gi7WAAAAIAsdTGwAo4Fs784TdP2tIHCqxAIz2k4tWmZyeRmXkH5K/P1o9XSh3RNxvFKK7BY6dQK+h9jLunMBs0SCzhMoTcXaJq331kmLJltjq5peo0PnLGnQz5pas0PD7p7gb+soklmHoVp7J2oMC/U4N1Rxr6g9sv8Rpsf1PTPDT3sEbze6A== root@freezer
@cert-authority *.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDXh1gk2xgS2MekPvo7ZEKiOT7HoyvOAzai2GqoLXGHO
# example.net ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM6OSqweGdPdQ/metQaf738AdN3P+itYp1AypOTgXkyj root@localhost
Output
# ansible-playbook play.yml --diff
[WARNING]: No inventory was parsed, only implicit localhost is available
[WARNING]: provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all'
PLAY [localhost] *************************************************************************************************************
...
TASK [add the ed25519 host key] **********************************************************************************************
--- before: test_known_hosts
+++ after: test_known_hosts
@@ -1,3 +1,3 @@
example.com ssh-dss AAAAB3NzaC1kc3MAAACBALT8YHxZ59d8yX4oQNPbpdK9AMPRQGKFY9X13S2fp4UMPijiB3ETxU1bAyVTjTbsoag065naFt13aIVl+u0MDPfMuYgVJFEorAZkDlBixvT25zpKyQhI4CtHhZ9Y9YWug4xLqSaFUYEPO31Bie7k8xRfDwsHtzTRPp/0zRURwARHAAAAFQDLx2DZMm3cR8cZtbq4zdSvkXLh0wAAAIAalkQYziu2b5dDRQMiFpDLpPdbymyVhDMmRKnXwAB1+dhGyJLGvfe0xO+ibqGXMp1aZ1iC3a/vHTpYKDVqKIIpFg5r0fxAcAZkJR0aRC8RDxW/IclbIliETD71osIT8I47OFc7vAVCWP8JbV3ZYzR+i98WUkmZ4/ZUzsDl2gi7WAAAAIAsdTGwAo4Fs784TdP2tIHCqxAIz2k4tWmZyeRmXkH5K/P1o9XSh3RNxvFKK7BY6dQK+h9jLunMBs0SCzhMoTcXaJq331kmLJltjq5peo0PnLGnQz5pas0PD7p7gb+soklmHoVp7J2oMC/U4N1Rxr6g9sv8Rpsf1PTPDT3sEbze6A== root@freezer
-@cert-authority *.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDXh1gk2xgS2MekPvo7ZEKiOT7HoyvOAzai2GqoLXGHO
# example.net ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM6OSqweGdPdQ/metQaf738AdN3P+itYp1AypOTgXkyj root@localhost
+host.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFp8VtD59XAcxkj1qbfCtB1in9nm5WiipORjtVJUBA6I
changed: [localhost]
TASK [get the file content] **************************************************************************************************
changed: [localhost]
TASK [assert that the key was added and ordering preserved] ******************************************************************
fatal: [localhost]: FAILED! => {"msg": "The conditional check 'known_hosts_v1.stdout_lines[4].startswith(\"@cert-authority\")' failed. The error was: error while evaluating conditional (known_hosts_v1.stdout_lines[4].startswith(\"@cert-authority\")): list object has no element 4"}
PLAY RECAP *******************************************************************************************************************
localhost : ok=3 changed=3 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
I expected adding `hosts.example.com` not to have the side effect of removing `@cert-authority *.example.com ...`
##### ACTUAL RESULTS
See Above
|
https://github.com/ansible/ansible/issues/70339
|
https://github.com/ansible/ansible/pull/70340
|
7dde4901d42e4c043adbd980c941b97cd3237bb6
|
a4b00793be46f703e32ee4c440f303d19d2c652d
| 2020-06-27T04:20:12Z |
python
| 2023-11-22T00:55:01Z |
test/integration/targets/known_hosts/files/existing_known_hosts
|
example.com ssh-dss AAAAB3NzaC1kc3MAAACBALT8YHxZ59d8yX4oQNPbpdK9AMPRQGKFY9X13S2fp4UMPijiB3ETxU1bAyVTjTbsoag065naFt13aIVl+u0MDPfMuYgVJFEorAZkDlBixvT25zpKyQhI4CtHhZ9Y9YWug4xLqSaFUYEPO31Bie7k8xRfDwsHtzTRPp/0zRURwARHAAAAFQDLx2DZMm3cR8cZtbq4zdSvkXLh0wAAAIAalkQYziu2b5dDRQMiFpDLpPdbymyVhDMmRKnXwAB1+dhGyJLGvfe0xO+ibqGXMp1aZ1iC3a/vHTpYKDVqKIIpFg5r0fxAcAZkJR0aRC8RDxW/IclbIliETD71osIT8I47OFc7vAVCWP8JbV3ZYzR+i98WUkmZ4/ZUzsDl2gi7WAAAAIAsdTGwAo4Fs784TdP2tIHCqxAIz2k4tWmZyeRmXkH5K/P1o9XSh3RNxvFKK7BY6dQK+h9jLunMBs0SCzhMoTcXaJq331kmLJltjq5peo0PnLGnQz5pas0PD7p7gb+soklmHoVp7J2oMC/U4N1Rxr6g9sv8Rpsf1PTPDT3sEbze6A== root@freezer
|1|d71/U7CbOH3Su+d2zxlbmiNfXtI=|g2YSPAVoK7bmg16FCOOPKTZe2BM= ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==
|1|L0TqxOhAVh6mLZ2lbHdTv3owun0=|vn0La5pbHNxin3XzQQdvaOulvVU= ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBCNLCAA/SjVF3jkmlAlkgh+GtZdgxtusHaK66fcA7XSgCpQOdri1dGmND6pQDGwsxiKMy4Ou1GB2DR4N0G9T5E8=
|1|WPo7yAOdlQKLSuRatNJCmDoga0k=|D/QybGglKokWuEQUe9Okpy5uSh0= ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBCNLCAA/SjVF3jkmlAlkgh+GtZdgxtusHaK66fcA7XSgCpQOdri1dGmND6pQDGwsxiKMy4Ou1GB2DR4N0G9T5E8=
# example.net ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM6OSqweGdPdQ/metQaf738AdN3P+itYp1AypOTgXkyj root@localhost
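
For reference, the module's search_for_host_key() looks hosts up in a file like this fixture by shelling out to `ssh-keygen -F <host> -f <file>`. A hedged sketch of the same lookup from Python (the file name is simply the fixture shown above):

```python
import subprocess

# ssh-keygen prints a "# Host <name> found: line <n>" comment before each
# matching key line, and (on OpenSSH >= 6.4) exits 1 when the host is absent.
result = subprocess.run(
    ['ssh-keygen', '-F', 'example.com', '-f', 'existing_known_hosts'],
    capture_output=True, text=True,
)
print(result.returncode)
print(result.stdout)
```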
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 70,339 |
known_hosts module breaks when using @cert-authority keys
|
<!--- Verify first that your issue is not already reported on GitHub -->
<!--- Also test if the latest release and devel branch are affected too -->
<!--- Complete *all* sections as described, this form is processed automatically -->
##### SUMMARY
If I attempt to add a key for `host.example.com` ansible removes my `@cert-authority *.example.com`
##### ISSUE TYPE
- Bug Report
##### COMPONENT NAME
known_hosts
##### ANSIBLE VERSION
<!--- Paste verbatim output from "ansible --version" between quotes -->
```paste below
ansible-playbook 2.9.10
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.7/dist-packages/ansible
executable location = /usr/local/bin/ansible-playbook
python version = 3.7.3 (default, Dec 20 2019, 18:57:59) [GCC 8.3.0]
```
##### CONFIGURATION
Completely default
##### OS / ENVIRONMENT
Docker image on amd64 running debian:buster-slim image id: 43e3995ee54a
Installed via pip
##### STEPS TO REPRODUCE
play.yml
- hosts: localhost
gather_facts: no
vars:
known_hosts_path:
example_com_ed25519_key: >
host.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFp8VtD59XAcxkj1qbfCtB1in9nm5WiipORjtVJUBA6I
tasks:
- name: Create a copy of known_hosts
copy:
src: existing_known_hosts
dest: test_known_hosts
- name: add the ed25519 host key
known_hosts:
name: host.example.com
key: "{{ example_com_ed25519_key }}"
state: present
path: "test_known_hosts"
register: result
- name: get the file content
command: "cat test_known_hosts"
register: known_hosts_v1
- name: assert that the key was added and ordering preserved
assert:
that:
- 'result is changed'
- 'known_hosts_v1.stdout_lines[-1].strip() == example_com_ed25519_key.strip()'
- 'known_hosts_v1.stdout_lines[4].startswith("@cert-authority")'
existing_known_hosts
example.com ssh-dss AAAAB3NzaC1kc3MAAACBALT8YHxZ59d8yX4oQNPbpdK9AMPRQGKFY9X13S2fp4UMPijiB3ETxU1bAyVTjTbsoag065naFt13aIVl+u0MDPfMuYgVJFEorAZkDlBixvT25zpKyQhI4CtHhZ9Y9YWug4xLqSaFUYEPO31Bie7k8xRfDwsHtzTRPp/0zRURwARHAAAAFQDLx2DZMm3cR8cZtbq4zdSvkXLh0wAAAIAalkQYziu2b5dDRQMiFpDLpPdbymyVhDMmRKnXwAB1+dhGyJLGvfe0xO+ibqGXMp1aZ1iC3a/vHTpYKDVqKIIpFg5r0fxAcAZkJR0aRC8RDxW/IclbIliETD71osIT8I47OFc7vAVCWP8JbV3ZYzR+i98WUkmZ4/ZUzsDl2gi7WAAAAIAsdTGwAo4Fs784TdP2tIHCqxAIz2k4tWmZyeRmXkH5K/P1o9XSh3RNxvFKK7BY6dQK+h9jLunMBs0SCzhMoTcXaJq331kmLJltjq5peo0PnLGnQz5pas0PD7p7gb+soklmHoVp7J2oMC/U4N1Rxr6g9sv8Rpsf1PTPDT3sEbze6A== root@freezer
@cert-authority *.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDXh1gk2xgS2MekPvo7ZEKiOT7HoyvOAzai2GqoLXGHO
# example.net ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM6OSqweGdPdQ/metQaf738AdN3P+itYp1AypOTgXkyj root@localhost
Output
# ansible-playbook play.yml --diff
[WARNING]: No inventory was parsed, only implicit localhost is available
[WARNING]: provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all'
PLAY [localhost] *************************************************************************************************************
...
TASK [add the ed25519 host key] **********************************************************************************************
--- before: test_known_hosts
+++ after: test_known_hosts
@@ -1,3 +1,3 @@
example.com ssh-dss AAAAB3NzaC1kc3MAAACBALT8YHxZ59d8yX4oQNPbpdK9AMPRQGKFY9X13S2fp4UMPijiB3ETxU1bAyVTjTbsoag065naFt13aIVl+u0MDPfMuYgVJFEorAZkDlBixvT25zpKyQhI4CtHhZ9Y9YWug4xLqSaFUYEPO31Bie7k8xRfDwsHtzTRPp/0zRURwARHAAAAFQDLx2DZMm3cR8cZtbq4zdSvkXLh0wAAAIAalkQYziu2b5dDRQMiFpDLpPdbymyVhDMmRKnXwAB1+dhGyJLGvfe0xO+ibqGXMp1aZ1iC3a/vHTpYKDVqKIIpFg5r0fxAcAZkJR0aRC8RDxW/IclbIliETD71osIT8I47OFc7vAVCWP8JbV3ZYzR+i98WUkmZ4/ZUzsDl2gi7WAAAAIAsdTGwAo4Fs784TdP2tIHCqxAIz2k4tWmZyeRmXkH5K/P1o9XSh3RNxvFKK7BY6dQK+h9jLunMBs0SCzhMoTcXaJq331kmLJltjq5peo0PnLGnQz5pas0PD7p7gb+soklmHoVp7J2oMC/U4N1Rxr6g9sv8Rpsf1PTPDT3sEbze6A== root@freezer
-@cert-authority *.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDXh1gk2xgS2MekPvo7ZEKiOT7HoyvOAzai2GqoLXGHO
# example.net ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM6OSqweGdPdQ/metQaf738AdN3P+itYp1AypOTgXkyj root@localhost
+host.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFp8VtD59XAcxkj1qbfCtB1in9nm5WiipORjtVJUBA6I
changed: [localhost]
TASK [get the file content] **************************************************************************************************
changed: [localhost]
TASK [assert that the key was added and ordering preserved] ******************************************************************
fatal: [localhost]: FAILED! => {"msg": "The conditional check 'known_hosts_v1.stdout_lines[4].startswith(\"@cert-authority\")' failed. The error was: error while evaluating conditional (known_hosts_v1.stdout_lines[4].startswith(\"@cert-authority\")): list object has no element 4"}
PLAY RECAP *******************************************************************************************************************
localhost : ok=3 changed=3 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
##### EXPECTED RESULTS
<!--- Describe what you expected to happen when running the steps above -->
I expected adding `hosts.example.com` not to have the side effect of removing `@cert-authority *.example.com ...`
##### ACTUAL RESULTS
See Above
|
https://github.com/ansible/ansible/issues/70339
|
https://github.com/ansible/ansible/pull/70340
|
7dde4901d42e4c043adbd980c941b97cd3237bb6
|
a4b00793be46f703e32ee4c440f303d19d2c652d
| 2020-06-27T04:20:12Z |
python
| 2023-11-22T00:55:01Z |
test/integration/targets/known_hosts/tasks/main.yml
|
# test code for the known_hosts module
# (c) 2017, Marius Gedminas <[email protected]>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
- name: copy an existing file in place
copy:
src: existing_known_hosts
dest: "{{ remote_tmp_dir }}/known_hosts"
# test addition
- name: add a new host in check mode
check_mode: yes
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
path: "{{remote_tmp_dir}}/known_hosts"
register: diff
- name: assert that the diff looks as expected (the key was added at the end)
assert:
that:
- 'diff is changed'
- 'diff.diff.before_header == diff.diff.after_header == remote_tmp_dir|expanduser + "/known_hosts"'
- 'diff.diff.after.splitlines()[:-1] == diff.diff.before.splitlines()'
- 'diff.diff.after.splitlines()[-1] == example_org_rsa_key.strip()'
- name: add a new host
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
path: "{{remote_tmp_dir}}/known_hosts"
register: result
- name: get the file content
command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts
- name: assert that the key was added and ordering preserved
assert:
that:
- 'result is changed'
- 'known_hosts.stdout_lines[0].startswith("example.com")'
- 'known_hosts.stdout_lines[4].startswith("# example.net")'
- 'known_hosts.stdout_lines[-1].strip() == example_org_rsa_key.strip()'
# test idempotence of addition
- name: add the same host in check mode
check_mode: yes
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
path: "{{remote_tmp_dir}}/known_hosts"
register: check
- name: assert that no changes were expected
assert:
that:
- 'check is not changed'
- 'check.diff.before == check.diff.after'
- name: add the same host
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
path: "{{remote_tmp_dir}}/known_hosts"
register: result
- name: get the file content
command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v2
- name: assert that no changes happened
assert:
that:
- 'result is not changed'
- 'result.diff.before == result.diff.after'
- 'known_hosts.stdout == known_hosts_v2.stdout'
# https://github.com/ansible/ansible/issues/78598
# test removing nonexistent host key when the other keys exist for the host
- name: remove different key
known_hosts:
name: example.org
key: "{{ example_org_ed25519_key }}"
state: absent
path: "{{remote_tmp_dir}}/known_hosts"
register: result
- name: remove nonexistent key with check mode
known_hosts:
name: example.org
key: "{{ example_org_ed25519_key }}"
state: absent
path: "{{remote_tmp_dir}}/known_hosts"
check_mode: yes
register: check_mode_result
- name: get the file content
command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_different_key_removal
- name: assert that no changes happened
assert:
that:
- 'result is not changed'
- 'check_mode_result is not changed'
- 'result.diff.before == result.diff.after'
- 'known_hosts_v2.stdout == known_hosts_different_key_removal.stdout'
# test removal
- name: remove the host in check mode
check_mode: yes
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: absent
path: "{{remote_tmp_dir}}/known_hosts"
register: diff
- name: assert that the diff looks as expected (the key was removed)
assert:
that:
- 'diff.diff.before_header == diff.diff.after_header == remote_tmp_dir|expanduser + "/known_hosts"'
- 'diff.diff.before.splitlines()[-1] == example_org_rsa_key.strip()'
- 'diff.diff.after.splitlines() == diff.diff.before.splitlines()[:-1]'
- name: remove the host
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: absent
path: "{{remote_tmp_dir}}/known_hosts"
register: result
- name: get the file content
command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v3
- name: assert that the key was removed and ordering preserved
assert:
that:
- 'diff is changed'
- 'result is changed'
- '"example.org" not in known_hosts_v3.stdout'
- 'known_hosts_v3.stdout_lines[0].startswith("example.com")'
- 'known_hosts_v3.stdout_lines[-1].startswith("# example.net")'
# test idempotence of removal
- name: remove the same host in check mode
check_mode: yes
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: absent
path: "{{remote_tmp_dir}}/known_hosts"
register: check
- name: assert that no changes were expected
assert:
that:
- 'check is not changed'
- 'check.diff.before == check.diff.after'
- name: remove the same host
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: absent
path: "{{remote_tmp_dir}}/known_hosts"
register: result
- name: get the file content
command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v4
- name: assert that no changes happened
assert:
that:
- 'result is not changed'
- 'result.diff.before == result.diff.after'
- 'known_hosts_v3.stdout == known_hosts_v4.stdout'
# test addition as hashed_host
- name: add a new hashed host
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
path: "{{remote_tmp_dir}}/known_hosts"
hash_host: yes
register: result
- name: get the file content
command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v5
- name: assert that the key was added and ordering preserved
assert:
that:
- 'result is changed'
- 'known_hosts_v5.stdout_lines[0].startswith("example.com")'
- 'known_hosts_v5.stdout_lines[4].startswith("# example.net")'
- 'known_hosts_v5.stdout_lines[-1].strip().startswith("|1|")'
- 'known_hosts_v5.stdout_lines[-1].strip().endswith(example_org_rsa_key.strip().split()[-1])'
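# Note: an OpenSSH hashed entry has the form "|1|<base64 salt>|<base64 HMAC-SHA1 of the hostname keyed with the salt>" followed by
# the key type and base64 key, i.e. the hostname is obscured but the key material is kept verbatim - which is what the two assertions above rely on.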
# test idempotence of hashed addition
- name: add the same host hashed
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
path: "{{remote_tmp_dir}}/known_hosts"
hash_host: yes
register: result
- name: get the file content
command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v6
- name: assert that no changes happened
assert:
that:
- 'result is not changed'
- 'result.diff.before == result.diff.after'
- 'known_hosts_v5.stdout == known_hosts_v6.stdout'
# test hashed removal
- name: remove the hashed host
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: absent
path: "{{remote_tmp_dir}}/known_hosts"
register: result
- name: get the file content
command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v7
- name: assert that the key was removed and ordering preserved
assert:
that:
- 'result is changed'
- 'example_org_rsa_key.strip().split()[-1] not in known_hosts_v7.stdout'
- 'known_hosts_v7.stdout_lines[0].startswith("example.com")'
- 'known_hosts_v7.stdout_lines[-1].startswith("# example.net")'
# test idempotence of removal
- name: remove the same hashed host
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: absent
path: "{{remote_tmp_dir}}/known_hosts"
register: result
- name: get the file content
command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v8
- name: assert that no changes happened
assert:
that:
- 'result is not changed'
- 'result.diff.before == result.diff.after'
- 'known_hosts_v7.stdout == known_hosts_v8.stdout'
# test roundtrip plaintext => hashed => plaintext
# The assertions are rather relaxed, because most of this has been tested previously
- name: add a new host
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
path: "{{remote_tmp_dir}}/known_hosts"
- name: get the file content
command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v8
- name: assert the plaintext host is there
assert:
that:
- 'known_hosts_v8.stdout_lines[-1].strip() == example_org_rsa_key.strip()'
- name: update the host to hashed mode
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
path: "{{remote_tmp_dir}}/known_hosts"
hash_host: true
- name: get the file content
command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v9
- name: assert the hashed host is there
assert:
that:
- 'known_hosts_v9.stdout_lines[-1].strip().startswith("|1|")'
- 'known_hosts_v9.stdout_lines[-1].strip().endswith(example_org_rsa_key.strip().split()[-1])'
- name: downgrade the host to plaintext mode
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
path: "{{remote_tmp_dir}}/known_hosts"
- name: get the file content
command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v10
- name: assert the plaintext host is there
assert:
that:
- 'known_hosts_v10.stdout_lines[5].strip() == example_org_rsa_key.strip()'
# ... and remove the host again for the next test
- name: copy an existing file in place
copy:
src: existing_known_hosts
dest: "{{ remote_tmp_dir }}/known_hosts"
# Test key changes
- name: add a hashed host
known_hosts:
name: example.org
key: "{{ example_org_rsa_key }}"
state: present
path: "{{remote_tmp_dir}}/known_hosts"
hash_host: true
- name: change the key of a hashed host
known_hosts:
name: example.org
key: "{{ example_org_rsa_key.strip()[:-7] + 'RANDOM=' }}"
state: present
path: "{{remote_tmp_dir}}/known_hosts"
hash_host: true
- name: get the file content
command: "cat {{remote_tmp_dir}}/known_hosts"
register: known_hosts_v11
- name: assert the change took place and the key got modified
assert:
that:
- 'known_hosts_v11.stdout_lines[-1].strip().endswith("RANDOM=")'
# test errors
- name: Try using a comma separated list of hosts
known_hosts:
name: example.org,acme.com
key: "{{ example_org_rsa_key }}"
path: "{{remote_tmp_dir}}/known_hosts"
ignore_errors: yes
register: result
- name: Assert that error message was displayed
assert:
that:
- result is failed
- result.msg == 'Comma separated list of names is not supported. Please pass a single name to lookup in the known_hosts file.'
- name: Try using a name that does not match the key
known_hosts:
name: example.com
key: "{{ example_org_rsa_key }}"
path: "{{remote_tmp_dir}}/known_hosts"
ignore_errors: yes
register: result
- name: Assert that name checking failed with error message
assert:
that:
- result is failed
- result.msg == 'Host parameter does not match hashed host field in supplied key'
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,241 |
handler include_tasks fails if no `main.yaml` in role tasks in Ansible 2.15 - worked in 2.14
|
### Summary
In Ansible 2.14.3, e.g. the package available in Debian Bullseye, `include_tasks` in a handler within a role always found the task file from `tasks` in that role, if it existed. When I install the current version using pip (2.15.6 at time of writing) this stops working if `tasks/main.yaml` does not exist in the role - creating an empty `main.yaml` (e.g. `touch roles/role_name/tasks/main.yaml`) is sufficient for `include_tasks` in a handler to start working again.
I have manually downgraded to 2.14.3 with pip and verified that the same code works with 2.14.3 but not 2.15.6 on the same system, so this is not a difference between the OS package and the one pip installs, but a bug introduced between those two versions.
In my specific use case, I have a role that is designed to be used via several entry points and has no default `main.yaml`. This has been working fine but started erroring in version 2.15.6. It has taken quite a bit of testing to discover what was making it fail (and why other similar roles that use the same pattern but happen to have a `main.yaml` were still working).
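For reference, the role layout used in the reproduction below is roughly:
```
site.yaml
roles/test_role/
├── handlers/
│   └── main.yaml           # handler does include_tasks: included_tasks.yaml
└── tasks/
    ├── entry_point.yaml    # custom entry point; note there is no tasks/main.yaml
    └── included_tasks.yaml
```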
### Issue Type
Bug Report
### Component Name
ansible.builtin.import_tasks
### Ansible Version
```console
$ # Broken version
$ ansible --version
ansible [core 2.15.6]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
$ # Working version (after "pip install --upgrade ansible-core==2.14.3")
$ ansible --version
ansible [core 2.14.3]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
```
### OS / Environment
Debian 12.0
Ansible is installed in a Python 3 VirtualEnv with this `requirements.txt`; however, the issue should be reproducible with just `ansible-core`:
```
ansible-core
# dnspython is required for community.general.dig lookup plugin
dnspython
# hvac required for Hashicorp Vault
hvac
# netaddr required for ansible.utils.ipaddr filter
netaddr
# To manage Windows systems
pywinrm[credssp]
```
### Steps to Reproduce
`site.yaml`:
```yaml
---
- name: Test
hosts: all
gather_facts: false # Not needed for example
tasks:
- ansible.builtin.import_role:
name: test_role
# Custom entry point (no main.yaml required in test_role/tasks)
tasks_from: entry_point.yaml
...
```
`roles/test_role/tasks/entry_point.yaml`:
```yaml
---
# This is not necessary to reproduce the problem, just
# illustrating include_tasks works here without `main.yaml`.
- name: Include tasks works here
ansible.builtin.include_tasks: included_tasks.yaml
# Somehow trigger the role's handler - how is not important,
# but running the handler is necessary to illustrate the problem.
- name: Trigger handler
ansible.builtin.debug:
changed_when: true # Force handler to always be notified
notify: Test handler
...
```
`roles/test_role/tasks/included_tasks.yaml`:
```yaml
---
- name: Included task
ansible.builtin.debug: msg="Included task"
...
```
`roles/test_role/handlers/main.yaml`:
```yaml
---
- name: Test handler
ansible.builtin.include_tasks: included_tasks.yaml
...
```
### Expected Results
When the handler runs, the `included_tasks.yaml` file is found in the role's `tasks` folder (the expected behaviour can be seen by running with Ansible 2.14.3, or if an empty `main.yaml` has been added to `test_role/tasks`):
```
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Actual Results
```console
With Ansible 2.15.6, an error that `included_tasks.yaml` does not exist - in this example, despite the same file being included successfully in another task list. It is unexpected that `include_tasks`, even when used from a handler, cannot find a tasks file in the role's `tasks` directory (i.e. where all of a role's tasks live). This used to work fine (and still does if `ansible-core` is downgraded to the previous version):
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
fatal: [localhost]: FAILED! => {"reason": "Could not find or access '/tmp/test/included_tasks.yaml' on the Ansible Controller."}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=3 changed=1 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
Note that creating `roles/test_role/tasks/main.yaml` causes the same code to begin working with 2.15.6 as well as 2.14.3. Although `main.yaml` is the default entry point, I am not aware of any other Ansible functionality that breaks if it does not exist and only other entry points are used for a specific role, so I think this is a bug.
```
$ touch roles/test_role/tasks/main.yaml
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82241
|
https://github.com/ansible/ansible/pull/82248
|
a4b00793be46f703e32ee4c440f303d19d2c652d
|
d664f13b4a117b324f107b603e9b8e2bb9af50c5
| 2023-11-18T17:48:36Z |
python
| 2023-11-22T16:42:51Z |
changelogs/fragments/82241-handler-include-tasks-from.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,241 |
handler include_tasks fails if no `main.yaml` in role tasks in Ansible 2.15 - worked in 2.14
|
### Summary
In Ansible 2.14.3, e.g. the package available in Debian Bullseye, `include_tasks` in a handler within a role always found the task file from `tasks` in that role, if it existed. When I install the current version using pip (2.15.6 at time of writing) this stops working if `tasks/main.yaml` does not exist in the role - creating an empty `main.yaml` (e.g. `touch roles/role_name/tasks/main.yaml`) is sufficient for `include_tasks` in a handler to start working again.
I have manually downgraded to 2.14.3 with pip and verified that the same code works with 2.14.3 but not 2.15.6 on the same system, so this is not a difference between the OS package and the one pip installs, but a bug introduced between those two versions.
In my specific use case, I have a role that is designed to be used via several entry points and has no default `main.yaml`. This has been working fine but started erroring in version 2.15.6. It has taken quite a bit of testing to discover what was making it fail (and why other similar roles that use the same pattern but happen to have a `main.yaml` were still working).
### Issue Type
Bug Report
### Component Name
ansible.builtin.import_tasks
### Ansible Version
```console
$ # Broken version
$ ansible --version
ansible [core 2.15.6]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
$ # Working version (after "pip install --upgrade ansible-core==2.14.3")
$ ansible --version
ansible [core 2.14.3]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
```
### OS / Environment
Debian 12.0
Ansible is installed in a Python 3 VirtualEnv with this `requirements.txt`; however, the issue should be reproducible with just `ansible-core`:
```
ansible-core
# dnspython is required for community.general.dig lookup plugin
dnspython
# hvac required for Hashicorp Vault
hvac
# netaddr required for ansible.utils.ipaddr filter
netaddr
# To manage Windows systems
pywinrm[credssp]
```
### Steps to Reproduce
`site.yaml`:
```yaml
---
- name: Test
hosts: all
gather_facts: false # Not needed for example
tasks:
- ansible.builtin.import_role:
name: test_role
# Custom entry point (no main.yaml required in test_role/tasks)
tasks_from: entry_point.yaml
...
```
`roles/test_role/tasks/entry_point.yaml`:
```yaml
---
# This is not necessary to reproduce the problem, just
# illustrating include_tasks works here without `main.yaml`.
- name: Include tasks works here
ansible.builtin.include_tasks: included_tasks.yaml
# Somehow trigger the role's handler - how is not important,
# but running the handler is necessary to illustrate the problem.
- name: Trigger handler
ansible.builtin.debug:
changed_when: true # Force handler to always be notified
notify: Test handler
...
```
`roles/test_role/tasks/included_tasks.yaml`:
```yaml
---
- name: Included task
ansible.builtin.debug: msg="Included task"
...
```
`roles/test_role/handlers/main.yaml`:
```yaml
---
- name: Test handler
ansible.builtin.include_tasks: included_tasks.yaml
...
```
### Expected Results
When the handler runs, the `included_tasks.yaml` file is found in the role's `tasks` folder (the expected behaviour can be seen by running with Ansible 2.14.3, or if an empty `main.yaml` has been added to `test_role/tasks`):
```
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Actual Results
```console
With Ansible 2.15.6, an error that `included_tasks.yaml` does not exist - in this example, despite the same file being included successfully in another task list. It is unexpected that `include_tasks`, even when used from a handler, cannot find a tasks file in the role's `tasks` directory (i.e. where all of a role's tasks live). This used to work fine (and still does if `ansible-core` is downgraded to the previous version):
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
fatal: [localhost]: FAILED! => {"reason": "Could not find or access '/tmp/test/included_tasks.yaml' on the Ansible Controller."}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=3 changed=1 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
Note that creating `roles/test_role/tasks/main.yaml` causes the same code to begin working with 2.15.6 as well as 2.14.3. Although `main.yaml` is the default entry point, I am not aware of any other Ansible functionality that breaks if it does not exist and only other entry points are used for a specific role, so I think this is a bug.
```
$ touch roles/test_role/tasks/main.yaml
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82241
|
https://github.com/ansible/ansible/pull/82248
|
a4b00793be46f703e32ee4c440f303d19d2c652d
|
d664f13b4a117b324f107b603e9b8e2bb9af50c5
| 2023-11-18T17:48:36Z |
python
| 2023-11-22T16:42:51Z |
lib/ansible/playbook/included_file.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.executor.task_executor import remove_omit
from ansible.module_utils.common.text.converters import to_text
from ansible.playbook.handler import Handler
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.role_include import IncludeRole
from ansible.template import Templar
from ansible.utils.display import Display
display = Display()
class IncludedFile:
def __init__(self, filename, args, vars, task, is_role=False):
self._filename = filename
self._args = args
self._vars = vars
self._task = task
self._hosts = []
self._is_role = is_role
self._results = []
def add_host(self, host):
if host not in self._hosts:
self._hosts.append(host)
return
raise ValueError()
def __eq__(self, other):
return (other._filename == self._filename and
other._args == self._args and
other._vars == self._vars and
other._task._uuid == self._task._uuid and
other._task._parent._uuid == self._task._parent._uuid)
def __repr__(self):
return "%s (args=%s vars=%s): %s" % (self._filename, self._args, self._vars, self._hosts)
@staticmethod
def process_include_results(results, iterator, loader, variable_manager):
included_files = []
task_vars_cache = {}
for res in results:
original_host = res._host
original_task = res._task
if original_task.action in C._ACTION_ALL_INCLUDES:
if original_task.loop:
if 'results' not in res._result:
continue
include_results = res._result['results']
else:
include_results = [res._result]
for include_result in include_results:
# if the task result was skipped or failed, continue
if 'skipped' in include_result and include_result['skipped'] or 'failed' in include_result and include_result['failed']:
continue
cache_key = (iterator._play, original_host, original_task)
try:
task_vars = task_vars_cache[cache_key]
except KeyError:
task_vars = task_vars_cache[cache_key] = variable_manager.get_vars(play=iterator._play, host=original_host, task=original_task)
include_args = include_result.get('include_args', dict())
special_vars = {}
loop_var = include_result.get('ansible_loop_var', 'item')
index_var = include_result.get('ansible_index_var')
if loop_var in include_result:
task_vars[loop_var] = special_vars[loop_var] = include_result[loop_var]
if index_var and index_var in include_result:
task_vars[index_var] = special_vars[index_var] = include_result[index_var]
if '_ansible_item_label' in include_result:
task_vars['_ansible_item_label'] = special_vars['_ansible_item_label'] = include_result['_ansible_item_label']
if 'ansible_loop' in include_result:
task_vars['ansible_loop'] = special_vars['ansible_loop'] = include_result['ansible_loop']
if original_task.no_log and '_ansible_no_log' not in include_args:
task_vars['_ansible_no_log'] = special_vars['_ansible_no_log'] = original_task.no_log
# get search path for this task to pass to lookup plugins that may be used in pathing to
# the included file
task_vars['ansible_search_path'] = original_task.get_search_path()
# ensure basedir is always in (dwim already searches here but we need to display it)
if loader.get_basedir() not in task_vars['ansible_search_path']:
task_vars['ansible_search_path'].append(loader.get_basedir())
templar = Templar(loader=loader, variables=task_vars)
if original_task.action in C._ACTION_INCLUDE_TASKS:
include_file = None
if original_task._parent:
# handle relative includes by walking up the list of parent include
# tasks and checking the relative result to see if it exists
parent_include = original_task._parent
cumulative_path = None
while parent_include is not None:
if not isinstance(parent_include, TaskInclude):
parent_include = parent_include._parent
continue
if isinstance(parent_include, IncludeRole):
parent_include_dir = parent_include._role_path
else:
try:
parent_include_dir = os.path.dirname(templar.template(parent_include.args.get('_raw_params')))
except AnsibleError as e:
parent_include_dir = ''
display.warning(
'Templating the path of the parent %s failed. The path to the '
'included file may not be found. '
'The error was: %s.' % (original_task.action, to_text(e))
)
if cumulative_path is not None and not os.path.isabs(cumulative_path):
cumulative_path = os.path.join(parent_include_dir, cumulative_path)
else:
cumulative_path = parent_include_dir
include_target = templar.template(include_result['include'])
if original_task._role:
dirname = 'handlers' if isinstance(original_task, Handler) else 'tasks'
new_basedir = os.path.join(original_task._role._role_path, dirname, cumulative_path)
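# Try the include target both directly under the role's 'handlers'/'tasks' directory and relative to the
# parent include's cumulative path; the first candidate that exists on disk (os.stat succeeds) is used.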
candidates = [
loader.path_dwim_relative(original_task._role._role_path, dirname, include_target),
loader.path_dwim_relative(new_basedir, dirname, include_target)
]
for include_file in candidates:
try:
# may throw OSError
os.stat(include_file)
# or select the task file if it exists
break
except OSError:
pass
else:
include_file = loader.path_dwim_relative(loader.get_basedir(), cumulative_path, include_target)
if os.path.exists(include_file):
break
else:
parent_include = parent_include._parent
if include_file is None:
if original_task._role:
include_target = templar.template(include_result['include'])
include_file = loader.path_dwim_relative(
original_task._role._role_path,
'handlers' if isinstance(original_task, Handler) else 'tasks',
include_target,
is_role=True)
else:
include_file = loader.path_dwim(include_result['include'])
include_file = templar.template(include_file)
inc_file = IncludedFile(include_file, include_args, special_vars, original_task)
else:
# template the included role's name here
role_name = include_args.pop('name', include_args.pop('role', None))
if role_name is not None:
role_name = templar.template(role_name)
new_task = original_task.copy()
new_task.post_validate(templar=templar)
new_task._role_name = role_name
for from_arg in new_task.FROM_ARGS:
if from_arg in include_args:
from_key = from_arg.removesuffix('_from')
new_task._from_files[from_key] = templar.template(include_args.pop(from_arg))
omit_token = task_vars.get('omit')
if omit_token:
new_task._from_files = remove_omit(new_task._from_files, omit_token)
inc_file = IncludedFile(role_name, include_args, special_vars, new_task, is_role=True)
idx = 0
orig_inc_file = inc_file
while 1:
try:
pos = included_files[idx:].index(orig_inc_file)
# pos is relative to idx since we are slicing
# use idx + pos due to relative indexing
inc_file = included_files[idx + pos]
except ValueError:
included_files.append(orig_inc_file)
inc_file = orig_inc_file
try:
inc_file.add_host(original_host)
inc_file._results.append(res)
except ValueError:
# The host already exists for this include, advance forward, this is a new include
idx += pos + 1
else:
break
return included_files
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,241 |
handler include_tasks fails if no `main.yaml` in role tasks in Ansible 2.15 - worked in 2.14
|
### Summary
In Ansible 2.14.3, e.g. the package available in Debian Bullseye, `include_tasks` in a handler within a role always found the task file from `tasks` in that role, if it existed. When I install the current version using pip (2.15.6 at time of writing) this stops working if `tasks/main.yaml` does not exist in the role - creating an empty `main.yaml` (e.g. `touch roles/role_name/tasks/main.yaml`) is sufficient for `include_tasks` in a handler to start working again.
I have manually downgraded to 2.14.3 with pip and verified that the same code works with 2.14.3 but not 2.15.6 on the same system, so this is not a difference between the OS package and the one pip installs, but a bug introduced between those two versions.
In my specific use case, I have a role that is designed to be used via several entry points and has no default `main.yaml`. This has been working fine but started erroring in version 2.15.6. It has taken quite a bit of testing to discover what was making it fail (and why other similar roles that use the same pattern but happen to have a `main.yaml` were still working).
### Issue Type
Bug Report
### Component Name
ansible.builtin.import_tasks
### Ansible Version
```console
$ # Broken version
$ ansible --version
ansible [core 2.15.6]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
$ # Working version (after "pip install --upgrade ansible-core==2.14.3")
$ ansible --version
ansible [core 2.14.3]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
```
### OS / Environment
Debian 12.0
Ansible is installed in a Python 3 VirtualEnv with this `requirements.txt`; however, the issue should be reproducible with just `ansible-core`:
```
ansible-core
# dnspython is required for community.general.dig lookup plugin
dnspython
# hvac required for Hashicorp Vault
hvac
# netaddr required for ansible.utils.ipaddr filter
netaddr
# To manage Windows systems
pywinrm[credssp]
```
### Steps to Reproduce
`site.yaml`:
```yaml
---
- name: Test
hosts: all
gather_facts: false # Not needed for example
tasks:
- ansible.builtin.import_role:
name: test_role
# Custom entry point (no main.yaml required in test_role/tasks)
tasks_from: entry_point.yaml
...
```
`roles/test_role/tasks/entry_point.yaml`:
```yaml
---
# This is not necessary to reproduce the problem, just
# illustrating include_tasks works here without `main.yaml`.
- name: Include tasks works here
ansible.builtin.include_tasks: included_tasks.yaml
# Somehow trigger the role's handler - how is not important,
# but running the handler is necessary to illustrate the problem.
- name: Trigger handler
ansible.builtin.debug:
changed_when: true # Force handler to always be notified
notify: Test handler
...
```
`roles/test_role/tasks/included_tasks.yaml`:
```yaml
---
- name: Included task
ansible.builtin.debug: msg="Included task"
...
```
`roles/test_role/handlers/main.yaml`:
```yaml
---
- name: Test handler
ansible.builtin.include_tasks: included_tasks.yaml
...
```
### Expected Results
When the handler runs, the `included_tasks.yaml` file is found in the role's `tasks` folder (the expected behaviour can be seen by running with Ansible 2.14.3, or if an empty `main.yaml` has been added to `test_role/tasks`):
```
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Actual Results
```console
With Ansible 2.15.6, an error that `included_tasks.yaml` does not exist - in this example, despite the same file being included successfully in another task list. It is unexpected that `include_tasks`, even when used from a handler, cannot find a tasks file in the role's `tasks` directory (i.e. where all of a role's tasks live). This used to work fine (and still does if `ansible-core` is downgraded to the previous version):
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
fatal: [localhost]: FAILED! => {"reason": "Could not find or access '/tmp/test/included_tasks.yaml' on the Ansible Controller."}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=3 changed=1 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
Note that creating `roles/test_role/tasks/main.yaml` causes the same code to begin working with 2.15.6 as well as 2.14.3. Although `main.yaml` is the default entry point, I am not aware of any other Ansible functionality that breaks if it does not exist and only other entry points are used for a specific role, so I think this is a bug.
```
$ touch roles/test_role/tasks/main.yaml
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82241
|
https://github.com/ansible/ansible/pull/82248
|
a4b00793be46f703e32ee4c440f303d19d2c652d
|
d664f13b4a117b324f107b603e9b8e2bb9af50c5
| 2023-11-18T17:48:36Z |
python
| 2023-11-22T16:42:51Z |
test/integration/targets/handlers/82241.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,241 |
handler include_tasks fails if no `main.yaml` in role tasks in Ansible 2.15 - worked in 2.14
|
### Summary
In Ansible 2.14.3, e.g. the package available in Debian Bullseye, `include_tasks` in a handler within a role always found the task file from `tasks` in that role, if it existed. When I install the current version using pip (2.15.6 at time of writing) this stops working if `tasks/main.yaml` does not exist in the role - creating an empty `main.yaml` (e.g. `touch roles/role_name/tasks/main.yaml`) is sufficient for `include_tasks` in a handler to start working again.
I have manually downgraded to 2.14.3 with pip and verified that the same code works with 2.14.3 but not 2.15.6 on the same system, so this is not a difference between the OS package and the one pip installs, but a bug introduced between those two versions.
In my specific use case, I have a role that is designed to be used via several entry points and has no default `main.yaml`. This has been working fine but started erroring in version 2.15.6. It has taken quite a bit of testing to discover what was making it fail (and why other similar roles that use the same pattern but happen to have a `main.yaml` were still working).
### Issue Type
Bug Report
### Component Name
ansible.builtin.import_tasks
### Ansible Version
```console
$ # Broken version
$ ansible --version
ansible [core 2.15.6]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
$ # Working version (after "pip install --upgrade ansible-core==2.14.3")
$ ansible --version
ansible [core 2.14.3]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
```
### OS / Environment
Debian 12.0
Ansible is installed in a Python 3 VirtualEnv with this `requirements.txt`; however, the issue should be reproducible with just `ansible-core`:
```
ansible-core
# dnspython is required for community.general.dig lookup plugin
dnspython
# hvac required for Hashicorp Vault
hvac
# netaddr required for ansible.utils.ipaddr filter
netaddr
# To manage Windows systems
pywinrm[credssp]
```
### Steps to Reproduce
`site.yaml`:
```yaml
---
- name: Test
hosts: all
gather_facts: false # Not needed for example
tasks:
- ansible.builtin.import_role:
name: test_role
# Custom entry point (no main.yaml required in test_role/tasks)
tasks_from: entry_point.yaml
...
```
`roles/test_role/tasks/entry_point.yaml`:
```yaml
---
# This is not necessary to reproduce the problem, just
# illustrating include_tasks works here without `main.yaml`.
- name: Include tasks works here
ansible.builtin.include_tasks: included_tasks.yaml
# Somehow trigger the role's handler - how is not important,
# but running the handler is necessary to illustrate the problem.
- name: Trigger handler
ansible.builtin.debug:
changed_when: true # Force handler to always be notified
notify: Test handler
...
```
`roles/test_role/tasks/included_tasks.yaml`:
```yaml
---
- name: Included task
ansible.builtin.debug: msg="Included task"
...
```
`roles/test_role/handlers/main.yaml`:
```yaml
---
- name: Test handler
ansible.builtin.include_tasks: included_tasks.yaml
...
```
### Expected Results
When the handler runs, the `included_tasks.yaml` file is found in the role's `tasks` folder (the expected behaviour can be seen by running with Ansible 2.14.3, or if an empty `main.yaml` has been added to `test_role/tasks`):
```
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Actual Results
```console
With Ansible 2.15.6, an error that `included_tasks.yaml` does not exist - in this example, despite the same file being included successfully in another task list. It is unexpected that `include_tasks`, even when used from a handler, cannot find a tasks file in the role's `tasks` directory (i.e. where all of a role's tasks live). This used to work fine (and still does if `ansible-core` is downgraded to the previous version):
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
fatal: [localhost]: FAILED! => {"reason": "Could not find or access '/tmp/test/included_tasks.yaml' on the Ansible Controller."}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=3 changed=1 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
Note that creating `roles/test_role/tasks/main.yaml` causes the same code to begin working with 2.15.6 as well as 2.14.3. Although `main.yaml` is the default entry point, I am not aware of any other Ansible functionality that breaks if it does not exist and only other entry points are used for a specific role, so I think this is a bug.
```
$ touch roles/test_role/tasks/main.yaml
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82241
|
https://github.com/ansible/ansible/pull/82248
|
a4b00793be46f703e32ee4c440f303d19d2c652d
|
d664f13b4a117b324f107b603e9b8e2bb9af50c5
| 2023-11-18T17:48:36Z |
python
| 2023-11-22T16:42:51Z |
test/integration/targets/handlers/roles/role-82241/handlers/main.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,241 |
handler include_tasks fails if no `main.yaml` in role tasks in Ansible 2.15 - worked in 2.14
|
### Summary
In Ansible 2.14.3, e.g. the package available in Debian Bullseye, `include_tasks` in a handler within a role always found the task file from `tasks` in that role, if it existed. When I install the current version using pip (2.15.6 at the time of writing) this stops working if `tasks/main.yaml` does not exist in the role - creating an empty `main.yaml` (e.g. `touch roles/role_name/tasks/main.yaml`) is sufficient for `include_tasks` in a handler to start working again.
I have manually downgraded to 2.14.3 with pip and verified that the same code works with 2.14.3 but not 2.15.6 on the same system, so this is not a difference between the OS package and the one pip installs, but a bug introduced between those two versions.
In my specific use case, I have a role that is designed to be used via several entry points and has no default `main.yaml`. This has been working fine but started erroring in version 2.15.6. It took quite a bit of testing to discover what was making it fail (and why other similar roles that use the same pattern but happen to have a `main.yaml` were still working).
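For orientation, the role layout being described is roughly the following (reconstructed from the Steps to Reproduce below; note the absence of `tasks/main.yaml`):
```
roles/
└── test_role/
    ├── handlers/
    │   └── main.yaml            # handler that does include_tasks
    └── tasks/
        ├── entry_point.yaml     # custom entry point used via tasks_from
        └── included_tasks.yaml  # file the handler fails to find on 2.15.6
```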
### Issue Type
Bug Report
### Component Name
ansible.builtin.import_tasks
### Ansible Version
```console
$ # Broken version
$ ansible --version
ansible [core 2.15.6]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
$ # Working version (after "pip install --upgrade ansible-core==2.14.3")
$ ansible --version
ansible [core 2.14.3]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
```
### OS / Environment
Debian 12.0
Ansible installed in Python 3 VirtualEnv with this `requirements.txt` however should be reproducible with just `ansible-core`:
```
ansible-core
# dnspython is required for community.general.dig lookup plugin
dnspython
# hvac required for Hashicorp Vault
hvac
# netaddr required for ansible.utils.ipaddr filter
netaddr
# To manage Windows systems
pywinrm[credssp]
```
### Steps to Reproduce
`site.yaml`:
```yaml
---
- name: Test
hosts: all
gather_facts: false # Not needed for example
tasks:
- ansible.builtin.import_role:
name: test_role
# Custom entry point (no main.yaml required in test_role/tasks)
tasks_from: entry_point.yaml
...
```
`roles/test_role/tasks/entry_point.yaml`:
```yaml
---
# This is not necessary to reproduce the problem, just
# illustrating include_tasks works here without `main.yaml`.
- name: Include tasks works here
ansible.builtin.include_tasks: included_tasks.yaml
# Somehow trigger the role's handler - how is not important,
# but running the handler is necessary to illustrate the problem.
- name: Trigger handler
ansible.builtin.debug:
changed_when: true # Force handler to always be notified
notify: Test handler
...
```
`roles/test_role/tasks/included_tasks.yaml`:
```yaml
---
- name: Included task
ansible.builtin.debug: msg="Included task"
...
```
`roles/test_role/handlers/main.yaml`:
```yaml
---
- name: Test handler
ansible.builtin.include_tasks: included_tasks.yaml
...
```
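As a point of comparison, a handler that spells the path out from the playbook directory is a possible workaround sketch (untested here; it assumes the lookup falls back to the playbook directory, which matches the `/tmp/test/included_tasks.yaml` path reported in the Actual Results error below):
```yaml
---
# Hypothetical alternative for roles/test_role/handlers/main.yaml
- name: Test handler
  # Give the path relative to the playbook directory instead of relying on
  # the role's tasks search path, which 2.15.6 appears to lose when
  # tasks/main.yaml is absent.
  ansible.builtin.include_tasks: roles/test_role/tasks/included_tasks.yaml
...
```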
### Expected Results
When the handler runs, the `included_tasks.yaml` file is found in the role's `tasks` folder (the expected behaviour can be seen by running with Ansible 2.14.3, or if an empty `main.yaml` has been added to `test_role/tasks`):
```
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Actual Results
```console
With Ansible 2.15.6, an error that `included_tasks.yaml` does not exist - in this example, despite it being included successfully in another task list. It is unexpected that `include_tasks`, even when used from a handler, cannot find a tasks file in the role's `tasks` directory (i.e. where all of a role's tasks live). This used to work fine (and still does if I downgrade `ansible-core` to a previous version):
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
fatal: [localhost]: FAILED! => {"reason": "Could not find or access '/tmp/test/included_tasks.yaml' on the Ansible Controller."}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=3 changed=1 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
Note that creating `roles/test_role/tasks/main.yaml` causes the same code to begin working with 2.15.6 as well as 2.14.3. Although `main.yaml` is the default entry point, I am not aware of any other Ansible functionality that breaks if it does not exist and only other entry points are used for a specific role, so I think this is a bug.
```
$ touch roles/test_role/tasks/main.yaml
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82241
|
https://github.com/ansible/ansible/pull/82248
|
a4b00793be46f703e32ee4c440f303d19d2c652d
|
d664f13b4a117b324f107b603e9b8e2bb9af50c5
| 2023-11-18T17:48:36Z |
python
| 2023-11-22T16:42:51Z |
test/integration/targets/handlers/roles/role-82241/tasks/entry_point.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,241 |
handler include_tasks fails if no `main.yaml` in role tasks in Ansible 2.15 - worked in 2.14
|
### Summary
In Ansible 2.14.3, e.g. the package available in Debian Bullseye, `include_tasks` in a handler within a role always found the task file from `tasks` in that role, if it existed. When I install the current version using pip (2.15.6 at the time of writing) this stops working if `tasks/main.yaml` does not exist in the role - creating an empty `main.yaml` (e.g. `touch roles/role_name/tasks/main.yaml`) is sufficient for `include_tasks` in a handler to start working again.
I have manually downgraded to 2.14.3 with pip and verified that the same code works with 2.14.3 but not 2.15.6 on the same system, so this is not a difference between the OS package and the one pip installs, but a bug introduced between those two versions.
In my specific use case, I have a role that is designed to be used via several entry points and has no default `main.yaml`. This has been working fine but started erroring in version 2.15.6. It took quite a bit of testing to discover what was making it fail (and why other similar roles that use the same pattern but happen to have a `main.yaml` were still working).
### Issue Type
Bug Report
### Component Name
ansible.builtin.import_tasks
### Ansible Version
```console
$ # Broken version
$ ansible --version
ansible [core 2.15.6]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
$ # Working version (after "pip install --upgrade ansible-core==2.14.3")
$ ansible --version
ansible [core 2.14.3]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
```
### OS / Environment
Debian 12.0
Ansible installed in Python 3 VirtualEnv with this `requirements.txt` however should be reproducible with just `ansible-core`:
```
ansible-core
# dnspython is required for community.general.dig lookup plugin
dnspython
# hvac required for Hashicorp Vault
hvac
# netaddr required for ansible.utils.ipaddr filter
netaddr
# To manage Windows systems
pywinrm[credssp]
```
### Steps to Reproduce
`site.yaml`:
```yaml
---
- name: Test
hosts: all
gather_facts: false # Not needed for example
tasks:
- ansible.builtin.import_role:
name: test_role
# Custom entry point (no main.yaml required in test_role/tasks)
tasks_from: entry_point.yaml
...
```
`roles/test_role/tasks/entry_point.yaml`:
```yaml
---
# This is not necessary to reproduce the problem, just
# illustrating include_tasks works here without `main.yaml`.
- name: Include tasks works here
ansible.builtin.include_tasks: included_tasks.yaml
# Somehow trigger the role's handler - how is not important,
# but running the handler is necessary to illustrate the problem.
- name: Trigger handler
ansible.builtin.debug:
changed_when: true # Force handler to always be notified
notify: Test handler
...
```
`roles/test_role/tasks/included_tasks.yaml`:
```yaml
---
- name: Included task
ansible.builtin.debug: msg="Included task"
...
```
`roles/test_role/handlers/main.yaml`:
```yaml
---
- name: Test handler
ansible.builtin.include_tasks: included_tasks.yaml
...
```
### Expected Results
When the handler runs, the `included_tasks.yaml` file is found in the role's `tasks` folder (the expected behaviour can be seen by running with Ansible 2.14.3, or if an empty `main.yaml` has been added to `test_role/tasks`):
```
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Actual Results
```console
With Ansible 2.15.6, an error that `included_tasks.yaml` does not exist - in this example, despite it being included successfully in another task list. It is unexpected that `include_tasks`, even when used from a handler, cannot find a tasks file in the role's `tasks` directory (i.e. where all of a role's tasks live). This used to work fine (and still does if I downgrade `ansible-core` to a previous version):
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
fatal: [localhost]: FAILED! => {"reason": "Could not find or access '/tmp/test/included_tasks.yaml' on the Ansible Controller."}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=3 changed=1 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
Note that creating `roles/test_role/tasks/main.yaml` causes the same code to begin working with 2.15.6 as well as 2.14.3. Although `main.yaml` is the default entry point, I am not aware of any other Ansible functionality that breaks if it does not exist and only other entry points are used for a specific role, so I think this is a bug.
```
$ touch roles/test_role/tasks/main.yaml
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82241
|
https://github.com/ansible/ansible/pull/82248
|
a4b00793be46f703e32ee4c440f303d19d2c652d
|
d664f13b4a117b324f107b603e9b8e2bb9af50c5
| 2023-11-18T17:48:36Z |
python
| 2023-11-22T16:42:51Z |
test/integration/targets/handlers/roles/role-82241/tasks/included_tasks.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,241 |
handler include_tasks fails if no `main.yaml` in role tasks in Ansible 2.15 - worked in 2.14
|
### Summary
In Ansible 2.14.3, e.g. the package available in Debian Bullseye, `include_tasks` in a handler within a role always found the task file from `tasks` in that role, if it existed. When I install the current version using pip (2.15.6 at the time of writing) this stops working if `tasks/main.yaml` does not exist in the role - creating an empty `main.yaml` (e.g. `touch roles/role_name/tasks/main.yaml`) is sufficient for `include_tasks` in a handler to start working again.
I have manually downgraded to 2.14.3 with pip and verified that the same code works with 2.14.3 but not 2.15.6 on the same system, so this is not a difference between the OS package and the one pip installs, but a bug introduced between those two versions.
In my specific use case, I have a role that is designed to be used via several entry points and has no default `main.yaml`. This has been working fine but started erroring in version 2.15.6. It took quite a bit of testing to discover what was making it fail (and why other similar roles that use the same pattern but happen to have a `main.yaml` were still working).
### Issue Type
Bug Report
### Component Name
ansible.builtin.import_tasks
### Ansible Version
```console
$ # Broken version
$ ansible --version
ansible [core 2.15.6]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
$ # Working version (after "pip install --upgrade ansible-core==2.14.3")
$ ansible --version
ansible [core 2.14.3]
config file = /home/laurence/Projects/ansible-home/ansible.cfg
configured module search path = ['/home/laurence/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/laurence/venvs/ansible/lib/python3.9/site-packages/ansible
ansible collection location = /home/laurence/.ansible/collections:/usr/share/ansible/collections
executable location = /home/laurence/venvs/ansible/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/home/laurence/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
```
### OS / Environment
Debian 12.0
Ansible installed in Python 3 VirtualEnv with this `requirements.txt` however should be reproducible with just `ansible-core`:
```
ansible-core
# dnspython is required for community.general.dig lookup plugin
dnspython
# hvac required for Hashicorp Vault
hvac
# netaddr required for ansible.utils.ipaddr filter
netaddr
# To manage Windows systems
pywinrm[credssp]
```
### Steps to Reproduce
`site.yaml`:
```yaml
---
- name: Test
hosts: all
gather_facts: false # Not needed for example
tasks:
- ansible.builtin.import_role:
name: test_role
# Custom entry point (no main.yaml required in test_role/tasks)
tasks_from: entry_point.yaml
...
```
`roles/test_role/tasks/entry_point.yaml`:
```yaml
---
# This is not necessary to reproduce the problem, just
# illustrating include_tasks works here without `main.yaml`.
- name: Include tasks works here
ansible.builtin.include_tasks: included_tasks.yaml
# Somehow trigger the role's handler - how is not important,
# but running the handler is necessary to illustrate the problem.
- name: Trigger handler
ansible.builtin.debug:
changed_when: true # Force handler to always be notified
notify: Test handler
...
```
`roles/test_role/tasks/included_tasks.yaml`:
```yaml
---
- name: Included task
ansible.builtin.debug: msg="Included task"
...
```
`roles/test_role/handlers/main.yaml`:
```yaml
---
- name: Test handler
ansible.builtin.include_tasks: included_tasks.yaml
...
```
### Expected Results
When the handler runs, the `included_tasks.yaml` file is found in the role's `tasks` folder (the expected behaviour can be seen by running with Ansible 2.14.3, or if an empty `main.yaml` has been added to `test_role/tasks`):
```
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Actual Results
```console
With Ansible 2.15.6, an error that `included_tasks.yaml` does not exist - in this example, despite it being included successfully in another task list. It is unexpected that `include_tasks`, even when used from a handler, cannot find a tasks file in the role's `tasks` directory (i.e. where all of a role's tasks live). This used to work fine (and still does if I downgrade `ansible-core` to a previous version):
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
fatal: [localhost]: FAILED! => {"reason": "Could not find or access '/tmp/test/included_tasks.yaml' on the Ansible Controller."}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=3 changed=1 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
Note that creating `roles/test_role/tasks/main.yaml` causes the same code to begin working with 2.15.6 as well as 2.14.3. Although `main.yaml` is the default entry point, I am not aware of any other Ansible functionality that breaks if it does not exist and only other entry points are used for a specific role, so I think this is a bug.
```
$ touch roles/test_role/tasks/main.yaml
$ ansible-playbook -i localhost, site.yaml
PLAY [Test] ********************************************************************************************************************************************************************************************************************
TASK [test_role : Include tasks works here] ************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
TASK [test_role : Included task] ***********************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
TASK [test_role : Trigger handler] *********************************************************************************************************************************************************************************************
changed: [localhost] => {
"msg": "Hello world!"
}
RUNNING HANDLER [test_role : Test handler] *************************************************************************************************************************************************************************************
included: /tmp/test/roles/test_role/tasks/included_tasks.yaml for localhost
RUNNING HANDLER [test_role : Included task] ************************************************************************************************************************************************************************************
ok: [localhost] => {
"msg": "Included task"
}
PLAY RECAP *********************************************************************************************************************************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82241
|
https://github.com/ansible/ansible/pull/82248
|
a4b00793be46f703e32ee4c440f303d19d2c652d
|
d664f13b4a117b324f107b603e9b8e2bb9af50c5
| 2023-11-18T17:48:36Z |
python
| 2023-11-22T16:42:51Z |
test/integration/targets/handlers/runme.sh
|
#!/usr/bin/env bash
set -eux
export ANSIBLE_FORCE_HANDLERS
ANSIBLE_FORCE_HANDLERS=false
# simple handler test
ansible-playbook test_handlers.yml -i inventory.handlers -v "$@" --tags scenario1
# simple from_handlers test
ansible-playbook from_handlers.yml -i inventory.handlers -v "$@" --tags scenario1
ansible-playbook test_listening_handlers.yml -i inventory.handlers -v "$@"
[ "$(ansible-playbook test_handlers.yml -i inventory.handlers -v "$@" --tags scenario2 -l A \
| grep -E -o 'RUNNING HANDLER \[test_handlers : .*]')" = "RUNNING HANDLER [test_handlers : test handler]" ]
# Test forcing handlers using the linear and free strategy
for strategy in linear free; do
export ANSIBLE_STRATEGY=$strategy
# Not forcing, should only run on successful host
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_B" ]
# Forcing from command line
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal --force-handlers \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_A CALLED_HANDLER_B" ]
# Forcing from command line, should only run later tasks on unfailed hosts
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal --force-handlers \
| grep -E -o CALLED_TASK_. | sort | uniq | xargs)" = "CALLED_TASK_B CALLED_TASK_D CALLED_TASK_E" ]
# Forcing from command line, should call handlers even if all hosts fail
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal --force-handlers -e fail_all=yes \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_A CALLED_HANDLER_B" ]
# Forcing from ansible.cfg
[ "$(ANSIBLE_FORCE_HANDLERS=true ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_A CALLED_HANDLER_B" ]
# Forcing true in play
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags force_true_in_play \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_A CALLED_HANDLER_B" ]
# Forcing false in play, which overrides command line
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags force_false_in_play --force-handlers \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_B" ]
# https://github.com/ansible/ansible/pull/80898
[ "$(ansible-playbook 80880.yml -i inventory.handlers -vv "$@" 2>&1)" ]
unset ANSIBLE_STRATEGY
done
[ "$(ansible-playbook test_handlers_include.yml -i ../../inventory -v "$@" --tags playbook_include_handlers \
| grep -E -o 'RUNNING HANDLER \[.*]')" = "RUNNING HANDLER [test handler]" ]
[ "$(ansible-playbook test_handlers_include.yml -i ../../inventory -v "$@" --tags role_include_handlers \
| grep -E -o 'RUNNING HANDLER \[test_handlers_include : .*]')" = "RUNNING HANDLER [test_handlers_include : test handler]" ]
[ "$(ansible-playbook test_handlers_include_role.yml -i ../../inventory -v "$@" \
| grep -E -o 'RUNNING HANDLER \[test_handlers_include_role : .*]')" = "RUNNING HANDLER [test_handlers_include_role : test handler]" ]
# Notify handler listen
ansible-playbook test_handlers_listen.yml -i inventory.handlers -v "$@"
# Notifying inexistent handlers results in an error
set +e
result="$(ansible-playbook test_handlers_inexistent_notify.yml -i inventory.handlers "$@" 2>&1)"
set -e
grep -q "ERROR! The requested handler 'notify_inexistent_handler' was not found in either the main handlers list nor in the listening handlers list" <<< "$result"
# Notifying inexistent handlers produces no errors when ANSIBLE_ERROR_ON_MISSING_HANDLER=false
ANSIBLE_ERROR_ON_MISSING_HANDLER=false ansible-playbook test_handlers_inexistent_notify.yml -i inventory.handlers -v "$@"
ANSIBLE_ERROR_ON_MISSING_HANDLER=false ansible-playbook test_templating_in_handlers.yml -v "$@"
# https://github.com/ansible/ansible/issues/36649
output_dir=/tmp
set +e
result="$(ansible-playbook test_handlers_any_errors_fatal.yml -e output_dir=$output_dir -i inventory.handlers -v "$@" 2>&1)"
set -e
[ ! -f $output_dir/should_not_exist_B ] || (rm -f $output_dir/should_not_exist_B && exit 1)
# https://github.com/ansible/ansible/issues/47287
[ "$(ansible-playbook test_handlers_including_task.yml -i ../../inventory -v "$@" | grep -E -o 'failed=[0-9]+')" = "failed=0" ]
# https://github.com/ansible/ansible/issues/71222
ansible-playbook test_role_handlers_including_tasks.yml -i ../../inventory -v "$@"
# https://github.com/ansible/ansible/issues/27237
set +e
result="$(ansible-playbook test_handlers_template_run_once.yml -i inventory.handlers "$@" 2>&1)"
set -e
grep -q "handler A" <<< "$result"
grep -q "handler B" <<< "$result"
# Test an undefined variable in another handler name isn't a failure
ansible-playbook 58841.yml "$@" --tags lazy_evaluation 2>&1 | tee out.txt ; cat out.txt
grep out.txt -e "\[WARNING\]: Handler 'handler name with {{ test_var }}' is unusable"
[ "$(grep out.txt -ce 'handler ran')" = "1" ]
[ "$(grep out.txt -ce 'handler with var ran')" = "0" ]
# Test templating a handler name with a defined variable
ansible-playbook 58841.yml "$@" --tags evaluation_time -e test_var=myvar | tee out.txt ; cat out.txt
[ "$(grep out.txt -ce 'handler ran')" = "0" ]
[ "$(grep out.txt -ce 'handler with var ran')" = "1" ]
# Test the handler is not found when the variable is undefined
ansible-playbook 58841.yml "$@" --tags evaluation_time 2>&1 | tee out.txt ; cat out.txt
grep out.txt -e "ERROR! The requested handler 'handler name with myvar' was not found"
grep out.txt -e "\[WARNING\]: Handler 'handler name with {{ test_var }}' is unusable"
[ "$(grep out.txt -ce 'handler ran')" = "0" ]
[ "$(grep out.txt -ce 'handler with var ran')" = "0" ]
# Test include_role and import_role cannot be used as handlers
ansible-playbook test_role_as_handler.yml "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! Using 'include_role' as a handler is not supported."
# Test notifying a handler from within include_tasks does not work anymore
ansible-playbook test_notify_included.yml "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'I was included')" = "1" ]
grep out.txt -e "ERROR! The requested handler 'handler_from_include' was not found in either the main handlers list nor in the listening handlers list"
ansible-playbook test_handlers_meta.yml -i inventory.handlers -vv "$@" | tee out.txt
[ "$(grep out.txt -ce 'RUNNING HANDLER \[noop_handler\]')" = "1" ]
[ "$(grep out.txt -ce 'META: noop')" = "1" ]
# https://github.com/ansible/ansible/issues/46447
set +e
test "$(ansible-playbook 46447.yml -i inventory.handlers -vv "$@" 2>&1 | grep -c 'SHOULD NOT GET HERE')"
set -e
# https://github.com/ansible/ansible/issues/52561
ansible-playbook 52561.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler1 ran')" = "1" ]
# Test flush_handlers meta task does not imply any_errors_fatal
ansible-playbook 54991.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran')" = "4" ]
ansible-playbook order.yml -i inventory.handlers "$@" 2>&1
set +e
ansible-playbook order.yml --force-handlers -e test_force_handlers=true -i inventory.handlers "$@" 2>&1
set -e
ansible-playbook include_handlers_fail_force.yml --force-handlers -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'included handler ran')" = "1" ]
ansible-playbook test_flush_handlers_as_handler.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! flush_handlers cannot be used as a handler"
ansible-playbook test_skip_flush.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran')" = "0" ]
ansible-playbook test_flush_in_rescue_always.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran in rescue')" = "1" ]
[ "$(grep out.txt -ce 'handler ran in always')" = "2" ]
[ "$(grep out.txt -ce 'lockstep works')" = "2" ]
ansible-playbook test_handlers_infinite_loop.yml -i inventory.handlers "$@" 2>&1
ansible-playbook test_flush_handlers_rescue_always.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'rescue ran')" = "1" ]
[ "$(grep out.txt -ce 'always ran')" = "2" ]
[ "$(grep out.txt -ce 'should run for both hosts')" = "2" ]
ansible-playbook test_fqcn_meta_flush_handlers.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
grep out.txt -e "handler ran"
grep out.txt -e "after flush"
ansible-playbook 79776.yml -i inventory.handlers "$@"
ansible-playbook test_block_as_handler.yml "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! Using a block as a handler is not supported."
ansible-playbook test_block_as_handler-include.yml "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! Using a block as a handler is not supported."
ansible-playbook test_block_as_handler-import.yml "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! Using a block as a handler is not supported."
ansible-playbook test_include_role_handler_once.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran')" = "1" ]
ansible-playbook test_listen_role_dedup.yml "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'a handler from a role')" = "1" ]
ansible localhost -m include_role -a "name=r1-dep_chain-vars" "$@"
ansible-playbook test_include_tasks_in_include_role.yml "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran')" = "1" ]
ansible-playbook test_run_once.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran once')" = "1" ]
ansible-playbook force_handlers_blocks_81533-1.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'task1')" = "1" ]
[ "$(grep out.txt -ce 'task2')" = "1" ]
[ "$(grep out.txt -ce 'hosts_left')" = "1" ]
ansible-playbook force_handlers_blocks_81533-2.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'hosts_left')" = "1" ]
ansible-playbook nested_flush_handlers_failure_force.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'flush_handlers_rescued')" = "1" ]
[ "$(grep out.txt -ce 'flush_handlers_always')" = "2" ]
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 80,267 |
`ansible.builtin.user`: Removing an already absent local user fails or produces a huge warning
|
### Summary
When I try to ensure that users are removed from a system, the task succeeds the first time; on the next run it fails because it cannot remove the non-existing user from the `/etc/passwd` file.
This issue could potentially be solved by adding the argument `local: true` to the task, but this results in a huge warning message for every user that should be removed and no longer exists: https://github.com/ansible/ansible/blob/ad9867ca5eb8ba27f827d5d5a7999cfb96ae0986/lib/ansible/modules/user.py#L1055-L1059
So either this behaviour is buggy when using `local: false`, or the warning from `local: true` should be removed (or only printed when debug is enabled).
### Issue Type
Bug Report
### Component Name
ansible.builtin.user
### Ansible Version
```console
$ ansible --version
ansible [core 2.14.3]
config file = /home/skraetzig/Git/infrastructure/ansible.cfg
configured module search path = ['/home/skraetzig/Git/infrastructure/ansible/library']
ansible python module location = /usr/local/lib/python3.9/dist-packages/ansible
ansible collection location = /usr/share/ansible/third-party/collections
executable location = /usr/local/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
ANSIBLE_FORCE_COLOR(env: ANSIBLE_FORCE_COLOR) = True
ANSIBLE_NOCOWS(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
ANSIBLE_PIPELINING(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
ANY_ERRORS_FATAL(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
COLLECTIONS_PATHS(/home/skraetzig/Git/infrastructure/ansible.cfg) = ['/usr/share/ansible/third-party/collections']
CONFIG_FILE() = /home/skraetzig/Git/infrastructure/ansible.cfg
DEFAULT_FILTER_PLUGIN_PATH(/home/skraetzig/Git/infrastructure/ansible.cfg) = ['/home/skraetzig/Git/infrastructure/ansible/filter_plugins']
DEFAULT_FORKS(/home/skraetzig/Git/infrastructure/ansible.cfg) = 50
DEFAULT_LOCAL_TMP(env: ANSIBLE_LOCAL_TEMP) = /tmp/ansible-local-35zs1vlt9t
DEFAULT_MODULE_PATH(/home/skraetzig/Git/infrastructure/ansible.cfg) = ['/home/skraetzig/Git/infrastructure/ansible/library']
DEFAULT_REMOTE_USER(/home/skraetzig/Git/infrastructure/ansible.cfg) = deploy
DEFAULT_ROLES_PATH(/home/skraetzig/Git/infrastructure/ansible.cfg) = ['/home/skraetzig/Git/infrastructure/ansible/roles', '/home/skraetzig/Git/infrastructure/ansible/actions', '/hom>
DIFF_ALWAYS(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
DISPLAY_SKIPPED_HOSTS(env: ANSIBLE_DISPLAY_SKIPPED_HOSTS) = True
INTERPRETER_PYTHON(/home/skraetzig/Git/infrastructure/ansible.cfg) = /usr/bin/python3
MAX_FILE_SIZE_FOR_DIFF(/home/skraetzig/Git/infrastructure/ansible.cfg) = 1044480
RETRY_FILES_ENABLED(/home/skraetzig/Git/infrastructure/ansible.cfg) = False
CALLBACK:
========
default:
_______
display_ok_hosts(env: ANSIBLE_DISPLAY_OK_HOSTS) = True
display_skipped_hosts(env: ANSIBLE_DISPLAY_SKIPPED_HOSTS) = True
CONNECTION:
==========
local:
_____
pipelining(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
paramiko_ssh:
____________
remote_user(/home/skraetzig/Git/infrastructure/ansible.cfg) = deploy
ssh_args(env: ANSIBLE_SSH_ARGS) = -C -o ControlMaster=auto -o ControlPersist=60s
psrp:
____
pipelining(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
ssh:
___
pipelining(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
remote_user(/home/skraetzig/Git/infrastructure/ansible.cfg) = deploy
ssh_args(env: ANSIBLE_SSH_ARGS) = -C -o ControlMaster=auto -o ControlPersist=60s
winrm:
_____
pipelining(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
```
### OS / Environment
Debian 10 (Buster) and 11 (Bullseye)
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
- name: remove users
ansible.builtin.user:
name: "{{ item }}"
state: absent
remove: true
with_items:
- user1
- user2
- user3
```
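For reference, the `local: true` variant mentioned in the Summary would look roughly like this; it is only a sketch of that workaround, and as described above it currently trades the failure for a warning for every already-absent user:
```yaml
- name: remove users (local variant, workaround sketch)
  ansible.builtin.user:
    name: "{{ item }}"
    state: absent
    remove: true
    local: true  # operate only on the local user database
  with_items:
    - user1
    - user2
    - user3
```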
### Expected Results
The listed users `user1`, `user2`, `user3` are successfully removed from the system if they exist; if not, the task should be successful without any warning.
### Actual Results
The first rollout works as expected: the users are successfully removed.
All subsequent rollouts then fail:
```console
TASK [users : remove users] ****************************************************
failed: [debian] (item=user1) => {"ansible_loop_var": "item", "changed": false, "item": "user1", "msg": "userdel: cannot remove entry 'user1' from /etc/passwd\n", "name": "user1", "rc": 1}
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/80267
|
https://github.com/ansible/ansible/pull/80291
|
d664f13b4a117b324f107b603e9b8e2bb9af50c5
|
e0bf76e3db3e007d039a0086276d35c28b90ff04
| 2023-03-21T20:34:12Z |
python
| 2023-11-23T14:25:35Z |
changelogs/fragments/80267-ansible_builtin_user-remove-user-not-found-warning.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 80,267 |
`ansible.builtin.user`: Removing an already absent local user fails or produces a huge warning
|
### Summary
When I try to ensure that users are removed from a system, the task succeeds the first time; on the next run it fails because it cannot remove the non-existing user from the `/etc/passwd` file.
This issue could potentially be solved by adding the argument `local: true` to the task, but this results in a huge warning message for every user that should be removed and no longer exists: https://github.com/ansible/ansible/blob/ad9867ca5eb8ba27f827d5d5a7999cfb96ae0986/lib/ansible/modules/user.py#L1055-L1059
So either this behaviour is buggy when using `local: false`, or the warning from `local: true` should be removed (or only printed when debug is enabled).
### Issue Type
Bug Report
### Component Name
ansible.builtin.user
### Ansible Version
```console
$ ansible --version
ansible [core 2.14.3]
config file = /home/skraetzig/Git/infrastructure/ansible.cfg
configured module search path = ['/home/skraetzig/Git/infrastructure/ansible/library']
ansible python module location = /usr/local/lib/python3.9/dist-packages/ansible
ansible collection location = /usr/share/ansible/third-party/collections
executable location = /usr/local/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
ANSIBLE_FORCE_COLOR(env: ANSIBLE_FORCE_COLOR) = True
ANSIBLE_NOCOWS(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
ANSIBLE_PIPELINING(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
ANY_ERRORS_FATAL(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
COLLECTIONS_PATHS(/home/skraetzig/Git/infrastructure/ansible.cfg) = ['/usr/share/ansible/third-party/collections']
CONFIG_FILE() = /home/skraetzig/Git/infrastructure/ansible.cfg
DEFAULT_FILTER_PLUGIN_PATH(/home/skraetzig/Git/infrastructure/ansible.cfg) = ['/home/skraetzig/Git/infrastructure/ansible/filter_plugins']
DEFAULT_FORKS(/home/skraetzig/Git/infrastructure/ansible.cfg) = 50
DEFAULT_LOCAL_TMP(env: ANSIBLE_LOCAL_TEMP) = /tmp/ansible-local-35zs1vlt9t
DEFAULT_MODULE_PATH(/home/skraetzig/Git/infrastructure/ansible.cfg) = ['/home/skraetzig/Git/infrastructure/ansible/library']
DEFAULT_REMOTE_USER(/home/skraetzig/Git/infrastructure/ansible.cfg) = deploy
DEFAULT_ROLES_PATH(/home/skraetzig/Git/infrastructure/ansible.cfg) = ['/home/skraetzig/Git/infrastructure/ansible/roles', '/home/skraetzig/Git/infrastructure/ansible/actions', '/hom>
DIFF_ALWAYS(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
DISPLAY_SKIPPED_HOSTS(env: ANSIBLE_DISPLAY_SKIPPED_HOSTS) = True
INTERPRETER_PYTHON(/home/skraetzig/Git/infrastructure/ansible.cfg) = /usr/bin/python3
MAX_FILE_SIZE_FOR_DIFF(/home/skraetzig/Git/infrastructure/ansible.cfg) = 1044480
RETRY_FILES_ENABLED(/home/skraetzig/Git/infrastructure/ansible.cfg) = False
CALLBACK:
========
default:
_______
display_ok_hosts(env: ANSIBLE_DISPLAY_OK_HOSTS) = True
display_skipped_hosts(env: ANSIBLE_DISPLAY_SKIPPED_HOSTS) = True
CONNECTION:
==========
local:
_____
pipelining(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
paramiko_ssh:
____________
remote_user(/home/skraetzig/Git/infrastructure/ansible.cfg) = deploy
ssh_args(env: ANSIBLE_SSH_ARGS) = -C -o ControlMaster=auto -o ControlPersist=60s
psrp:
____
pipelining(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
ssh:
___
pipelining(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
remote_user(/home/skraetzig/Git/infrastructure/ansible.cfg) = deploy
ssh_args(env: ANSIBLE_SSH_ARGS) = -C -o ControlMaster=auto -o ControlPersist=60s
winrm:
_____
pipelining(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
```
### OS / Environment
Debian 10 (Buster) and 11 (Bullseye)
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
- name: remove users
ansible.builtin.user:
name: "{{ item }}"
state: absent
remove: true
with_items:
- user1
- user2
- user3
```
### Expected Results
The listed users `user1`, `user2`, `user3` are successfully removed from the system if they exist; if not, the task should be successful without any warning.
### Actual Results
The first rollout works as expected and the users are removed successfully.
Every subsequent rollout then fails:
```console
TASK [users : remove users] ****************************************************
failed: [debian] (item=user1) => {"ansible_loop_var": "item", "changed": false, "item": "user1", "msg": "userdel: cannot remove entry 'user1' from /etc/passwd\n", "name": "user1", "rc": 1}
```
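For reference, a quick way to confirm on the target host whether the `/etc/passwd` entry actually survived the failed removal (standard commands, shown here only as an illustrative check):
```console
$ getent passwd user1
$ sudo grep '^user1:' /etc/passwd /etc/shadow
```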
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
| https://github.com/ansible/ansible/issues/80267 | https://github.com/ansible/ansible/pull/80291 | d664f13b4a117b324f107b603e9b8e2bb9af50c5 | e0bf76e3db3e007d039a0086276d35c28b90ff04 | 2023-03-21T20:34:12Z | python | 2023-11-23T14:25:35Z | lib/ansible/modules/user.py |
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Stephen Fromm <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
DOCUMENTATION = r'''
module: user
version_added: "0.2"
short_description: Manage user accounts
description:
- Manage user accounts and user attributes.
- For Windows targets, use the M(ansible.windows.win_user) module instead.
options:
name:
description:
- Name of the user to create, remove or modify.
type: str
required: true
aliases: [ user ]
uid:
description:
- Optionally sets the I(UID) of the user.
type: int
comment:
description:
- Optionally sets the description (aka I(GECOS)) of user account.
- On macOS, this defaults to the O(name) option.
type: str
hidden:
description:
- macOS only, optionally hide the user from the login window and system preferences.
- The default will be V(true) if the O(system) option is used.
type: bool
version_added: "2.6"
non_unique:
description:
- Optionally when used with the -u option, this option allows to change the user ID to a non-unique value.
type: bool
default: no
version_added: "1.1"
seuser:
description:
- Optionally sets the seuser type (user_u) on selinux enabled systems.
type: str
version_added: "2.1"
group:
description:
- Optionally sets the user's primary group (takes a group name).
- On macOS, this defaults to V('staff')
type: str
groups:
description:
- A list of supplementary groups which the user is also a member of.
- By default, the user is removed from all other groups. Configure O(append) to modify this.
- When set to an empty string V(''),
the user is removed from all groups except the primary group.
- Before Ansible 2.3, the only input format allowed was a comma separated string.
type: list
elements: str
append:
description:
- If V(true), add the user to the groups specified in O(groups).
- If V(false), user will only be added to the groups specified in O(groups),
removing them from all other groups.
type: bool
default: no
shell:
description:
- Optionally set the user's shell.
- On macOS, before Ansible 2.5, the default shell for non-system users was V(/usr/bin/false).
Since Ansible 2.5, the default shell for non-system users on macOS is V(/bin/bash).
- On other operating systems, the default shell is determined by the underlying tool
invoked by this module. See Notes for a per platform list of invoked tools.
type: str
home:
description:
- Optionally set the user's home directory.
type: path
skeleton:
description:
- Optionally set a home skeleton directory.
- Requires O(create_home) option!
type: str
version_added: "2.0"
password:
description:
- If provided, set the user's password to the provided encrypted hash (Linux) or plain text password (macOS).
- B(Linux/Unix/POSIX:) Enter the hashed password as the value.
- See L(FAQ entry,https://docs.ansible.com/ansible/latest/reference_appendices/faq.html#how-do-i-generate-encrypted-passwords-for-the-user-module)
for details on various ways to generate the hash of a password.
- To create an account with a locked/disabled password on Linux systems, set this to V('!') or V('*').
- To create an account with a locked/disabled password on OpenBSD, set this to V('*************').
- B(OS X/macOS:) Enter the cleartext password as the value. Be sure to take relevant security precautions.
- On macOS, the password specified in the C(password) option will always be set, regardless of whether the user account already exists or not.
- When the password is passed as an argument, the C(user) module will always return changed to C(true) for macOS systems,
since macOS no longer provides access to the hashed passwords directly.
type: str
state:
description:
- Whether the account should exist or not, taking action if the state is different from what is stated.
- See this L(FAQ entry,https://docs.ansible.com/ansible/latest/reference_appendices/faq.html#running-on-macos-as-a-target)
for additional requirements when removing users on macOS systems.
type: str
choices: [ absent, present ]
default: present
create_home:
description:
- Unless set to V(false), a home directory will be made for the user
when the account is created or if the home directory does not exist.
- Changed from O(createhome) to O(create_home) in Ansible 2.5.
type: bool
default: yes
aliases: [ createhome ]
move_home:
description:
- "If set to V(true) when used with O(home), attempt to move the user's old home
directory to the specified directory if it isn't there already and the old home exists."
type: bool
default: no
system:
description:
- When creating an account O(state=present), setting this to V(true) makes the user a system account.
- This setting cannot be changed on existing users.
type: bool
default: no
force:
description:
- This only affects O(state=absent), it forces removal of the user and associated directories on supported platforms.
- The behavior is the same as C(userdel --force), check the man page for C(userdel) on your system for details and support.
- When used with O(generate_ssh_key=yes) this forces an existing key to be overwritten.
type: bool
default: no
remove:
description:
- This only affects O(state=absent), it attempts to remove directories associated with the user.
- The behavior is the same as C(userdel --remove), check the man page for details and support.
type: bool
default: no
login_class:
description:
- Optionally sets the user's login class, a feature of most BSD OSs.
type: str
generate_ssh_key:
description:
- Whether to generate a SSH key for the user in question.
- This will B(not) overwrite an existing SSH key unless used with O(force=yes).
type: bool
default: no
version_added: "0.9"
ssh_key_bits:
description:
- Optionally specify number of bits in SSH key to create.
- The default value depends on ssh-keygen.
type: int
version_added: "0.9"
ssh_key_type:
description:
- Optionally specify the type of SSH key to generate.
- Available SSH key types will depend on implementation
present on target host.
type: str
default: rsa
version_added: "0.9"
ssh_key_file:
description:
- Optionally specify the SSH key filename.
- If this is a relative filename then it will be relative to the user's home directory.
- This parameter defaults to V(.ssh/id_rsa).
type: path
version_added: "0.9"
ssh_key_comment:
description:
- Optionally define the comment for the SSH key.
type: str
default: ansible-generated on $HOSTNAME
version_added: "0.9"
ssh_key_passphrase:
description:
- Set a passphrase for the SSH key.
- If no passphrase is provided, the SSH key will default to having no passphrase.
type: str
version_added: "0.9"
update_password:
description:
- V(always) will update passwords if they differ.
- V(on_create) will only set the password for newly created users.
type: str
choices: [ always, on_create ]
default: always
version_added: "1.3"
expires:
description:
- An expiry time for the user in epoch, it will be ignored on platforms that do not support this.
- Currently supported on GNU/Linux, FreeBSD, and DragonFlyBSD.
- Since Ansible 2.6 you can remove the expiry time by specifying a negative value.
Currently supported on GNU/Linux and FreeBSD.
type: float
version_added: "1.9"
password_lock:
description:
- Lock the password (C(usermod -L), C(usermod -U), C(pw lock)).
- Implementation differs by platform. This option does not always mean the user cannot login using other methods.
- This option does not disable the user, only lock the password.
- This must be set to V(False) in order to unlock a currently locked password. The absence of this parameter will not unlock a password.
- Currently supported on Linux, FreeBSD, DragonFlyBSD, NetBSD, OpenBSD.
type: bool
version_added: "2.6"
local:
description:
- Forces the use of "local" command alternatives on platforms that implement it.
- This is useful in environments that use centralized authentication when you want to manipulate the local users
(in other words, it uses C(luseradd) instead of C(useradd)).
- This will check C(/etc/passwd) for an existing account before invoking commands. If the local account database
exists somewhere other than C(/etc/passwd), this setting will not work properly.
- This requires that the above commands as well as C(/etc/passwd) must exist on the target host, otherwise it will be a fatal error.
type: bool
default: no
version_added: "2.4"
profile:
description:
- Sets the profile of the user.
- Can set multiple profiles using comma separation.
- To delete all the profiles, use O(profile='').
- Currently supported on Illumos/Solaris. Does nothing when used with other platforms.
type: str
version_added: "2.8"
authorization:
description:
- Sets the authorization of the user.
- Can set multiple authorizations using comma separation.
- To delete all authorizations, use O(authorization='').
- Currently supported on Illumos/Solaris. Does nothing when used with other platforms.
type: str
version_added: "2.8"
role:
description:
- Sets the role of the user.
- Can set multiple roles using comma separation.
- To delete all roles, use O(role='').
- Currently supported on Illumos/Solaris. Does nothing when used with other platforms.
type: str
version_added: "2.8"
password_expire_max:
description:
- Maximum number of days between password change.
- Supported on Linux only.
type: int
version_added: "2.11"
password_expire_min:
description:
- Minimum number of days between password change.
- Supported on Linux only.
type: int
version_added: "2.11"
password_expire_warn:
description:
- Number of days of warning before password expires.
- Supported on Linux only.
type: int
version_added: "2.16"
umask:
description:
- Sets the umask of the user.
- Currently supported on Linux. Does nothing when used with other platforms.
- Requires O(local) is omitted or V(False).
type: str
version_added: "2.12"
extends_documentation_fragment: action_common_attributes
attributes:
check_mode:
support: full
diff_mode:
support: none
platform:
platforms: posix
notes:
- There are specific requirements per platform on user management utilities. However
they generally come pre-installed with the system and Ansible will require they
are present at runtime. If they are not, a descriptive error message will be shown.
- On SunOS platforms, the shadow file is backed up automatically since this module edits it directly.
On other platforms, the shadow file is backed up by the underlying tools used by this module.
- On macOS, this module uses C(dscl) to create, modify, and delete accounts. C(dseditgroup) is used to
modify group membership. Accounts are hidden from the login window by modifying
C(/Library/Preferences/com.apple.loginwindow.plist).
- On FreeBSD, this module uses C(pw useradd) and C(chpass) to create, C(pw usermod) and C(chpass) to modify,
C(pw userdel) remove, C(pw lock) to lock, and C(pw unlock) to unlock accounts.
- On all other platforms, this module uses C(useradd) to create, C(usermod) to modify, and
C(userdel) to remove accounts.
seealso:
- module: ansible.posix.authorized_key
- module: ansible.builtin.group
- module: ansible.windows.win_user
author:
- Stephen Fromm (@sfromm)
'''
EXAMPLES = r'''
- name: Add the user 'johnd' with a specific uid and a primary group of 'admin'
ansible.builtin.user:
name: johnd
comment: John Doe
uid: 1040
group: admin
- name: Create a user 'johnd' with a home directory
ansible.builtin.user:
name: johnd
create_home: yes
- name: Add the user 'james' with a bash shell, appending the group 'admins' and 'developers' to the user's groups
ansible.builtin.user:
name: james
shell: /bin/bash
groups: admins,developers
append: yes
- name: Remove the user 'johnd'
ansible.builtin.user:
name: johnd
state: absent
remove: yes
- name: Create a 2048-bit SSH key for user jsmith in ~jsmith/.ssh/id_rsa
ansible.builtin.user:
name: jsmith
generate_ssh_key: yes
ssh_key_bits: 2048
ssh_key_file: .ssh/id_rsa
- name: Added a consultant whose account you want to expire
ansible.builtin.user:
name: james18
shell: /bin/zsh
groups: developers
expires: 1422403387
- name: Starting at Ansible 2.6, modify user, remove expiry time
ansible.builtin.user:
name: james18
expires: -1
- name: Set maximum expiration date for password
ansible.builtin.user:
name: ram19
password_expire_max: 10
- name: Set minimum expiration date for password
ansible.builtin.user:
name: pushkar15
password_expire_min: 5
- name: Set number of warning days for password expiration
ansible.builtin.user:
name: jane157
password_expire_warn: 30
'''
RETURN = r'''
append:
description: Whether or not to append the user to groups.
returned: When O(state) is V(present) and the user exists
type: bool
sample: True
comment:
description: Comment section from passwd file, usually the user name.
returned: When user exists
type: str
sample: Agent Smith
create_home:
description: Whether or not to create the home directory.
returned: When user does not exist and not check mode
type: bool
sample: True
force:
description: Whether or not a user account was forcibly deleted.
returned: When O(state) is V(absent) and user exists
type: bool
sample: False
group:
description: Primary user group ID
returned: When user exists
type: int
sample: 1001
groups:
description: List of groups of which the user is a member.
returned: When O(groups) is not empty and O(state) is V(present)
type: str
sample: 'chrony,apache'
home:
description: "Path to user's home directory."
returned: When O(state) is V(present)
type: str
sample: '/home/asmith'
move_home:
description: Whether or not to move an existing home directory.
returned: When O(state) is V(present) and user exists
type: bool
sample: False
name:
description: User account name.
returned: always
type: str
sample: asmith
password:
description: Masked value of the password.
returned: When O(state) is V(present) and O(password) is not empty
type: str
sample: 'NOT_LOGGING_PASSWORD'
remove:
description: Whether or not to remove the user account.
returned: When O(state) is V(absent) and user exists
type: bool
sample: True
shell:
description: User login shell.
returned: When O(state) is V(present)
type: str
sample: '/bin/bash'
ssh_fingerprint:
description: Fingerprint of generated SSH key.
returned: When O(generate_ssh_key) is V(True)
type: str
sample: '2048 SHA256:aYNHYcyVm87Igh0IMEDMbvW0QDlRQfE0aJugp684ko8 ansible-generated on host (RSA)'
ssh_key_file:
description: Path to generated SSH private key file.
returned: When O(generate_ssh_key) is V(True)
type: str
sample: /home/asmith/.ssh/id_rsa
ssh_public_key:
description: Generated SSH public key file.
returned: When O(generate_ssh_key) is V(True)
type: str
sample: >
'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC95opt4SPEC06tOYsJQJIuN23BbLMGmYo8ysVZQc4h2DZE9ugbjWWGS1/pweUGjVstgzMkBEeBCByaEf/RJKNecKRPeGd2Bw9DCj/bn5Z6rGfNENKBmo
618mUJBvdlEgea96QGjOwSB7/gmonduC7gsWDMNcOdSE3wJMTim4lddiBx4RgC9yXsJ6Tkz9BHD73MXPpT5ETnse+A3fw3IGVSjaueVnlUyUmOBf7fzmZbhlFVXf2Zi2rFTXqvbdGHKkzpw1U8eB8xFPP7y
d5u1u0e6Acju/8aZ/l17IDFiLke5IzlqIMRTEbDwLNeO84YQKWTm9fODHzhYe0yvxqLiK07 ansible-generated on host'
stderr:
description: Standard error from running commands.
returned: When stderr is returned by a command that is run
type: str
sample: Group wheels does not exist
stdout:
description: Standard output from running commands.
returned: When standard output is returned by the command that is run
type: str
sample:
system:
description: Whether or not the account is a system account.
returned: When O(system) is passed to the module and the account does not exist
type: bool
sample: True
uid:
description: User ID of the user account.
returned: When O(uid) is passed to the module
type: int
sample: 1044
'''
import ctypes.util
import grp
import calendar
import os
import re
import pty
import pwd
import select
import shutil
import socket
import subprocess
import time
import math
from ansible.module_utils import distro
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.locale import get_best_parsable_locale
from ansible.module_utils.common.sys_info import get_platform_subclass
import ansible.module_utils.compat.typing as t
class StructSpwdType(ctypes.Structure):
_fields_ = [
('sp_namp', ctypes.c_char_p),
('sp_pwdp', ctypes.c_char_p),
('sp_lstchg', ctypes.c_long),
('sp_min', ctypes.c_long),
('sp_max', ctypes.c_long),
('sp_warn', ctypes.c_long),
('sp_inact', ctypes.c_long),
('sp_expire', ctypes.c_long),
('sp_flag', ctypes.c_ulong),
]
try:
_LIBC = ctypes.cdll.LoadLibrary(
t.cast(
str,
ctypes.util.find_library('c')
)
)
_LIBC.getspnam.argtypes = (ctypes.c_char_p,)
_LIBC.getspnam.restype = ctypes.POINTER(StructSpwdType)
HAVE_SPWD = True
except AttributeError:
HAVE_SPWD = False
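# Illustrative note (not part of the upstream source): the ctypes binding above
# calls libc's getspnam(3) directly instead of importing the stdlib spwd module,
# which is deprecated by PEP 594 and removed in Python 3.13. HAVE_SPWD ends up
# False when the loaded libc does not expose getspnam (the AttributeError
# branch), in which case password/expiry data is read by parsing the shadow
# file instead (see user_password()/parse_shadow_file() below).
# The regular expression below backs check_password_encrypted(): the final
# field of a crypt(3)-style hash such as "$6$<salt>$<hash>" should only contain
# characters from [a-zA-Z0-9./=], so any other character is taken as a hint
# that a plain-text password was passed by mistake.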
_HASH_RE = re.compile(r'[^a-zA-Z0-9./=]')
def getspnam(b_name):
return _LIBC.getspnam(b_name).contents
class User(object):
"""
This is a generic User manipulation class that is subclassed
based on platform.
A subclass may wish to override the following action methods:-
- create_user()
- remove_user()
- modify_user()
- ssh_key_gen()
- ssh_key_fingerprint()
- user_exists()
All subclasses MUST define platform and distribution (which may be None).
"""
platform = 'Generic'
distribution = None # type: str | None
PASSWORDFILE = '/etc/passwd'
SHADOWFILE = '/etc/shadow' # type: str | None
SHADOWFILE_EXPIRE_INDEX = 7
LOGIN_DEFS = '/etc/login.defs'
DATE_FORMAT = '%Y-%m-%d'
def __new__(cls, *args, **kwargs):
new_cls = get_platform_subclass(User)
return super(cls, new_cls).__new__(new_cls)
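# Illustrative note (not part of the upstream source): get_platform_subclass()
# matches the running platform/distribution against the `platform` and
# `distribution` attributes of the User subclasses defined further down (e.g.
# FreeBsdUser, OpenBSDUser, NetBSDUser, SunOS), so User(module) transparently
# instantiates the most specific implementation for the target host.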
def __init__(self, module):
self.module = module
self.state = module.params['state']
self.name = module.params['name']
self.uid = module.params['uid']
self.hidden = module.params['hidden']
self.non_unique = module.params['non_unique']
self.seuser = module.params['seuser']
self.group = module.params['group']
self.comment = module.params['comment']
self.shell = module.params['shell']
self.password = module.params['password']
self.force = module.params['force']
self.remove = module.params['remove']
self.create_home = module.params['create_home']
self.move_home = module.params['move_home']
self.skeleton = module.params['skeleton']
self.system = module.params['system']
self.login_class = module.params['login_class']
self.append = module.params['append']
self.sshkeygen = module.params['generate_ssh_key']
self.ssh_bits = module.params['ssh_key_bits']
self.ssh_type = module.params['ssh_key_type']
self.ssh_comment = module.params['ssh_key_comment']
self.ssh_passphrase = module.params['ssh_key_passphrase']
self.update_password = module.params['update_password']
self.home = module.params['home']
self.expires = None
self.password_lock = module.params['password_lock']
self.groups = None
self.local = module.params['local']
self.profile = module.params['profile']
self.authorization = module.params['authorization']
self.role = module.params['role']
self.password_expire_max = module.params['password_expire_max']
self.password_expire_min = module.params['password_expire_min']
self.password_expire_warn = module.params['password_expire_warn']
self.umask = module.params['umask']
if self.umask is not None and self.local:
module.fail_json(msg="'umask' can not be used with 'local'")
if module.params['groups'] is not None:
self.groups = ','.join(module.params['groups'])
if module.params['expires'] is not None:
try:
self.expires = time.gmtime(module.params['expires'])
except Exception as e:
module.fail_json(msg="Invalid value for 'expires' %s: %s" % (self.expires, to_native(e)))
if module.params['ssh_key_file'] is not None:
self.ssh_file = module.params['ssh_key_file']
else:
self.ssh_file = os.path.join('.ssh', 'id_%s' % self.ssh_type)
if self.groups is None and self.append:
# Change the argument_spec in 2.14 and remove this warning
# required_by={'append': ['groups']}
module.warn("'append' is set, but no 'groups' are specified. Use 'groups' for appending new groups."
"This will change to an error in Ansible 2.14.")
def check_password_encrypted(self):
# Darwin needs cleartext password, so skip validation
if self.module.params['password'] and self.platform != 'Darwin':
maybe_invalid = False
# Allow setting certain passwords in order to disable the account
if self.module.params['password'] in set(['*', '!', '*************']):
maybe_invalid = False
else:
# : for delimiter, * for disable user, ! for lock user
# these characters are invalid in the password
if any(char in self.module.params['password'] for char in ':*!'):
maybe_invalid = True
if '$' not in self.module.params['password']:
maybe_invalid = True
else:
fields = self.module.params['password'].split("$")
if len(fields) >= 3:
# contains character outside the crypto constraint
if bool(_HASH_RE.search(fields[-1])):
maybe_invalid = True
# md5
if fields[1] == '1' and len(fields[-1]) != 22:
maybe_invalid = True
# sha256
if fields[1] == '5' and len(fields[-1]) != 43:
maybe_invalid = True
# sha512
if fields[1] == '6' and len(fields[-1]) != 86:
maybe_invalid = True
# yescrypt
if fields[1] == 'y' and len(fields[-1]) != 43:
maybe_invalid = True
else:
maybe_invalid = True
if maybe_invalid:
self.module.warn("The input password appears not to have been hashed. "
"The 'password' argument must be encrypted for this module to work properly.")
def execute_command(self, cmd, use_unsafe_shell=False, data=None, obey_checkmode=True):
if self.module.check_mode and obey_checkmode:
self.module.debug('In check mode, would have run: "%s"' % cmd)
return (0, '', '')
else:
# cast all args to strings ansible-modules-core/issues/4397
cmd = [str(x) for x in cmd]
return self.module.run_command(cmd, use_unsafe_shell=use_unsafe_shell, data=data)
def backup_shadow(self):
if not self.module.check_mode and self.SHADOWFILE:
return self.module.backup_local(self.SHADOWFILE)
def remove_user_userdel(self):
if self.local:
command_name = 'luserdel'
else:
command_name = 'userdel'
cmd = [self.module.get_bin_path(command_name, True)]
if self.force and not self.local:
cmd.append('-f')
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
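# Illustrative note (not part of the upstream source): for a task such as
# "state: absent, remove: true, force: true" the method above ends up running
# roughly "userdel -f -r <name>"; in local mode it runs "luserdel -r <name>"
# instead, since -f is only appended when local is not set.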
def create_user_useradd(self):
if self.local:
command_name = 'luseradd'
lgroupmod_cmd = self.module.get_bin_path('lgroupmod', True)
lchage_cmd = self.module.get_bin_path('lchage', True)
else:
command_name = 'useradd'
cmd = [self.module.get_bin_path(command_name, True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.seuser is not None:
cmd.append('-Z')
cmd.append(self.seuser)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
elif self.group_exists(self.name):
# use the -N option (no user group) if a group already
# exists with the same name as the user to prevent
# errors from useradd trying to create a group when
# USERGROUPS_ENAB is set in /etc/login.defs.
if self.local:
# luseradd uses -n instead of -N
cmd.append('-n')
else:
if os.path.exists('/etc/redhat-release'):
dist = distro.version()
major_release = int(dist.split('.')[0])
if major_release <= 5:
cmd.append('-n')
else:
cmd.append('-N')
elif os.path.exists('/etc/SuSE-release'):
# -N did not exist in useradd before SLE 11 and did not
# automatically create a group
dist = distro.version()
major_release = int(dist.split('.')[0])
if major_release >= 12:
cmd.append('-N')
else:
cmd.append('-N')
if self.groups is not None and len(self.groups):
groups = self.get_groups_set()
if not self.local:
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
# If the specified path to the user home contains parent directories that
# do not exist and create_home is True, first create the parent directory,
# since useradd cannot create it.
if self.create_home:
parent = os.path.dirname(self.home)
if not os.path.isdir(parent):
self.create_homedir(self.home)
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.expires is not None and not self.local:
cmd.append('-e')
if self.expires < time.gmtime(0):
cmd.append('')
else:
cmd.append(time.strftime(self.DATE_FORMAT, self.expires))
if self.password is not None:
cmd.append('-p')
if self.password_lock:
cmd.append('!%s' % self.password)
else:
cmd.append(self.password)
if self.create_home:
if not self.local:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
if self.umask is not None:
cmd.append('-K')
cmd.append('UMASK=' + self.umask)
else:
cmd.append('-M')
if self.system:
cmd.append('-r')
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
if not self.local or rc != 0:
return (rc, out, err)
if self.expires is not None:
if self.expires < time.gmtime(0):
lexpires = -1
else:
# Convert seconds since Epoch to days since Epoch
lexpires = int(math.floor(self.module.params['expires'])) // 86400
(rc, _out, _err) = self.execute_command([lchage_cmd, '-E', to_native(lexpires), self.name])
out += _out
err += _err
if rc != 0:
return (rc, out, err)
if self.groups is None or len(self.groups) == 0:
return (rc, out, err)
for add_group in groups:
(rc, _out, _err) = self.execute_command([lgroupmod_cmd, '-M', self.name, add_group])
out += _out
err += _err
if rc != 0:
return (rc, out, err)
return (rc, out, err)
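# Illustrative note (not part of the upstream source): in local mode the work
# above is deliberately split across several commands -- luseradd creates the
# account, then lchage -E applies the expiry (converted from seconds to days
# since the epoch) and lgroupmod -M adds the user to each supplementary group
# -- since -e and -G are only passed on the non-local useradd path.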
def _check_usermod_append(self):
# check if this version of usermod can append groups
if self.local:
command_name = 'lusermod'
else:
command_name = 'usermod'
usermod_path = self.module.get_bin_path(command_name, True)
# for some reason, usermod --help cannot be used by non root
# on RH/Fedora, due to lack of execute bit for others
if not os.access(usermod_path, os.X_OK):
return False
cmd = [usermod_path, '--help']
(rc, data1, data2) = self.execute_command(cmd, obey_checkmode=False)
helpout = data1 + data2
# check if --append exists
lines = to_native(helpout).split('\n')
for line in lines:
if line.strip().startswith('-a, --append'):
return True
return False
def modify_user_usermod(self):
if self.local:
command_name = 'lusermod'
lgroupmod_cmd = self.module.get_bin_path('lgroupmod', True)
lgroupmod_add = set()
lgroupmod_del = set()
lchage_cmd = self.module.get_bin_path('lchage', True)
lexpires = None
else:
command_name = 'usermod'
cmd = [self.module.get_bin_path(command_name, True)]
info = self.user_info()
has_append = self._check_usermod_append()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(ginfo[2])
if self.groups is not None:
# get a list of all groups for the user, including the primary
current_groups = self.user_group_membership(exclude_primary=False)
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set(remove_existing=False, names_only=True)
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
if has_append:
cmd.append('-a')
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
if self.local:
if self.append:
lgroupmod_add = set(groups).difference(current_groups)
lgroupmod_del = set()
else:
lgroupmod_add = set(groups).difference(current_groups)
lgroupmod_del = set(current_groups).difference(groups)
else:
if self.append and not has_append:
cmd.append('-A')
cmd.append(','.join(group_diff))
else:
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
cmd.append('-d')
cmd.append(self.home)
if self.move_home:
cmd.append('-m')
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.expires is not None:
current_expires = self.user_password()[1] or '0'
current_expires = int(current_expires)
if self.expires < time.gmtime(0):
if current_expires >= 0:
if self.local:
lexpires = -1
else:
cmd.append('-e')
cmd.append('')
else:
# Convert days since Epoch to seconds since Epoch as struct_time
current_expire_date = time.gmtime(current_expires * 86400)
# Current expires is negative or we compare year, month, and day only
if current_expires < 0 or current_expire_date[:3] != self.expires[:3]:
if self.local:
# Convert seconds since Epoch to days since Epoch
lexpires = int(math.floor(self.module.params['expires'])) // 86400
else:
cmd.append('-e')
cmd.append(time.strftime(self.DATE_FORMAT, self.expires))
# Lock if no password or unlocked, unlock only if locked
if self.password_lock and not info[1].startswith('!'):
cmd.append('-L')
elif self.password_lock is False and info[1].startswith('!'):
# usermod will refuse to unlock a user with no password, module shows 'changed' regardless
cmd.append('-U')
if self.update_password == 'always' and self.password is not None and info[1].lstrip('!') != self.password.lstrip('!'):
# Remove options that are mutually exclusive with -p
cmd = [c for c in cmd if c not in ['-U', '-L']]
cmd.append('-p')
if self.password_lock:
# Lock the account and set the hash in a single command
cmd.append('!%s' % self.password)
else:
cmd.append(self.password)
(rc, out, err) = (None, '', '')
# skip if no usermod changes to be made
if len(cmd) > 1:
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
if not self.local or not (rc is None or rc == 0):
return (rc, out, err)
if lexpires is not None:
(rc, _out, _err) = self.execute_command([lchage_cmd, '-E', to_native(lexpires), self.name])
out += _out
err += _err
if rc != 0:
return (rc, out, err)
if len(lgroupmod_add) == 0 and len(lgroupmod_del) == 0:
return (rc, out, err)
for add_group in lgroupmod_add:
(rc, _out, _err) = self.execute_command([lgroupmod_cmd, '-M', self.name, add_group])
out += _out
err += _err
if rc != 0:
return (rc, out, err)
for del_group in lgroupmod_del:
(rc, _out, _err) = self.execute_command([lgroupmod_cmd, '-m', self.name, del_group])
out += _out
err += _err
if rc != 0:
return (rc, out, err)
return (rc, out, err)
def group_exists(self, group):
try:
# Try group as a gid first
grp.getgrgid(int(group))
return True
except (ValueError, KeyError):
try:
grp.getgrnam(group)
return True
except KeyError:
return False
def group_info(self, group):
if not self.group_exists(group):
return False
try:
# Try group as a gid first
return list(grp.getgrgid(int(group)))
except (ValueError, KeyError):
return list(grp.getgrnam(group))
def get_groups_set(self, remove_existing=True, names_only=False):
if self.groups is None:
return None
info = self.user_info()
groups = set(x.strip() for x in self.groups.split(',') if x)
group_names = set()
for g in groups.copy():
if not self.group_exists(g):
self.module.fail_json(msg="Group %s does not exist" % (g))
group_info = self.group_info(g)
if info and remove_existing and group_info[2] == info[3]:
groups.remove(g)
elif names_only:
group_names.add(group_info[0])
if names_only:
return group_names
return groups
def user_group_membership(self, exclude_primary=True):
''' Return a list of groups the user belongs to '''
groups = []
info = self.get_pwd_info()
for group in grp.getgrall():
if self.name in group.gr_mem:
# Exclude the user's primary group by default
if not exclude_primary:
groups.append(group[0])
else:
if info[3] != group.gr_gid:
groups.append(group[0])
return groups
def user_exists(self):
# The pwd module does not distinguish between local and directory accounts.
# Its output cannot be used to determine whether or not an account exists locally.
# It returns True if the account exists locally or in the directory, so instead
# look in the local PASSWORD file for an existing account.
if self.local:
if not os.path.exists(self.PASSWORDFILE):
self.module.fail_json(msg="'local: true' specified but unable to find local account file {0} to parse.".format(self.PASSWORDFILE))
exists = False
name_test = '{0}:'.format(self.name)
with open(self.PASSWORDFILE, 'rb') as f:
reversed_lines = f.readlines()[::-1]
for line in reversed_lines:
if line.startswith(to_bytes(name_test)):
exists = True
break
if not exists:
self.module.warn(
"'local: true' specified and user '{name}' was not found in {file}. "
"The local user account may already exist if the local account database exists "
"somewhere other than {file}.".format(file=self.PASSWORDFILE, name=self.name))
return exists
else:
try:
if pwd.getpwnam(self.name):
return True
except KeyError:
return False
def get_pwd_info(self):
if not self.user_exists():
return False
return list(pwd.getpwnam(self.name))
def user_info(self):
if not self.user_exists():
return False
info = self.get_pwd_info()
if len(info[1]) == 1 or len(info[1]) == 0:
info[1] = self.user_password()[0]
return info
def set_password_expire(self):
min_needs_change = self.password_expire_min is not None
max_needs_change = self.password_expire_max is not None
warn_needs_change = self.password_expire_warn is not None
if HAVE_SPWD:
try:
shadow_info = getspnam(to_bytes(self.name))
except ValueError:
return None, '', ''
min_needs_change &= self.password_expire_min != shadow_info.sp_min
max_needs_change &= self.password_expire_max != shadow_info.sp_max
warn_needs_change &= self.password_expire_warn != shadow_info.sp_warn
if not (min_needs_change or max_needs_change or warn_needs_change):
return (None, '', '') # target state already reached
command_name = 'chage'
cmd = [self.module.get_bin_path(command_name, True)]
if min_needs_change:
cmd.extend(["-m", self.password_expire_min])
if max_needs_change:
cmd.extend(["-M", self.password_expire_max])
if warn_needs_change:
cmd.extend(["-W", self.password_expire_warn])
cmd.append(self.name)
return self.execute_command(cmd)
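# Illustrative note (not part of the upstream source): when any of the
# password_expire_min/max/warn options differ from the current shadow values,
# the call above boils down to something like
# "chage -m <min> -M <max> -W <warn> <name>", including only the flags whose
# values actually need to change.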
def user_password(self):
passwd = ''
expires = ''
if HAVE_SPWD:
try:
shadow_info = getspnam(to_bytes(self.name))
passwd = to_native(shadow_info.sp_pwdp)
expires = shadow_info.sp_expire
return passwd, expires
except ValueError:
return passwd, expires
if not self.user_exists():
return passwd, expires
elif self.SHADOWFILE:
passwd, expires = self.parse_shadow_file()
return passwd, expires
def parse_shadow_file(self):
passwd = ''
expires = ''
if os.path.exists(self.SHADOWFILE) and os.access(self.SHADOWFILE, os.R_OK):
with open(self.SHADOWFILE, 'r') as f:
for line in f:
if line.startswith('%s:' % self.name):
passwd = line.split(':')[1]
expires = line.split(':')[self.SHADOWFILE_EXPIRE_INDEX] or -1
return passwd, expires
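# Illustrative note (not part of the upstream source): /etc/shadow lines use
# the colon-separated layout name:password:lastchg:min:max:warn:inactive:expire:flag,
# which is why parse_shadow_file() reads field 1 for the hash and field
# SHADOWFILE_EXPIRE_INDEX for the expiry (7 here, 6 on the BSDs whose
# master.passwd layout differs).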
def get_ssh_key_path(self):
info = self.user_info()
if os.path.isabs(self.ssh_file):
ssh_key_file = self.ssh_file
else:
if not os.path.exists(info[5]) and not self.module.check_mode:
raise Exception('User %s home directory does not exist' % self.name)
ssh_key_file = os.path.join(info[5], self.ssh_file)
return ssh_key_file
def ssh_key_gen(self):
info = self.user_info()
overwrite = None
try:
ssh_key_file = self.get_ssh_key_path()
except Exception as e:
return (1, '', to_native(e))
ssh_dir = os.path.dirname(ssh_key_file)
if not os.path.exists(ssh_dir):
if self.module.check_mode:
return (0, '', '')
try:
os.mkdir(ssh_dir, int('0700', 8))
os.chown(ssh_dir, info[2], info[3])
except OSError as e:
return (1, '', 'Failed to create %s: %s' % (ssh_dir, to_native(e)))
if os.path.exists(ssh_key_file):
if self.force:
# ssh-keygen doesn't support overwriting the key interactively, so send 'y' to confirm
overwrite = 'y'
else:
return (None, 'Key already exists, use "force: yes" to overwrite', '')
cmd = [self.module.get_bin_path('ssh-keygen', True)]
cmd.append('-t')
cmd.append(self.ssh_type)
if self.ssh_bits > 0:
cmd.append('-b')
cmd.append(self.ssh_bits)
cmd.append('-C')
cmd.append(self.ssh_comment)
cmd.append('-f')
cmd.append(ssh_key_file)
if self.ssh_passphrase is not None:
if self.module.check_mode:
self.module.debug('In check mode, would have run: "%s"' % cmd)
return (0, '', '')
master_in_fd, slave_in_fd = pty.openpty()
master_out_fd, slave_out_fd = pty.openpty()
master_err_fd, slave_err_fd = pty.openpty()
env = os.environ.copy()
env['LC_ALL'] = get_best_parsable_locale(self.module)
try:
p = subprocess.Popen([to_bytes(c) for c in cmd],
stdin=slave_in_fd,
stdout=slave_out_fd,
stderr=slave_err_fd,
preexec_fn=os.setsid,
env=env)
out_buffer = b''
err_buffer = b''
while p.poll() is None:
r_list = select.select([master_out_fd, master_err_fd], [], [], 1)[0]
first_prompt = b'Enter passphrase (empty for no passphrase):'
second_prompt = b'Enter same passphrase again'
prompt = first_prompt
for fd in r_list:
if fd == master_out_fd:
chunk = os.read(master_out_fd, 10240)
out_buffer += chunk
if prompt in out_buffer:
os.write(master_in_fd, to_bytes(self.ssh_passphrase, errors='strict') + b'\r')
prompt = second_prompt
else:
chunk = os.read(master_err_fd, 10240)
err_buffer += chunk
if prompt in err_buffer:
os.write(master_in_fd, to_bytes(self.ssh_passphrase, errors='strict') + b'\r')
prompt = second_prompt
if b'Overwrite (y/n)?' in out_buffer or b'Overwrite (y/n)?' in err_buffer:
# The key was created between us checking for existence and now
return (None, 'Key already exists', '')
rc = p.returncode
out = to_native(out_buffer)
err = to_native(err_buffer)
except OSError as e:
return (1, '', to_native(e))
else:
cmd.append('-N')
cmd.append('')
(rc, out, err) = self.execute_command(cmd, data=overwrite)
if rc == 0 and not self.module.check_mode:
# If the keys were successfully created, we should be able
# to tweak ownership.
os.chown(ssh_key_file, info[2], info[3])
os.chown('%s.pub' % ssh_key_file, info[2], info[3])
return (rc, out, err)
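# Illustrative note (not part of the upstream source): with default options the
# command built above is roughly
# "ssh-keygen -t rsa -C 'ansible-generated on <host>' -f ~<user>/.ssh/id_rsa -N ''";
# the pty handling is only taken when a passphrase is supplied, presumably so it
# can be fed to ssh-keygen's interactive prompts rather than appearing on the
# command line.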
def ssh_key_fingerprint(self):
ssh_key_file = self.get_ssh_key_path()
if not os.path.exists(ssh_key_file):
return (1, 'SSH Key file %s does not exist' % ssh_key_file, '')
cmd = [self.module.get_bin_path('ssh-keygen', True)]
cmd.append('-l')
cmd.append('-f')
cmd.append(ssh_key_file)
return self.execute_command(cmd, obey_checkmode=False)
def get_ssh_public_key(self):
ssh_public_key_file = '%s.pub' % self.get_ssh_key_path()
try:
with open(ssh_public_key_file, 'r') as f:
ssh_public_key = f.read().strip()
except IOError:
return None
return ssh_public_key
def create_user(self):
# by default we use the create_user_useradd method
return self.create_user_useradd()
def remove_user(self):
# by default we use the remove_user_userdel method
return self.remove_user_userdel()
def modify_user(self):
# by default we use the modify_user_usermod method
return self.modify_user_usermod()
def create_homedir(self, path):
if not os.path.exists(path):
if self.skeleton is not None:
skeleton = self.skeleton
else:
skeleton = '/etc/skel'
if os.path.exists(skeleton) and skeleton != os.devnull:
try:
shutil.copytree(skeleton, path, symlinks=True)
except OSError as e:
self.module.exit_json(failed=True, msg="%s" % to_native(e))
else:
try:
os.makedirs(path)
except OSError as e:
self.module.exit_json(failed=True, msg="%s" % to_native(e))
# get umask from /etc/login.defs and set correct home mode
if os.path.exists(self.LOGIN_DEFS):
with open(self.LOGIN_DEFS, 'r') as f:
for line in f:
m = re.match(r'^UMASK\s+(\d+)$', line)
if m:
umask = int(m.group(1), 8)
mode = 0o777 & ~umask
try:
os.chmod(path, mode)
except OSError as e:
self.module.exit_json(failed=True, msg="%s" % to_native(e))
def chown_homedir(self, uid, gid, path):
try:
os.chown(path, uid, gid)
for root, dirs, files in os.walk(path):
for d in dirs:
os.chown(os.path.join(root, d), uid, gid)
for f in files:
os.chown(os.path.join(root, f), uid, gid)
except OSError as e:
self.module.exit_json(failed=True, msg="%s" % to_native(e))
# ===========================================
class FreeBsdUser(User):
"""
This is a FreeBSD User manipulation class - it uses the pw command
to manipulate the user database, followed by the chpass command
to change the password.
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'FreeBSD'
distribution = None
SHADOWFILE = '/etc/master.passwd'
SHADOWFILE_EXPIRE_INDEX = 6
DATE_FORMAT = '%d-%b-%Y'
def _handle_lock(self):
info = self.user_info()
if self.password_lock and not info[1].startswith('*LOCKED*'):
cmd = [
self.module.get_bin_path('pw', True),
'lock',
self.name
]
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
return self.execute_command(cmd)
elif self.password_lock is False and info[1].startswith('*LOCKED*'):
cmd = [
self.module.get_bin_path('pw', True),
'unlock',
self.name
]
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
return self.execute_command(cmd)
return (None, '', '')
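# Illustrative note (not part of the upstream source): on FreeBSD locking is
# done with "pw lock <name>" / "pw unlock <name>", and a locked account is
# recognised by the "*LOCKED*" prefix that pw puts in front of the password
# hash in /etc/master.passwd -- hence the startswith('*LOCKED*') checks above.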
def remove_user(self):
cmd = [
self.module.get_bin_path('pw', True),
'userdel',
'-n',
self.name
]
if self.remove:
cmd.append('-r')
return self.execute_command(cmd)
def create_user(self):
cmd = [
self.module.get_bin_path('pw', True),
'useradd',
'-n',
self.name,
]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.create_home:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
if self.umask is not None:
cmd.append('-K')
cmd.append('UMASK=' + self.umask)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
cmd.append('-L')
cmd.append(self.login_class)
if self.expires is not None:
cmd.append('-e')
if self.expires < time.gmtime(0):
cmd.append('0')
else:
cmd.append(str(calendar.timegm(self.expires)))
# system cannot be handled currently - should we error if it's requested?
# create the user
(rc, out, err) = self.execute_command(cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
# we have to set the password in a second command
if self.password is not None:
cmd = [
self.module.get_bin_path('chpass', True),
'-p',
self.password,
self.name
]
_rc, _out, _err = self.execute_command(cmd)
if rc is None:
rc = _rc
out += _out
err += _err
# we have to lock/unlock the password in a distinct command
_rc, _out, _err = self._handle_lock()
if rc is None:
rc = _rc
out += _out
err += _err
return (rc, out, err)
def modify_user(self):
cmd = [
self.module.get_bin_path('pw', True),
'usermod',
'-n',
self.name
]
cmd_len = len(cmd)
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
if (info[5] != self.home and self.move_home) or (not os.path.exists(self.home) and self.create_home):
cmd.append('-m')
if info[5] != self.home:
cmd.append('-d')
cmd.append(self.home)
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
if self.umask is not None:
cmd.append('-K')
cmd.append('UMASK=' + self.umask)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
# find current login class
user_login_class = None
if os.path.exists(self.SHADOWFILE) and os.access(self.SHADOWFILE, os.R_OK):
with open(self.SHADOWFILE, 'r') as f:
for line in f:
if line.startswith('%s:' % self.name):
user_login_class = line.split(':')[4]
# act only if login_class change
if self.login_class != user_login_class:
cmd.append('-L')
cmd.append(self.login_class)
if self.groups is not None:
current_groups = self.user_group_membership()
groups = self.get_groups_set(names_only=True)
group_diff = set(current_groups).symmetric_difference(groups)
groups_need_mod = False
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append('-G')
new_groups = groups
if self.append:
new_groups = groups | set(current_groups)
cmd.append(','.join(new_groups))
if self.expires is not None:
current_expires = self.user_password()[1] or '0'
current_expires = int(current_expires)
# If expiration is negative or zero and the current expiration is greater than zero, disable expiration.
# In OpenBSD, setting expiration to zero disables expiration. It does not expire the account.
if self.expires <= time.gmtime(0):
if current_expires > 0:
cmd.append('-e')
cmd.append('0')
else:
# Convert days since Epoch to seconds since Epoch as struct_time
current_expire_date = time.gmtime(current_expires)
# Current expires is negative or we compare year, month, and day only
if current_expires <= 0 or current_expire_date[:3] != self.expires[:3]:
cmd.append('-e')
cmd.append(str(calendar.timegm(self.expires)))
(rc, out, err) = (None, '', '')
# modify the user if cmd will do anything
if cmd_len != len(cmd):
(rc, _out, _err) = self.execute_command(cmd)
out += _out
err += _err
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
# we have to set the password in a second command
if self.update_password == 'always' and self.password is not None and info[1].lstrip('*LOCKED*') != self.password.lstrip('*LOCKED*'):
cmd = [
self.module.get_bin_path('chpass', True),
'-p',
self.password,
self.name
]
_rc, _out, _err = self.execute_command(cmd)
if rc is None:
rc = _rc
out += _out
err += _err
# we have to lock/unlock the password in a distinct command
_rc, _out, _err = self._handle_lock()
if rc is None:
rc = _rc
out += _out
err += _err
return (rc, out, err)
class DragonFlyBsdUser(FreeBsdUser):
"""
This is a DragonFlyBSD User manipulation class - it inherits the
FreeBsdUser class behaviors, such as using the pw command to
manipulate the user database, followed by the chpass command
to change the password.
"""
platform = 'DragonFly'
class OpenBSDUser(User):
"""
This is a OpenBSD User manipulation class.
Main differences are that OpenBSD:-
- has no concept of "system" account.
- has no force delete user
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'OpenBSD'
distribution = None
SHADOWFILE = '/etc/master.passwd'
def create_user(self):
cmd = [self.module.get_bin_path('useradd', True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
cmd.append('-L')
cmd.append(self.login_class)
if self.password is not None and self.password != '*':
cmd.append('-p')
cmd.append(self.password)
if self.create_home:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
if self.umask is not None:
cmd.append('-K')
cmd.append('UMASK=' + self.umask)
cmd.append(self.name)
return self.execute_command(cmd)
def remove_user_userdel(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def modify_user(self):
cmd = [self.module.get_bin_path('usermod', True)]
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups_option = '-S'
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set(names_only=True)
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_option = '-G'
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append(groups_option)
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
# find current login class
user_login_class = None
userinfo_cmd = [self.module.get_bin_path('userinfo', True), self.name]
(rc, out, err) = self.execute_command(userinfo_cmd, obey_checkmode=False)
for line in out.splitlines():
tokens = line.split()
if tokens[0] == 'class' and len(tokens) == 2:
user_login_class = tokens[1]
# act only if login_class change
if self.login_class != user_login_class:
cmd.append('-L')
cmd.append(self.login_class)
if self.password_lock and not info[1].startswith('*'):
cmd.append('-Z')
elif self.password_lock is False and info[1].startswith('*'):
cmd.append('-U')
if self.update_password == 'always' and self.password is not None \
and self.password != '*' and info[1] != self.password:
cmd.append('-p')
cmd.append(self.password)
# skip if no changes to be made
if len(cmd) == 1:
return (None, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
class NetBSDUser(User):
"""
This is a NetBSD User manipulation class.
Main differences are that NetBSD:-
- has no concept of "system" account.
- has no force delete user
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'NetBSD'
distribution = None
SHADOWFILE = '/etc/master.passwd'
def create_user(self):
cmd = [self.module.get_bin_path('useradd', True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
groups = self.get_groups_set()
if len(groups) > 16:
self.module.fail_json(msg="Too many groups (%d) NetBSD allows for 16 max." % len(groups))
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
cmd.append('-L')
cmd.append(self.login_class)
if self.password is not None:
cmd.append('-p')
cmd.append(self.password)
if self.create_home:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
if self.umask is not None:
cmd.append('-K')
cmd.append('UMASK=' + self.umask)
cmd.append(self.name)
return self.execute_command(cmd)
def remove_user_userdel(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def modify_user(self):
cmd = [self.module.get_bin_path('usermod', True)]
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set(names_only=True)
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups = set(current_groups).union(groups)
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
if len(groups) > 16:
self.module.fail_json(msg="Too many groups (%d) NetBSD allows for 16 max." % len(groups))
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.login_class is not None:
cmd.append('-L')
cmd.append(self.login_class)
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd.append('-p')
cmd.append(self.password)
if self.password_lock and not info[1].startswith('*LOCKED*'):
cmd.append('-C yes')
elif self.password_lock is False and info[1].startswith('*LOCKED*'):
cmd.append('-C no')
# skip if no changes to be made
if len(cmd) == 1:
return (None, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
class SunOS(User):
"""
This is a SunOS User manipulation class - The main difference between
this class and the generic user class is that Solaris-type distros
don't support the concept of a "system" account and we need to
edit the /etc/shadow file manually to set a password. (Ugh)
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
- user_info()
"""
platform = 'SunOS'
distribution = None
SHADOWFILE = '/etc/shadow'
USER_ATTR = '/etc/user_attr'
def get_password_defaults(self):
# Read password aging defaults
try:
minweeks = ''
maxweeks = ''
warnweeks = ''
with open("/etc/default/passwd", 'r') as f:
for line in f:
line = line.strip()
if (line.startswith('#') or line == ''):
continue
m = re.match(r'^([^#]*)#(.*)$', line)
if m: # The line contains a hash / comment
line = m.group(1)
key, value = line.split('=')
if key == "MINWEEKS":
minweeks = value.rstrip('\n')
elif key == "MAXWEEKS":
maxweeks = value.rstrip('\n')
elif key == "WARNWEEKS":
warnweeks = value.rstrip('\n')
except Exception as err:
self.module.fail_json(msg="failed to read /etc/default/passwd: %s" % to_native(err))
return (minweeks, maxweeks, warnweeks)
def remove_user(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def create_user(self):
cmd = [self.module.get_bin_path('useradd', True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.create_home:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
if self.umask is not None:
cmd.append('-K')
cmd.append('UMASK=' + self.umask)
if self.profile is not None:
cmd.append('-P')
cmd.append(self.profile)
if self.authorization is not None:
cmd.append('-A')
cmd.append(self.authorization)
if self.role is not None:
cmd.append('-R')
cmd.append(self.role)
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
if not self.module.check_mode:
# we have to set the password by editing the /etc/shadow file
if self.password is not None:
self.backup_shadow()
minweeks, maxweeks, warnweeks = self.get_password_defaults()
try:
lines = []
with open(self.SHADOWFILE, 'rb') as f:
for line in f:
line = to_native(line, errors='surrogate_or_strict')
fields = line.strip().split(':')
if not fields[0] == self.name:
lines.append(line)
continue
fields[1] = self.password
fields[2] = str(int(time.time() // 86400))
if minweeks:
try:
fields[3] = str(int(minweeks) * 7)
except ValueError:
# mirror solaris, which allows for any value in this field, and ignores anything that is not an int.
pass
if maxweeks:
try:
fields[4] = str(int(maxweeks) * 7)
except ValueError:
# mirror solaris, which allows for any value in this field, and ignores anything that is not an int.
pass
if warnweeks:
try:
fields[5] = str(int(warnweeks) * 7)
except ValueError:
# mirror solaris, which allows for any value in this field, and ignores anything that is not an int.
pass
line = ':'.join(fields)
lines.append('%s\n' % line)
with open(self.SHADOWFILE, 'w+') as f:
f.writelines(lines)
except Exception as err:
self.module.fail_json(msg="failed to update users password: %s" % to_native(err))
return (rc, out, err)
def modify_user_usermod(self):
cmd = [self.module.get_bin_path('usermod', True)]
cmd_len = len(cmd)
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups = self.get_groups_set(names_only=True)
group_diff = set(current_groups).symmetric_difference(groups)
groups_need_mod = False
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append('-G')
new_groups = groups
if self.append:
new_groups.update(current_groups)
cmd.append(','.join(new_groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.profile is not None and info[7] != self.profile:
cmd.append('-P')
cmd.append(self.profile)
if self.authorization is not None and info[8] != self.authorization:
cmd.append('-A')
cmd.append(self.authorization)
if self.role is not None and info[9] != self.role:
cmd.append('-R')
cmd.append(self.role)
# modify the user if cmd will do anything
if cmd_len != len(cmd):
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
else:
(rc, out, err) = (None, '', '')
# we have to set the password by editing the /etc/shadow file
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
self.backup_shadow()
(rc, out, err) = (0, '', '')
if not self.module.check_mode:
minweeks, maxweeks, warnweeks = self.get_password_defaults()
try:
lines = []
with open(self.SHADOWFILE, 'rb') as f:
for line in f:
line = to_native(line, errors='surrogate_or_strict')
fields = line.strip().split(':')
if not fields[0] == self.name:
lines.append(line)
continue
fields[1] = self.password
fields[2] = str(int(time.time() // 86400))
if minweeks:
fields[3] = str(int(minweeks) * 7)
if maxweeks:
fields[4] = str(int(maxweeks) * 7)
if warnweeks:
fields[5] = str(int(warnweeks) * 7)
line = ':'.join(fields)
lines.append('%s\n' % line)
with open(self.SHADOWFILE, 'w+') as f:
f.writelines(lines)
rc = 0
except Exception as err:
self.module.fail_json(msg="failed to update users password: %s" % to_native(err))
return (rc, out, err)
def user_info(self):
info = super(SunOS, self).user_info()
if info:
info += self._user_attr_info()
return info
def _user_attr_info(self):
info = [''] * 3
with open(self.USER_ATTR, 'r') as file_handler:
for line in file_handler:
lines = line.strip().split('::::')
if lines[0] == self.name:
tmp = dict(x.split('=') for x in lines[1].split(';'))
info[0] = tmp.get('profiles', '')
info[1] = tmp.get('auths', '')
info[2] = tmp.get('roles', '')
return info
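# Illustrative (assumed) /etc/user_attr entry that _user_attr_info() parses; with empty
# qualifier/res1/res2 fields the line splits on '::::' into the user name and a
# semicolon-separated attribute list:
#   jdoe::::profiles=System Administrator;auths=solaris.admin.usermgr.read;roles=root
# which yields info == ['System Administrator', 'solaris.admin.usermgr.read', 'root']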
class DarwinUser(User):
"""
This is a Darwin macOS User manipulation class.
Main differences are that Darwin:-
- Handles accounts in a database managed by dscl(1)
- Has no useradd/groupadd
- Does not create home directories
- User password must be cleartext
- UID must be given
- System users must be under 500
This overrides the following methods from the generic class:-
- user_exists()
- create_user()
- remove_user()
- modify_user()
"""
platform = 'Darwin'
distribution = None
SHADOWFILE = None
dscl_directory = '.'
fields = [
('comment', 'RealName'),
('home', 'NFSHomeDirectory'),
('shell', 'UserShell'),
('uid', 'UniqueID'),
('group', 'PrimaryGroupID'),
('hidden', 'IsHidden'),
]
def __init__(self, module):
super(DarwinUser, self).__init__(module)
# make the user hidden if option is set or defer to system option
if self.hidden is None:
if self.system:
self.hidden = 1
elif self.hidden:
self.hidden = 1
else:
self.hidden = 0
# add hidden to processing if set
if self.hidden is not None:
self.fields.append(('hidden', 'IsHidden'))
def _get_dscl(self):
return [self.module.get_bin_path('dscl', True), self.dscl_directory]
def _list_user_groups(self):
cmd = self._get_dscl()
cmd += ['-search', '/Groups', 'GroupMembership', self.name]
(rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
groups = []
for line in out.splitlines():
if line.startswith(' ') or line.startswith(')'):
continue
groups.append(line.split()[0])
return groups
def _get_user_property(self, property):
'''Return user PROPERTY as given by dscl(1) read, or None if not found.'''
cmd = self._get_dscl()
cmd += ['-read', '/Users/%s' % self.name, property]
(rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
if rc != 0:
return None
# from dscl(1)
# if property contains embedded spaces, the list will instead be
# displayed one entry per line, starting on the line after the key.
lines = out.splitlines()
# sys.stderr.write('*** |%s| %s -> %s\n' % (property, out, lines))
if len(lines) == 1:
return lines[0].split(': ')[1]
if len(lines) > 2:
return '\n'.join([lines[1].strip()] + lines[2:])
if len(lines) == 2:
return lines[1].strip()
return None
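# Illustrative (assumed) `dscl . -read /Users/<name> <Property>` outputs handled by
# _get_user_property() above:
#   "UserShell: /bin/bash"   -> returns "/bin/bash" (single-line form)
#   "RealName:\n John Doe"   -> returns "John Doe" (value printed on the following line)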
def _get_next_uid(self, system=None):
'''
Return the next available uid. If system=True, then
the uid should be below 500, if possible.
'''
cmd = self._get_dscl()
cmd += ['-list', '/Users', 'UniqueID']
(rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
if rc != 0:
self.module.fail_json(
msg="Unable to get the next available uid",
rc=rc,
out=out,
err=err
)
max_uid = 0
max_system_uid = 0
for line in out.splitlines():
current_uid = int(line.split(' ')[-1])
if max_uid < current_uid:
max_uid = current_uid
if max_system_uid < current_uid and current_uid < 500:
max_system_uid = current_uid
if system and (0 < max_system_uid < 499):
return max_system_uid + 1
return max_uid + 1
def _change_user_password(self):
'''Change password for SELF.NAME against SELF.PASSWORD.
Please note that password must be cleartext.
'''
# some documentation on how is stored passwords on OSX:
# http://blog.lostpassword.com/2012/07/cracking-mac-os-x-lion-accounts-passwords/
# http://null-byte.wonderhowto.com/how-to/hack-mac-os-x-lion-passwords-0130036/
# http://pastebin.com/RYqxi7Ca
# on OSX 10.8+ hash is SALTED-SHA512-PBKDF2
# https://pythonhosted.org/passlib/lib/passlib.hash.pbkdf2_digest.html
# https://gist.github.com/nueh/8252572
cmd = self._get_dscl()
if self.password:
cmd += ['-passwd', '/Users/%s' % self.name, self.password]
else:
cmd += ['-create', '/Users/%s' % self.name, 'Password', '*']
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Error when changing password', err=err, out=out, rc=rc)
return (rc, out, err)
def _make_group_numerical(self):
'''Convert SELF.GROUP to its numerical value as a string suitable for dscl.'''
if self.group is None:
self.group = 'nogroup'
try:
self.group = grp.getgrnam(self.group).gr_gid
except KeyError:
self.module.fail_json(msg='Group "%s" not found. Try to create it first using "group" module.' % self.group)
# We need to pass a string to dscl
self.group = str(self.group)
def __modify_group(self, group, action):
'''Add or remove SELF.NAME to or from GROUP depending on ACTION.
ACTION can be 'add' or 'remove' otherwise 'remove' is assumed. '''
if action == 'add':
option = '-a'
else:
option = '-d'
cmd = ['dseditgroup', '-o', 'edit', option, self.name, '-t', 'user', group]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot %s user "%s" to group "%s".'
% (action, self.name, group), err=err, out=out, rc=rc)
return (rc, out, err)
def _modify_group(self):
'''Synchronise SELF.NAME's supplementary group membership with SELF.GROUPS,
adding missing groups and, unless SELF.APPEND is set, removing extra ones.'''
rc = 0
out = ''
err = ''
changed = False
current = set(self._list_user_groups())
if self.groups is not None:
target = self.get_groups_set(names_only=True)
else:
target = set([])
if self.append is False:
for remove in current - target:
(_rc, _out, _err) = self.__modify_group(remove, 'delete')
rc += _rc
out += _out
err += _err
changed = True
for add in target - current:
(_rc, _out, _err) = self.__modify_group(add, 'add')
rc += _rc
out += _out
err += _err
changed = True
return (rc, out, err, changed)
def _update_system_user(self):
'''Hide or show user on the login window according to SELF.SYSTEM.
Returns 0 if a change has been made, None otherwise.'''
plist_file = '/Library/Preferences/com.apple.loginwindow.plist'
# http://support.apple.com/kb/HT5017?viewlocale=en_US
cmd = ['defaults', 'read', plist_file, 'HiddenUsersList']
(rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
# returned value is
# (
# "_userA",
# "_UserB",
# userc
# )
hidden_users = []
for x in out.splitlines()[1:-1]:
try:
x = x.split('"')[1]
except IndexError:
x = x.strip()
hidden_users.append(x)
if self.system:
if self.name not in hidden_users:
cmd = ['defaults', 'write', plist_file, 'HiddenUsersList', '-array-add', self.name]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot add user "%s" to hidden user list.' % self.name, err=err, out=out, rc=rc)
return 0
else:
if self.name in hidden_users:
del (hidden_users[hidden_users.index(self.name)])
cmd = ['defaults', 'write', plist_file, 'HiddenUsersList', '-array'] + hidden_users
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot remove user "%s" from hidden user list.' % self.name, err=err, out=out, rc=rc)
return 0
def user_exists(self):
'''Check if SELF.NAME is a known user on the system.'''
cmd = self._get_dscl()
cmd += ['-read', '/Users/%s' % self.name, 'UniqueID']
(rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
return rc == 0
def remove_user(self):
'''Delete SELF.NAME. If SELF.FORCE is true, remove its home directory.'''
info = self.user_info()
cmd = self._get_dscl()
cmd += ['-delete', '/Users/%s' % self.name]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot delete user "%s".' % self.name, err=err, out=out, rc=rc)
if self.force:
if os.path.exists(info[5]):
shutil.rmtree(info[5])
out += "Removed %s" % info[5]
return (rc, out, err)
def create_user(self, command_name='dscl'):
cmd = self._get_dscl()
cmd += ['-create', '/Users/%s' % self.name]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot create user "%s".' % self.name, err=err, out=out, rc=rc)
# Make the Gecos (alias display name) default to username
if self.comment is None:
self.comment = self.name
# Make user group default to 'staff'
if self.group is None:
self.group = 'staff'
self._make_group_numerical()
if self.uid is None:
self.uid = str(self._get_next_uid(self.system))
# Homedir is not created by default
if self.create_home:
if self.home is None:
self.home = '/Users/%s' % self.name
if not self.module.check_mode:
if not os.path.exists(self.home):
os.makedirs(self.home)
self.chown_homedir(int(self.uid), int(self.group), self.home)
# dscl sets shell to /usr/bin/false when UserShell is not specified
# so set the shell to /bin/bash when the user is not a system user
if not self.system and self.shell is None:
self.shell = '/bin/bash'
for field in self.fields:
if field[0] in self.__dict__ and self.__dict__[field[0]]:
cmd = self._get_dscl()
cmd += ['-create', '/Users/%s' % self.name, field[1], self.__dict__[field[0]]]
(rc, _out, _err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot add property "%s" to user "%s".' % (field[0], self.name), err=err, out=out, rc=rc)
out += _out
err += _err
if rc != 0:
return (rc, _out, _err)
(rc, _out, _err) = self._change_user_password()
out += _out
err += _err
self._update_system_user()
# here we don't care about change status since it is a creation,
# thus changed is always true.
if self.groups:
(rc, _out, _err, changed) = self._modify_group()
out += _out
err += _err
return (rc, out, err)
def modify_user(self):
changed = None
out = ''
err = ''
if self.group:
self._make_group_numerical()
for field in self.fields:
if field[0] in self.__dict__ and self.__dict__[field[0]]:
current = self._get_user_property(field[1])
if current is None or current != to_text(self.__dict__[field[0]]):
cmd = self._get_dscl()
cmd += ['-create', '/Users/%s' % self.name, field[1], self.__dict__[field[0]]]
(rc, _out, _err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg='Cannot update property "%s" for user "%s".'
% (field[0], self.name), err=err, out=out, rc=rc)
changed = rc
out += _out
err += _err
if self.update_password == 'always' and self.password is not None:
(rc, _out, _err) = self._change_user_password()
out += _out
err += _err
changed = rc
if self.groups:
(rc, _out, _err, _changed) = self._modify_group()
out += _out
err += _err
if _changed is True:
changed = rc
rc = self._update_system_user()
if rc == 0:
changed = rc
return (changed, out, err)
class AIX(User):
"""
This is an AIX User manipulation class.
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
- parse_shadow_file()
"""
platform = 'AIX'
distribution = None
SHADOWFILE = '/etc/security/passwd'
def remove_user(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def create_user_useradd(self, command_name='useradd'):
cmd = [self.module.get_bin_path(command_name, True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None and len(self.groups):
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.create_home:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
if self.umask is not None:
cmd.append('-K')
cmd.append('UMASK=' + self.umask)
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
# set password with chpasswd
if self.password is not None:
cmd = []
cmd.append(self.module.get_bin_path('chpasswd', True))
cmd.append('-e')
cmd.append('-c')
self.execute_command(cmd, data="%s:%s" % (self.name, self.password))
return (rc, out, err)
def modify_user_usermod(self):
cmd = [self.module.get_bin_path('usermod', True)]
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set(names_only=True)
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
# skip if no changes to be made
if len(cmd) == 1:
(rc, out, err) = (None, '', '')
else:
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
# set password with chpasswd
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd = []
cmd.append(self.module.get_bin_path('chpasswd', True))
cmd.append('-e')
cmd.append('-c')
(rc2, out2, err2) = self.execute_command(cmd, data="%s:%s" % (self.name, self.password))
else:
(rc2, out2, err2) = (None, '', '')
if rc is not None:
return (rc, out + out2, err + err2)
else:
return (rc2, out + out2, err + err2)
def parse_shadow_file(self):
"""Example AIX shadowfile data:
nobody:
password = *
operator1:
password = {ssha512}06$xxxxxxxxxxxx....
lastupdate = 1549558094
test1:
password = *
lastupdate = 1553695126
"""
b_name = to_bytes(self.name)
b_passwd = b''
b_expires = b''
if os.path.exists(self.SHADOWFILE) and os.access(self.SHADOWFILE, os.R_OK):
with open(self.SHADOWFILE, 'rb') as bf:
b_lines = bf.readlines()
b_passwd_line = b''
b_expires_line = b''
try:
for index, b_line in enumerate(b_lines):
# Get password and lastupdate lines which come after the username
if b_line.startswith(b'%s:' % b_name):
b_passwd_line = b_lines[index + 1]
b_expires_line = b_lines[index + 2]
break
# Sanity check the lines because sometimes both are not present
if b' = ' in b_passwd_line:
b_passwd = b_passwd_line.split(b' = ', 1)[-1].strip()
if b' = ' in b_expires_line:
b_expires = b_expires_line.split(b' = ', 1)[-1].strip()
except IndexError:
self.module.fail_json(msg='Failed to parse shadow file %s' % self.SHADOWFILE)
passwd = to_native(b_passwd)
expires = to_native(b_expires) or -1
return passwd, expires
class HPUX(User):
"""
This is an HP-UX User manipulation class.
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'HP-UX'
distribution = None
SHADOWFILE = '/etc/shadow'
def create_user(self):
cmd = ['/usr/sam/lbin/useradd.sam']
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None and len(self.groups):
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.password is not None:
cmd.append('-p')
cmd.append(self.password)
if self.create_home:
cmd.append('-m')
else:
cmd.append('-M')
if self.system:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def remove_user(self):
cmd = ['/usr/sam/lbin/userdel.sam']
if self.force:
cmd.append('-F')
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def modify_user(self):
cmd = ['/usr/sam/lbin/usermod.sam']
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set(remove_existing=False, names_only=True)
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append('-G')
new_groups = groups
if self.append:
new_groups = groups | set(current_groups)
cmd.append(','.join(new_groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
cmd.append('-d')
cmd.append(self.home)
if self.move_home:
cmd.append('-m')
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd.append('-F')
cmd.append('-p')
cmd.append(self.password)
# skip if no changes to be made
if len(cmd) == 1:
return (None, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
class BusyBox(User):
"""
This is the BusyBox class for use on systems that have adduser, deluser,
and delgroup commands. It overrides the following methods:
- create_user()
- remove_user()
- modify_user()
"""
def create_user(self):
cmd = [self.module.get_bin_path('adduser', True)]
cmd.append('-D')
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg='Group {0} does not exist'.format(self.group))
cmd.append('-G')
cmd.append(self.group)
if self.comment is not None:
cmd.append('-g')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-h')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if not self.create_home:
cmd.append('-H')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
if self.umask is not None:
cmd.append('-K')
cmd.append('UMASK=' + self.umask)
if self.system:
cmd.append('-S')
cmd.append(self.name)
rc, out, err = self.execute_command(cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
if self.password is not None:
cmd = [self.module.get_bin_path('chpasswd', True)]
cmd.append('--encrypted')
data = '{name}:{password}'.format(name=self.name, password=self.password)
rc, out, err = self.execute_command(cmd, data=data)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
# Add to additional groups
if self.groups is not None and len(self.groups):
groups = self.get_groups_set()
add_cmd_bin = self.module.get_bin_path('adduser', True)
for group in groups:
cmd = [add_cmd_bin, self.name, group]
rc, out, err = self.execute_command(cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
return rc, out, err
def remove_user(self):
cmd = [
self.module.get_bin_path('deluser', True),
self.name
]
if self.remove:
cmd.append('--remove-home')
return self.execute_command(cmd)
def modify_user(self):
current_groups = self.user_group_membership()
groups = []
rc = None
out = ''
err = ''
info = self.user_info()
add_cmd_bin = self.module.get_bin_path('adduser', True)
remove_cmd_bin = self.module.get_bin_path('delgroup', True)
# Manage group membership
if self.groups is not None and len(self.groups):
groups = self.get_groups_set()
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
for g in groups:
if g in group_diff:
add_cmd = [add_cmd_bin, self.name, g]
rc, out, err = self.execute_command(add_cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
for g in group_diff:
if g not in groups and not self.append:
remove_cmd = [remove_cmd_bin, self.name, g]
rc, out, err = self.execute_command(remove_cmd)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
# Manage password
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd = [self.module.get_bin_path('chpasswd', True)]
cmd.append('--encrypted')
data = '{name}:{password}'.format(name=self.name, password=self.password)
rc, out, err = self.execute_command(cmd, data=data)
if rc is not None and rc != 0:
self.module.fail_json(name=self.name, msg=err, rc=rc)
return rc, out, err
class Alpine(BusyBox):
"""
This is the Alpine User manipulation class. It inherits the BusyBox class
behaviors such as using adduser and deluser commands.
"""
platform = 'Linux'
distribution = 'Alpine'
def main():
ssh_defaults = dict(
bits=0,
type='rsa',
passphrase=None,
comment='ansible-generated on %s' % socket.gethostname()
)
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', default='present', choices=['absent', 'present']),
name=dict(type='str', required=True, aliases=['user']),
uid=dict(type='int'),
non_unique=dict(type='bool', default=False),
group=dict(type='str'),
groups=dict(type='list', elements='str'),
comment=dict(type='str'),
home=dict(type='path'),
shell=dict(type='str'),
password=dict(type='str', no_log=True),
login_class=dict(type='str'),
password_expire_max=dict(type='int', no_log=False),
password_expire_min=dict(type='int', no_log=False),
password_expire_warn=dict(type='int', no_log=False),
# following options are specific to macOS
hidden=dict(type='bool'),
# following options are specific to selinux
seuser=dict(type='str'),
# following options are specific to userdel
force=dict(type='bool', default=False),
remove=dict(type='bool', default=False),
# following options are specific to useradd
create_home=dict(type='bool', default=True, aliases=['createhome']),
skeleton=dict(type='str'),
system=dict(type='bool', default=False),
# following options are specific to usermod
move_home=dict(type='bool', default=False),
append=dict(type='bool', default=False),
# following are specific to ssh key generation
generate_ssh_key=dict(type='bool'),
ssh_key_bits=dict(type='int', default=ssh_defaults['bits']),
ssh_key_type=dict(type='str', default=ssh_defaults['type']),
ssh_key_file=dict(type='path'),
ssh_key_comment=dict(type='str', default=ssh_defaults['comment']),
ssh_key_passphrase=dict(type='str', no_log=True),
update_password=dict(type='str', default='always', choices=['always', 'on_create'], no_log=False),
expires=dict(type='float'),
password_lock=dict(type='bool', no_log=False),
local=dict(type='bool'),
profile=dict(type='str'),
authorization=dict(type='str'),
role=dict(type='str'),
umask=dict(type='str'),
),
supports_check_mode=True,
)
user = User(module)
user.check_password_encrypted()
module.debug('User instantiated - platform %s' % user.platform)
if user.distribution:
module.debug('User instantiated - distribution %s' % user.distribution)
rc = None
out = ''
err = ''
result = {}
result['name'] = user.name
result['state'] = user.state
if user.state == 'absent':
if user.user_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = user.remove_user()
if rc != 0:
module.fail_json(name=user.name, msg=err, rc=rc)
result['force'] = user.force
result['remove'] = user.remove
elif user.state == 'present':
if not user.user_exists():
if module.check_mode:
module.exit_json(changed=True)
# Check to see if the provided home path contains parent directories
# that do not exist.
path_needs_parents = False
if user.home and user.create_home:
parent = os.path.dirname(user.home)
if not os.path.isdir(parent):
path_needs_parents = True
(rc, out, err) = user.create_user()
# If the home path had parent directories that needed to be created,
# make sure file permissions are correct in the created home directory.
if path_needs_parents:
info = user.user_info()
if info is not False:
user.chown_homedir(info[2], info[3], user.home)
if module.check_mode:
result['system'] = user.name
else:
result['system'] = user.system
result['create_home'] = user.create_home
else:
# modify user (note: this function is check mode aware)
(rc, out, err) = user.modify_user()
result['append'] = user.append
result['move_home'] = user.move_home
if rc is not None and rc != 0:
module.fail_json(name=user.name, msg=err, rc=rc)
if user.password is not None:
result['password'] = 'NOT_LOGGING_PASSWORD'
if rc is None:
result['changed'] = False
else:
result['changed'] = True
if out:
result['stdout'] = out
if err:
result['stderr'] = err
if user.user_exists() and user.state == 'present':
info = user.user_info()
if info is False:
result['msg'] = "failed to look up user name: %s" % user.name
result['failed'] = True
result['uid'] = info[2]
result['group'] = info[3]
result['comment'] = info[4]
result['home'] = info[5]
result['shell'] = info[6]
if user.groups is not None:
result['groups'] = user.groups
# handle missing homedirs
info = user.user_info()
if user.home is None:
user.home = info[5]
if not os.path.exists(user.home) and user.create_home:
if not module.check_mode:
user.create_homedir(user.home)
user.chown_homedir(info[2], info[3], user.home)
result['changed'] = True
# deal with ssh key
if user.sshkeygen:
# generate ssh key (note: this function is check mode aware)
(rc, out, err) = user.ssh_key_gen()
if rc is not None and rc != 0:
module.fail_json(name=user.name, msg=err, rc=rc)
if rc == 0:
result['changed'] = True
(rc, out, err) = user.ssh_key_fingerprint()
if rc == 0:
result['ssh_fingerprint'] = out.strip()
else:
result['ssh_fingerprint'] = err.strip()
result['ssh_key_file'] = user.get_ssh_key_path()
result['ssh_public_key'] = user.get_ssh_public_key()
(rc, out, err) = user.set_password_expire()
if rc is None:
pass # target state reached, nothing to do
else:
if rc != 0:
module.fail_json(name=user.name, msg=err, rc=rc)
else:
result['changed'] = True
module.exit_json(**result)
# import module snippets
if __name__ == '__main__':
main()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 80,267 |
`ansible.builtin.user`: Removing an already absent local user fails or produces a huge warning
|
### Summary
When I try to ensure that users are removed from a system, the task succeeds the first time; on every subsequent run it fails because it cannot remove the non-existing user from the `/etc/passwd` file.
This issue could potentially be worked around by adding the argument `local: true` to the task, but that results in a huge warning message for every user that should be removed and no longer exists: https://github.com/ansible/ansible/blob/ad9867ca5eb8ba27f827d5d5a7999cfb96ae0986/lib/ansible/modules/user.py#L1055-L1059
So either the behaviour with `local: false` is buggy, or the warning produced with `local: true` should be removed (or only printed when debug is enabled).
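For illustration only, here is a minimal sketch (hypothetical — this is not the actual `ansible.builtin.user` code and not necessarily what a fix would look like) of the kind of guard that would make removing an already absent local account idempotent instead of failing or warning:
```python
# Hypothetical sketch: skip removal when the account is already absent.
# Assumes remove_user is some callable wrapping userdel/luserdel.
import pwd


def remove_if_present(name, remove_user):
    try:
        pwd.getpwnam(name)  # note: consults NSS, not only the local /etc/passwd file
    except KeyError:
        return (None, '', '')  # already absent -> report "no change" instead of failing
    return remove_user(name)
```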
### Issue Type
Bug Report
### Component Name
ansible.builtin.user
### Ansible Version
```console
$ ansible --version
ansible [core 2.14.3]
config file = /home/skraetzig/Git/infrastructure/ansible.cfg
configured module search path = ['/home/skraetzig/Git/infrastructure/ansible/library']
ansible python module location = /usr/local/lib/python3.9/dist-packages/ansible
ansible collection location = /usr/share/ansible/third-party/collections
executable location = /usr/local/bin/ansible
python version = 3.9.2 (default, Feb 28 2021, 17:03:44) [GCC 10.2.1 20210110] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
ANSIBLE_FORCE_COLOR(env: ANSIBLE_FORCE_COLOR) = True
ANSIBLE_NOCOWS(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
ANSIBLE_PIPELINING(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
ANY_ERRORS_FATAL(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
COLLECTIONS_PATHS(/home/skraetzig/Git/infrastructure/ansible.cfg) = ['/usr/share/ansible/third-party/collections']
CONFIG_FILE() = /home/skraetzig/Git/infrastructure/ansible.cfg
DEFAULT_FILTER_PLUGIN_PATH(/home/skraetzig/Git/infrastructure/ansible.cfg) = ['/home/skraetzig/Git/infrastructure/ansible/filter_plugins']
DEFAULT_FORKS(/home/skraetzig/Git/infrastructure/ansible.cfg) = 50
DEFAULT_LOCAL_TMP(env: ANSIBLE_LOCAL_TEMP) = /tmp/ansible-local-35zs1vlt9t
DEFAULT_MODULE_PATH(/home/skraetzig/Git/infrastructure/ansible.cfg) = ['/home/skraetzig/Git/infrastructure/ansible/library']
DEFAULT_REMOTE_USER(/home/skraetzig/Git/infrastructure/ansible.cfg) = deploy
DEFAULT_ROLES_PATH(/home/skraetzig/Git/infrastructure/ansible.cfg) = ['/home/skraetzig/Git/infrastructure/ansible/roles', '/home/skraetzig/Git/infrastructure/ansible/actions', '/hom>
DIFF_ALWAYS(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
DISPLAY_SKIPPED_HOSTS(env: ANSIBLE_DISPLAY_SKIPPED_HOSTS) = True
INTERPRETER_PYTHON(/home/skraetzig/Git/infrastructure/ansible.cfg) = /usr/bin/python3
MAX_FILE_SIZE_FOR_DIFF(/home/skraetzig/Git/infrastructure/ansible.cfg) = 1044480
RETRY_FILES_ENABLED(/home/skraetzig/Git/infrastructure/ansible.cfg) = False
CALLBACK:
========
default:
_______
display_ok_hosts(env: ANSIBLE_DISPLAY_OK_HOSTS) = True
display_skipped_hosts(env: ANSIBLE_DISPLAY_SKIPPED_HOSTS) = True
CONNECTION:
==========
local:
_____
pipelining(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
paramiko_ssh:
____________
remote_user(/home/skraetzig/Git/infrastructure/ansible.cfg) = deploy
ssh_args(env: ANSIBLE_SSH_ARGS) = -C -o ControlMaster=auto -o ControlPersist=60s
psrp:
____
pipelining(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
ssh:
___
pipelining(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
remote_user(/home/skraetzig/Git/infrastructure/ansible.cfg) = deploy
ssh_args(env: ANSIBLE_SSH_ARGS) = -C -o ControlMaster=auto -o ControlPersist=60s
winrm:
_____
pipelining(/home/skraetzig/Git/infrastructure/ansible.cfg) = True
```
### OS / Environment
Debian 10 (Buster) and 11 (Bullseye)
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
- name: remove users
ansible.builtin.user:
name: "{{ item }}"
state: absent
remove: true
with_items:
- user1
- user2
- user3
```
### Expected Results
The listed users `user1`, `user2`, `user3` are successfully removed from the system if they exist; if they do not exist, the task should succeed without any warning.
### Actual Results
The first rollout works as expected. The users get successfully removed.
All subsequent rollouts then fail:
```console
TASK [users : remove users] ****************************************************
failed: [debian] (item=user1) => {"ansible_loop_var": "item", "changed": false, "item": "user1", "msg": "userdel: cannot remove entry 'user1' from /etc/passwd\n", "name": "user1", "rc": 1}
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/80267
|
https://github.com/ansible/ansible/pull/80291
|
d664f13b4a117b324f107b603e9b8e2bb9af50c5
|
e0bf76e3db3e007d039a0086276d35c28b90ff04
| 2023-03-21T20:34:12Z |
python
| 2023-11-23T14:25:35Z |
test/integration/targets/user/tasks/test_local.yml
|
## Check local mode
# Even if we don't have a system that is bound to a directory, it's useful
# to run with local: true to exercise the code path that reads through the local
# user database file.
# https://github.com/ansible/ansible/issues/50947
- name: Create /etc/gshadow
file:
path: /etc/gshadow
state: touch
when: ansible_facts.os_family == 'Suse'
tags:
- user_test_local_mode
- name: Create /etc/libuser.conf
file:
path: /etc/libuser.conf
state: touch
when:
- ansible_facts.distribution == 'Ubuntu'
- ansible_facts.distribution_major_version is version_compare('16', '==')
tags:
- user_test_local_mode
- name: Ensure luseradd is present
action: "{{ ansible_facts.pkg_mgr }}"
args:
name: libuser
state: present
when: ansible_facts.system in ['Linux']
tags:
- user_test_local_mode
- name: Create local account that already exists to check for warning
user:
name: root
local: yes
register: local_existing
tags:
- user_test_local_mode
- name: Create local_ansibulluser
user:
name: local_ansibulluser
state: present
local: yes
register: local_user_test_1
tags:
- user_test_local_mode
- name: Create local_ansibulluser again
user:
name: local_ansibulluser
state: present
local: yes
register: local_user_test_2
tags:
- user_test_local_mode
- name: Remove local_ansibulluser
user:
name: local_ansibulluser
state: absent
remove: yes
local: yes
register: local_user_test_remove_1
tags:
- user_test_local_mode
- name: Remove local_ansibulluser again
user:
name: local_ansibulluser
state: absent
remove: yes
local: yes
register: local_user_test_remove_2
tags:
- user_test_local_mode
- name: Create test groups
group:
name: "{{ item }}"
loop:
- testgroup1
- testgroup2
- testgroup3
- testgroup4
- testgroup5
- testgroup6
- local_ansibulluser
tags:
- user_test_local_mode
register: test_groups
- name: Create local_ansibulluser with groups
user:
name: local_ansibulluser
state: present
local: yes
groups: ['testgroup1', 'testgroup2']
register: local_user_test_3
ignore_errors: yes
tags:
- user_test_local_mode
- name: Append groups for local_ansibulluser
user:
name: local_ansibulluser
state: present
local: yes
groups: ['testgroup3', 'testgroup4']
append: yes
register: local_user_test_4
ignore_errors: yes
tags:
- user_test_local_mode
- name: Append groups for local_ansibulluser (again)
user:
name: local_ansibulluser
state: present
local: yes
groups: ['testgroup3', 'testgroup4']
append: yes
register: local_user_test_4_again
ignore_errors: yes
tags:
- user_test_local_mode
- name: Test append without groups for local_ansibulluser
user:
name: local_ansibulluser
state: present
append: yes
register: local_user_test_5
ignore_errors: yes
tags:
- user_test_local_mode
- name: Assign named group for local_ansibulluser
user:
name: local_ansibulluser
state: present
local: yes
group: testgroup5
register: local_user_test_6
tags:
- user_test_local_mode
- name: Append groups for local_ansibulluser using group id
user:
name: local_ansibulluser
state: present
append: yes
groups: "{{ test_groups.results[5]['gid'] }}"
register: local_user_test_7
ignore_errors: yes
tags:
- user_test_local_mode
- name: Append groups for local_ansibulluser using gid (again)
user:
name: local_ansibulluser
state: present
append: yes
groups: "{{ test_groups.results[5]['gid'] }}"
register: local_user_test_7_again
ignore_errors: yes
tags:
- user_test_local_mode
# If we don't re-assign, then "Set user expiration" will
# fail.
- name: Re-assign named group for local_ansibulluser
user:
name: local_ansibulluser
state: present
local: yes
group: local_ansibulluser
ignore_errors: yes
tags:
- user_test_local_mode
- name: Remove local_ansibulluser again
user:
name: local_ansibulluser
state: absent
remove: yes
local: yes
tags:
- user_test_local_mode
- name: Remove test groups
group:
name: "{{ item }}"
state: absent
loop:
- testgroup1
- testgroup2
- testgroup3
- testgroup4
- testgroup5
- testgroup6
- local_ansibulluser
tags:
- user_test_local_mode
- name: Ensure local user accounts were created and removed properly
assert:
that:
- local_user_test_1 is changed
- local_user_test_2 is not changed
- local_user_test_3 is changed
- local_user_test_4 is changed
- local_user_test_4_again is not changed
- local_user_test_6 is changed
- local_user_test_7 is changed
- local_user_test_7_again is not changed
- local_user_test_remove_1 is changed
- local_user_test_remove_2 is not changed
tags:
- user_test_local_mode
- name: Ensure warnings were displayed properly
assert:
that:
- local_user_test_1['warnings'] | length > 0
- local_user_test_1['warnings'] | first is search('The local user account may already exist')
- local_user_test_5['warnings'] is search("'append' is set, but no 'groups' are specified. Use 'groups'")
- local_existing['warnings'] is not defined
when: ansible_facts.system in ['Linux']
tags:
- user_test_local_mode
- name: Test expires for local users
import_tasks: test_local_expires.yml
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,024 |
Remove RHEL 9.2 from ansible-test
|
### Summary
Remove RHEL 9.2 from ansible-test after a 2-week transition period following the addition of RHEL 9.3 to ansible-test. This is a remote VM removal.
### Issue Type
Feature Idea
### Component Name
`ansible-test`
|
https://github.com/ansible/ansible/issues/82024
|
https://github.com/ansible/ansible/pull/82211
|
e0bf76e3db3e007d039a0086276d35c28b90ff04
|
afd45aca6ada1dd21fc34a9ccb206ba1e185c883
| 2023-10-18T19:38:37Z |
python
| 2023-11-27T09:03:42Z |
changelogs/fragments/ansible-test-remove-rhel-9_2-remote.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,024 |
Remove RHEL 9.2 from ansible-test
|
### Summary
Remove RHEL 9.2 from ansible-test after a 2-week transition period following the addition of RHEL 9.3 to ansible-test. This is a remote VM removal.
### Issue Type
Feature Idea
### Component Name
`ansible-test`
|
https://github.com/ansible/ansible/issues/82024
|
https://github.com/ansible/ansible/pull/82211
|
e0bf76e3db3e007d039a0086276d35c28b90ff04
|
afd45aca6ada1dd21fc34a9ccb206ba1e185c883
| 2023-10-18T19:38:37Z |
python
| 2023-11-27T09:03:42Z |
test/lib/ansible_test/_data/completion/remote.txt
|
alpine/3.18 python=3.11 become=doas_sudo provider=aws arch=x86_64
alpine become=doas_sudo provider=aws arch=x86_64
fedora/38 python=3.11 become=sudo provider=aws arch=x86_64
fedora/39 python=3.12 become=sudo provider=aws arch=x86_64
fedora become=sudo provider=aws arch=x86_64
freebsd/13.2 python=3.9,3.11 python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
freebsd python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
macos/13.2 python=3.11 python_dir=/usr/local/bin become=sudo provider=parallels arch=x86_64
macos python_dir=/usr/local/bin become=sudo provider=parallels arch=x86_64
rhel/9.2 python=3.9,3.11 become=sudo provider=aws arch=x86_64
rhel/9.3 python=3.9,3.11 become=sudo provider=aws arch=x86_64
rhel become=sudo provider=aws arch=x86_64
ubuntu/22.04 python=3.10 become=sudo provider=aws arch=x86_64
ubuntu become=sudo provider=aws arch=x86_64
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,179 |
validate-modules does not catch all argument vs docs mismatches, specifically the choices field
|
### Summary
During a sanity check, if the choices for a parameter in the argument spec have the same length as the choices in the documentation but contain a repeated value, `ansible-test sanity --test validate-modules ****.py` does not detect the mismatch and passes.
sample as:
```e.g.
document defines:
caching:
description:
- Type of ***** caching.
type: str
choices:
- ReadOnly
- ReadWrite
argument spec defines: caching=dict(type='str', choices=['ReadOnly', 'ReadOnly'])
```
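For illustration, a duplicate-aware comparison could look roughly like the sketch below (a hypothetical example, not necessarily the change made for this issue). A plain length-plus-membership check such as `len(a) == len(b) and all(x in b for x in a)` treats `['ReadOnly', 'ReadOnly']` and `['ReadOnly', 'ReadWrite']` as equal, whereas counting occurrences does not:
```python
from collections import Counter


def compare_unordered_lists(a, b):
    """Compare two lists ignoring order but respecting duplicates."""
    try:
        return Counter(a) == Counter(b)  # fast path for hashable elements
    except TypeError:
        # unhashable elements (e.g. dicts): compare per-element counts instead
        return len(a) == len(b) and all(a.count(x) == b.count(x) for x in a)
```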
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console
$ ansible --version
ansible [core 2.13.2]
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/fred/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.8/dist-packages/ansible
ansible collection location = /home/fred/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible
python version = 3.8.10 (default, Mar 15 2022, 12:22:08) [GCC 9.4.0]
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
null
```
### OS / Environment
Ubuntu
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
ansible-test sanity --test validate-modules ***.py
```
### Expected Results
The sanity check should fail (it should not pass).
### Actual Results
```console
Check pass!
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82179
|
https://github.com/ansible/ansible/pull/82266
|
0806da55b13cbec202a6e8581340ce96f8c93ea5
|
e6e19e37f729e89060fdf313c24b91f2f1426bd3
| 2023-11-09T10:13:39Z |
python
| 2023-11-28T15:09:29Z |
test/integration/targets/ansible-test-sanity-validate-modules/ansible_collections/ns/col/plugins/modules/invalid_choice_value.py
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,179 |
validate-modules does not catch all argument vs docs mismatches, specifically the choices field
|
### Summary
During a sanity check, if the choices for a parameter in the argument spec have the same length as the choices in the documentation but contain a repeated value, `ansible-test sanity --test validate-modules ****.py` does not detect the mismatch and passes.
sample as:
```e.g.
document defines:
caching:
description:
- Type of ***** caching.
type: str
choices:
- ReadOnly
- ReadWrite
argument spec defines: caching=dict(type='str', choices=['ReadOnly', 'ReadOnly'])
```
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console
$ ansible --version
ansible [core 2.13.2]
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/fred/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.8/dist-packages/ansible
ansible collection location = /home/fred/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible
python version = 3.8.10 (default, Mar 15 2022, 12:22:08) [GCC 9.4.0]
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
null
```
### OS / Environment
Ubuntu
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
ansible-test sanity --test validate-modules ***.py
```
### Expected Results
The sanity check should fail (it should not pass).
### Actual Results
```console
Check pass!
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82179
|
https://github.com/ansible/ansible/pull/82266
|
0806da55b13cbec202a6e8581340ce96f8c93ea5
|
e6e19e37f729e89060fdf313c24b91f2f1426bd3
| 2023-11-09T10:13:39Z |
python
| 2023-11-28T15:09:29Z |
test/integration/targets/ansible-test-sanity-validate-modules/expected.txt
|
plugins/lookup/import_order_lookup.py:5:0: import-before-documentation: Import found before documentation variables. All imports must appear below DOCUMENTATION/EXAMPLES/RETURN.
plugins/modules/check_mode_attribute_1.py:0:0: attributes-check-mode: The module does not declare support for check mode, but the check_mode attribute's support value is 'full' and not 'none'
plugins/modules/check_mode_attribute_2.py:0:0: attributes-check-mode: The module does not declare support for check mode, but the check_mode attribute's support value is 'partial' and not 'none'
plugins/modules/check_mode_attribute_3.py:0:0: attributes-check-mode: The module does declare support for check mode, but the check_mode attribute's support value is 'none'
plugins/modules/check_mode_attribute_4.py:0:0: attributes-check-mode-details: The module declares it does not fully support check mode, but has no details on what exactly that means
plugins/modules/import_order.py:7:0: import-before-documentation: Import found before documentation variables. All imports must appear below DOCUMENTATION/EXAMPLES/RETURN.
plugins/modules/invalid_yaml_syntax.py:0:0: deprecation-mismatch: "meta/runtime.yml" and DOCUMENTATION.deprecation do not agree.
plugins/modules/invalid_yaml_syntax.py:0:0: missing-documentation: No DOCUMENTATION provided
plugins/modules/invalid_yaml_syntax.py:7:15: documentation-syntax-error: DOCUMENTATION is not valid YAML
plugins/modules/invalid_yaml_syntax.py:11:15: invalid-examples: EXAMPLES is not valid YAML
plugins/modules/invalid_yaml_syntax.py:15:15: return-syntax-error: RETURN is not valid YAML
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a11.suboptions.b1.description.0: While parsing "V(C\(" at index 1: Unnecessarily escaped "(" @ data['options']['a11']['suboptions']['b1']['description'][0]. Got 'V(C\\(foo\\)).'
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a11.suboptions.b1.description.2: While parsing "P(foo.bar#baz)" at index 1: Plugin name "foo.bar" is not a FQCN @ data['options']['a11']['suboptions']['b1']['description'][2]. Got 'P(foo.bar#baz).'
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a11.suboptions.b1.description.3: While parsing "P(foo.bar.baz)" at index 1: Parameter "foo.bar.baz" is not of the form FQCN#type @ data['options']['a11']['suboptions']['b1']['description'][3]. Got 'P(foo.bar.baz).'
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a11.suboptions.b1.description.4: Directive "P(foo.bar.baz#woof)" must contain a valid plugin type; found "woof" @ data['options']['a11']['suboptions']['b1']['description'][4]. Got 'P(foo.bar.baz#woof).'
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a11.suboptions.b1.description.5: While parsing "E(foo\(" at index 1: Unnecessarily escaped "(" @ data['options']['a11']['suboptions']['b1']['description'][5]. Got 'E(foo\\(bar).'
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a2.description: While parsing "V(C\(" at index 1: Unnecessarily escaped "(" for dictionary value @ data['options']['a2']['description']. Got 'V(C\\(foo\\)).'
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a4.description: While parsing "P(foo.bar#baz)" at index 1: Plugin name "foo.bar" is not a FQCN for dictionary value @ data['options']['a4']['description']. Got 'P(foo.bar#baz).'
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a5.description: While parsing "P(foo.bar.baz)" at index 1: Parameter "foo.bar.baz" is not of the form FQCN#type for dictionary value @ data['options']['a5']['description']. Got 'P(foo.bar.baz).'
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a6.description: Directive "P(foo.bar.baz#woof)" must contain a valid plugin type; found "woof" for dictionary value @ data['options']['a6']['description']. Got 'P(foo.bar.baz#woof).'
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: DOCUMENTATION.options.a7.description: While parsing "E(foo\(" at index 1: Unnecessarily escaped "(" for dictionary value @ data['options']['a7']['description']. Got 'E(foo\\(bar).'
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: Directive "O(bar)" contains a non-existing option "bar"
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: Directive "O(bar=bam)" contains a non-existing option "bar"
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: Directive "O(foo.bar=1)" contains a non-existing option "foo.bar"
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: Directive "RV(bam)" contains a non-existing return value "bam"
plugins/modules/semantic_markup.py:0:0: invalid-documentation-markup: Directive "RV(does.not.exist=true)" contains a non-existing return value "does.not.exist"
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,179 |
validate-modules does not catch all argument vs docs mismatches, specifically the choices field
|
### Summary
During a sanity check, if the choices for a parameter in the argument spec have the same length as the choices in the documentation but contain a repeated value, `ansible-test sanity --test validate-modules ****.py` does not detect the mismatch and passes.
sample as:
```e.g.
document defines:
caching:
description:
- Type of ***** caching.
type: str
choices:
- ReadOnly
- ReadWrite
argument spec defines: caching=dict(type='str', choices=['ReadOnly', 'ReadOnly'])
```
### Issue Type
Bug Report
### Component Name
ansible-test
### Ansible Version
```console
$ ansible --version
ansible [core 2.13.2]
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/fred/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.8/dist-packages/ansible
ansible collection location = /home/fred/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible
python version = 3.8.10 (default, Mar 15 2022, 12:22:08) [GCC 9.4.0]
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
null
```
### OS / Environment
Ubuntu
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
ansible-test sanity --test validate-modules ***.py
```
### Expected Results
The sanity check should fail (it should not pass).
### Actual Results
```console
Check pass!
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82179
|
https://github.com/ansible/ansible/pull/82266
|
0806da55b13cbec202a6e8581340ce96f8c93ea5
|
e6e19e37f729e89060fdf313c24b91f2f1426bd3
| 2023-11-09T10:13:39Z |
python
| 2023-11-28T15:09:29Z |
test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Matt Martz <[email protected]>
# Copyright (C) 2015 Rackspace US, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import ast
import datetime
import os
import re
import sys
from io import BytesIO, TextIOWrapper
import yaml
import yaml.reader
from ansible.module_utils.common.text.converters import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.yaml import SafeLoader
from ansible.module_utils.six import string_types
from ansible.parsing.yaml.loader import AnsibleLoader
class AnsibleTextIOWrapper(TextIOWrapper):
def write(self, s):
super(AnsibleTextIOWrapper, self).write(to_text(s, self.encoding, errors='replace'))
def find_executable(executable, cwd=None, path=None):
"""Finds the full path to the executable specified"""
match = None
real_cwd = os.getcwd()
if not cwd:
cwd = real_cwd
if os.path.dirname(executable):
target = os.path.join(cwd, executable)
if os.path.exists(target) and os.access(target, os.F_OK | os.X_OK):
match = executable
else:
path = os.environ.get('PATH', os.path.defpath)
path_dirs = path.split(os.path.pathsep)
seen_dirs = set()
for path_dir in path_dirs:
if path_dir in seen_dirs:
continue
seen_dirs.add(path_dir)
if os.path.abspath(path_dir) == real_cwd:
path_dir = cwd
candidate = os.path.join(path_dir, executable)
if os.path.exists(candidate) and os.access(candidate, os.F_OK | os.X_OK):
match = candidate
break
return match
def find_globals(g, tree):
"""Uses AST to find globals in an ast tree"""
for child in tree:
if hasattr(child, 'body') and isinstance(child.body, list):
find_globals(g, child.body)
elif isinstance(child, (ast.FunctionDef, ast.ClassDef)):
g.add(child.name)
continue
elif isinstance(child, ast.Assign):
try:
g.add(child.targets[0].id)
except (IndexError, AttributeError):
pass
elif isinstance(child, ast.Import):
g.add(child.names[0].name)
elif isinstance(child, ast.ImportFrom):
for name in child.names:
g_name = name.asname or name.name
if g_name == '*':
continue
g.add(g_name)
class CaptureStd():
"""Context manager to handle capturing stderr and stdout"""
def __enter__(self):
self.sys_stdout = sys.stdout
self.sys_stderr = sys.stderr
sys.stdout = self.stdout = AnsibleTextIOWrapper(BytesIO(), encoding=self.sys_stdout.encoding)
sys.stderr = self.stderr = AnsibleTextIOWrapper(BytesIO(), encoding=self.sys_stderr.encoding)
return self
def __exit__(self, exc_type, exc_value, traceback):
sys.stdout = self.sys_stdout
sys.stderr = self.sys_stderr
def get(self):
"""Return ``(stdout, stderr)``"""
return self.stdout.buffer.getvalue(), self.stderr.buffer.getvalue()
def get_module_name_from_filename(filename, collection):
# Calculate the module's name so that relative imports work correctly
if collection:
# collection is a relative path, example: ansible_collections/my_namespace/my_collection
# filename is a relative path, example: plugins/modules/my_module.py
path = os.path.join(collection, filename)
else:
# filename is a relative path, example: lib/ansible/modules/system/ping.py
path = os.path.relpath(filename, 'lib')
name = os.path.splitext(path)[0].replace(os.path.sep, '.')
return name
def parse_yaml(value, lineno, module, name, load_all=False, ansible_loader=False):
traces = []
errors = []
data = None
if load_all:
yaml_load = yaml.load_all
else:
yaml_load = yaml.load
if ansible_loader:
loader = AnsibleLoader
else:
loader = SafeLoader
try:
data = yaml_load(value, Loader=loader)
if load_all:
data = list(data)
except yaml.MarkedYAMLError as e:
errors.append({
'msg': '%s is not valid YAML' % name,
'line': e.problem_mark.line + lineno,
'column': e.problem_mark.column + 1
})
traces.append(e)
except yaml.reader.ReaderError as e:
traces.append(e)
# TODO: Better line/column detection
errors.append({
'msg': ('%s is not valid YAML. Character '
'0x%x at position %d.' % (name, e.character, e.position)),
'line': lineno
})
except yaml.YAMLError as e:
traces.append(e)
errors.append({
'msg': '%s is not valid YAML: %s: %s' % (name, type(e), e),
'line': lineno
})
return data, errors, traces
def is_empty(value):
"""Evaluate null like values excluding False"""
if value is False:
return False
return not bool(value)
def compare_unordered_lists(a, b):
"""Safe list comparisons
Supports:
- unordered lists
- unhashable elements
"""
return len(a) == len(b) and all(x in b for x in a)
class NoArgsAnsibleModule(AnsibleModule):
"""AnsibleModule that does not actually load params. This is used to get access to the
methods within AnsibleModule without having to fake a bunch of data
"""
def _load_params(self):
self.params = {'_ansible_selinux_special_fs': [], '_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False, '_ansible_check_mode': False}
def parse_isodate(v, allow_date):
if allow_date:
if isinstance(v, datetime.date):
return v
msg = 'Expected ISO 8601 date string (YYYY-MM-DD) or YAML date'
else:
msg = 'Expected ISO 8601 date string (YYYY-MM-DD)'
if not isinstance(v, string_types):
raise ValueError(msg)
# From Python 3.7 on, there is datetime.date.fromisoformat(). For older versions,
# we have to do things manually.
if not re.match('^[0-9]{4}-[0-9]{2}-[0-9]{2}$', v):
raise ValueError(msg)
try:
return datetime.datetime.strptime(v, '%Y-%m-%d').date()
except ValueError:
raise ValueError(msg)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,257 |
password_hash docs state salt option considered as type int
|
### Summary
The password_hash filter's salt option is described as a string but is typed as an int.
https://github.com/ansible/ansible/blob/fbdb666411f0d2c833e2a74cbf35593b22abb69f/lib/ansible/plugins/filter/password_hash.yml#L22
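For illustration only: the hash backends the filter relies on (passlib or the system crypt) take the salt as a short string, not an integer. A minimal sketch, assuming passlib is installed:
```python
# Minimal sketch (assumes the passlib library): sha512_crypt expects the salt
# to be a short string drawn from the ./0-9A-Za-z alphabet, not an int.
from passlib.hash import sha512_crypt

hashed = sha512_crypt.using(salt="mysalt", rounds=656000).hash("secret")
print(hashed)  # e.g. "$6$rounds=656000$mysalt$..."
```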
### Issue Type
Documentation Report
### Component Name
plugins filter
### Ansible Version
```console
$ ansible --version
devel branch of ansible
```
### Configuration
```console
Viewed in the code
```
### OS / Environment
Github
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
```
### Expected Results
It should be possible to pass a string in the salt option.
### Actual Results
```console
fatal: [127.0.0.1]: FAILED! => {"msg": "the field 'args' has an invalid value ({'msg': \"{{ _random_string_base64.stdout | password_hash('sha512','656000','$6$') }}\"}), and could not be converted to an dict.The error was: invalid literal for int() with base 10: '$6$'\n\nThe error appears to be in '/home/alexis/ansible-test/test.yml': line 24, column 7, but may\nbe elsewhere in the file depending on the exact syntax problem.\n\nThe offending line appears to be:\n\n msg: \"{{ _random_string_base64 }}\"\n - name: \"debug 2\"\n ^ here\n"}
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82257
|
https://github.com/ansible/ansible/pull/82274
|
265f5e724cdda586a6f898a9cd69431549f0154c
|
322eb0f884882fd47a2beca2569b3727b5ead93b
| 2023-11-21T09:17:17Z |
python
| 2023-11-28T15:23:39Z |
lib/ansible/plugins/filter/password_hash.yml
|
DOCUMENTATION:
name: password_hash
version_added: "historical"
short_description: convert input password into password_hash
description:
- Returns a password_hash of a secret.
positional: _input
notes:
- Algorithms available might be restricted by the system.
options:
_input:
description: Secret to hash.
type: string
required: true
hashtype:
description: Hashing algorithm to use.
type: string
default: sha512
choices: [ md5, blowfish, sha256, sha512 ]
salt:
description: Secret string that is used for the hashing; if none is provided, a random one can be generated.
type: int
rounds:
description: Number of encryption rounds, default varies by algorithm used.
type: int
ident:
description: Algorithm identifier.
type: string
EXAMPLES: |
# pwdhash => "$6$/bQCntzQ7VrgVcFa$VaMkmevkY1dqrx8neaenUDlVU.6L/.ojRbrnI4ID.yBHU6XON1cB422scCiXfUL5wRucMdLgJU0Fn38uoeBni/"
pwdhash: "{{ 'testing' | password_hash }}"
RETURN:
_value:
description: The resulting password hash.
type: string
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,244 |
ansible_processor_threads_per_core Ansible Facts incorrect against AMD Genoa based systems
|
### Summary
ansible_processor_threads_per_core shows incorrect information on AMD Genoa (AMD EPYC 9654P 96-Core Processor) based hosts.
Issue Description:
ansible_processor_threads_per_core returns 1 instead of 2 on a host where hyperthreading (HT) is enabled. The output of lscpu shows the correct information.
Setup module output:
```
"ansible_processor_threads_per_core": 1,
```
lscpu output:
```
Thread(s) per core: 2
```
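For context, threads per core can be derived from /proc/cpuinfo by dividing the 'siblings' value (logical CPUs per package) by the 'cpu cores' value (physical cores per package). A minimal, hypothetical sketch of that calculation (not the fact-gathering code itself):
```python
# Hypothetical sketch: compute threads-per-core from /proc/cpuinfo as
# siblings (logical CPUs per package) divided by cpu cores (physical cores).
def threads_per_core(cpuinfo_path="/proc/cpuinfo"):
    siblings = cores = None
    with open(cpuinfo_path) as f:
        for line in f:
            key, _, value = line.partition(":")
            key = key.strip()
            if key == "siblings":
                siblings = int(value)
            elif key == "cpu cores":
                cores = int(value)
            if siblings and cores:
                break
    return siblings // cores if siblings and cores else 1


print(threads_per_core())  # expected to print 2 on a host with SMT enabled
```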
Ansible Versions:
```
ansible [core 2.11.6]
python version = 3.6.8
jinja version = 2.11.3
```
### Issue Type
Bug Report
### Component Name
yum
### Ansible Version
```console
$ ansible --version
ansible [core 2.11.6]
python version = 3.6.8
jinja version = 2.11.3
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
Nothing returned
```
### OS / Environment
CentOS (CentOS Linux release 7.9.2009 (Core))
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
```
ansible -i /tmp/inv all -m setup -a 'gather_subset=!all,!any,virtual,network,hardware'
### Expected Results
Expected: "ansible_processor_threads_per_core": 2,
Getting: "ansible_processor_threads_per_core": 1,
### Actual Results
```console
Details given as above
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82244
|
https://github.com/ansible/ansible/pull/82261
|
fd2d0ecfb7d2fbadcfd41690aeb56067c8a04f82
|
e80507af32fad1ccaa62f8e6630f9095fe253004
| 2023-11-20T06:30:45Z |
python
| 2023-11-28T15:49:52Z |
changelogs/fragments/thread_counts.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,244 |
ansible_processor_threads_per_core Ansible Facts incorrect against AMD Genoa based systems
|
### Summary
ansible_processor_threads_per_core shows incorrect information on AMD Genoa (AMD EPYC 9654P 96-Core Processor) based hosts.
Issue Description:
ansible_processor_threads_per_core returns 1 instead of 2 on a host where hyperthreading (HT) is enabled. The output of lscpu shows the correct information.
Setup module output:
```
"ansible_processor_threads_per_core": 1,
```
lscpu output:
```
Thread(s) per core: 2
```
Ansible Versions:
```
ansible [core 2.11.6]
python version = 3.6.8
jinja version = 2.11.3
```
### Issue Type
Bug Report
### Component Name
yum
### Ansible Version
```console
$ ansible --version
ansible [core 2.11.6]
python version = 3.6.8
jinja version = 2.11.3
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
Nothing returned
```
### OS / Environment
CentOS (CentOS Linux release 7.9.2009 (Core))
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
```
ansible -i /tmp/inv all -m setup -a 'gather_subset=!all,!any,virtual,network,hardware'
### Expected Results
Expected: "ansible_processor_threads_per_core": 2,
Getting: "ansible_processor_threads_per_core": 1,
### Actual Results
```console
Details given as above
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82244
|
https://github.com/ansible/ansible/pull/82261
|
fd2d0ecfb7d2fbadcfd41690aeb56067c8a04f82
|
e80507af32fad1ccaa62f8e6630f9095fe253004
| 2023-11-20T06:30:45Z |
python
| 2023-11-28T15:49:52Z |
lib/ansible/module_utils/facts/hardware/linux.py
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import collections
import errno
import glob
import json
import os
import re
import sys
import time
from multiprocessing import cpu_count
from multiprocessing.pool import ThreadPool
from ansible.module_utils.common.text.converters import to_text
from ansible.module_utils.common.locale import get_best_parsable_locale
from ansible.module_utils.common.process import get_bin_path
from ansible.module_utils.common.text.formatters import bytes_to_human
from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
from ansible.module_utils.facts.utils import get_file_content, get_file_lines, get_mount_size
from ansible.module_utils.six import iteritems
# import this as a module to ensure we get the same module instance
from ansible.module_utils.facts import timeout
def get_partition_uuid(partname):
try:
uuids = os.listdir("/dev/disk/by-uuid")
except OSError:
return
for uuid in uuids:
dev = os.path.realpath("/dev/disk/by-uuid/" + uuid)
if dev == ("/dev/" + partname):
return uuid
return None
class LinuxHardware(Hardware):
"""
Linux-specific subclass of Hardware. Defines memory and CPU facts:
- memfree_mb
- memtotal_mb
- swapfree_mb
- swaptotal_mb
- processor (a list)
- processor_cores
- processor_count
In addition, it also defines number of DMI facts and device facts.
"""
platform = 'Linux'
# Originally only had these four as toplevelfacts
ORIGINAL_MEMORY_FACTS = frozenset(('MemTotal', 'SwapTotal', 'MemFree', 'SwapFree'))
# Now we have all of these in a dict structure
MEMORY_FACTS = ORIGINAL_MEMORY_FACTS.union(('Buffers', 'Cached', 'SwapCached'))
# regex used against findmnt output to detect bind mounts
BIND_MOUNT_RE = re.compile(r'.*\]')
# regex used against mtab content to find entries that are bind mounts
MTAB_BIND_MOUNT_RE = re.compile(r'.*bind.*"')
# regex used for replacing octal escape sequences
OCTAL_ESCAPE_RE = re.compile(r'\\[0-9]{3}')
def populate(self, collected_facts=None):
hardware_facts = {}
locale = get_best_parsable_locale(self.module)
self.module.run_command_environ_update = {'LANG': locale, 'LC_ALL': locale, 'LC_NUMERIC': locale}
cpu_facts = self.get_cpu_facts(collected_facts=collected_facts)
memory_facts = self.get_memory_facts()
dmi_facts = self.get_dmi_facts()
device_facts = self.get_device_facts()
uptime_facts = self.get_uptime_facts()
lvm_facts = self.get_lvm_facts()
mount_facts = {}
try:
mount_facts = self.get_mount_facts()
except timeout.TimeoutError:
self.module.warn("No mount facts were gathered due to timeout.")
hardware_facts.update(cpu_facts)
hardware_facts.update(memory_facts)
hardware_facts.update(dmi_facts)
hardware_facts.update(device_facts)
hardware_facts.update(uptime_facts)
hardware_facts.update(lvm_facts)
hardware_facts.update(mount_facts)
return hardware_facts
def get_memory_facts(self):
memory_facts = {}
if not os.access("/proc/meminfo", os.R_OK):
return memory_facts
memstats = {}
for line in get_file_lines("/proc/meminfo"):
data = line.split(":", 1)
key = data[0]
if key in self.ORIGINAL_MEMORY_FACTS:
val = data[1].strip().split(' ')[0]
memory_facts["%s_mb" % key.lower()] = int(val) // 1024
if key in self.MEMORY_FACTS:
val = data[1].strip().split(' ')[0]
memstats[key.lower()] = int(val) // 1024
if None not in (memstats.get('memtotal'), memstats.get('memfree')):
memstats['real:used'] = memstats['memtotal'] - memstats['memfree']
if None not in (memstats.get('cached'), memstats.get('memfree'), memstats.get('buffers')):
memstats['nocache:free'] = memstats['cached'] + memstats['memfree'] + memstats['buffers']
if None not in (memstats.get('memtotal'), memstats.get('nocache:free')):
memstats['nocache:used'] = memstats['memtotal'] - memstats['nocache:free']
if None not in (memstats.get('swaptotal'), memstats.get('swapfree')):
memstats['swap:used'] = memstats['swaptotal'] - memstats['swapfree']
memory_facts['memory_mb'] = {
'real': {
'total': memstats.get('memtotal'),
'used': memstats.get('real:used'),
'free': memstats.get('memfree'),
},
'nocache': {
'free': memstats.get('nocache:free'),
'used': memstats.get('nocache:used'),
},
'swap': {
'total': memstats.get('swaptotal'),
'free': memstats.get('swapfree'),
'used': memstats.get('swap:used'),
'cached': memstats.get('swapcached'),
},
}
return memory_facts
def get_cpu_facts(self, collected_facts=None):
cpu_facts = {}
collected_facts = collected_facts or {}
i = 0
vendor_id_occurrence = 0
model_name_occurrence = 0
processor_occurrence = 0
physid = 0
coreid = 0
sockets = {}
cores = {}
zp = 0
zmt = 0
xen = False
xen_paravirt = False
try:
if os.path.exists('/proc/xen'):
xen = True
else:
for line in get_file_lines('/sys/hypervisor/type'):
if line.strip() == 'xen':
xen = True
# Only interested in the first line
break
except IOError:
pass
if not os.access("/proc/cpuinfo", os.R_OK):
return cpu_facts
cpu_facts['processor'] = []
for line in get_file_lines('/proc/cpuinfo'):
data = line.split(":", 1)
key = data[0].strip()
try:
val = data[1].strip()
except IndexError:
val = ""
if xen:
if key == 'flags':
# Check for vme cpu flag, Xen paravirt does not expose this.
# Need to detect Xen paravirt because it exposes cpuinfo
# differently than Xen HVM or KVM and causes reporting of
# only a single cpu core.
if 'vme' not in val:
xen_paravirt = True
# model name is for Intel arch, Processor (mind the uppercase P)
# works for some ARM devices, like the Sheevaplug.
if key in ['model name', 'Processor', 'vendor_id', 'cpu', 'Vendor', 'processor']:
if 'processor' not in cpu_facts:
cpu_facts['processor'] = []
cpu_facts['processor'].append(val)
if key == 'vendor_id':
vendor_id_occurrence += 1
if key == 'model name':
model_name_occurrence += 1
if key == 'processor':
processor_occurrence += 1
i += 1
elif key == 'physical id':
physid = val
if physid not in sockets:
sockets[physid] = 1
elif key == 'core id':
coreid = val
if coreid not in sockets:
cores[coreid] = 1
elif key == 'cpu cores':
sockets[physid] = int(val)
elif key == 'siblings':
cores[coreid] = int(val)
# S390x classic cpuinfo
elif key == '# processors':
zp = int(val)
elif key == 'max thread id':
zmt = int(val) + 1
# SPARC
elif key == 'ncpus active':
i = int(val)
# Skip for platforms without vendor_id/model_name in cpuinfo (e.g. ppc64le)
if vendor_id_occurrence > 0:
if vendor_id_occurrence == model_name_occurrence:
i = vendor_id_occurrence
# The fields for ARM CPUs do not always include 'vendor_id' or 'model name',
# and sometimes includes both 'processor' and 'Processor'.
# The fields for Power CPUs include 'processor' and 'cpu'.
# Always use 'processor' count for ARM and Power systems
if collected_facts.get('ansible_architecture', '').startswith(('armv', 'aarch', 'ppc')):
i = processor_occurrence
if collected_facts.get('ansible_architecture') == 's390x':
# getting sockets would require 5.7+ with CONFIG_SCHED_TOPOLOGY
cpu_facts['processor_count'] = 1
cpu_facts['processor_cores'] = zp // zmt
cpu_facts['processor_threads_per_core'] = zmt
cpu_facts['processor_vcpus'] = zp
cpu_facts['processor_nproc'] = zp
else:
if xen_paravirt:
cpu_facts['processor_count'] = i
cpu_facts['processor_cores'] = i
cpu_facts['processor_threads_per_core'] = 1
cpu_facts['processor_vcpus'] = i
cpu_facts['processor_nproc'] = i
else:
if sockets:
cpu_facts['processor_count'] = len(sockets)
else:
cpu_facts['processor_count'] = i
socket_values = list(sockets.values())
if socket_values and socket_values[0]:
cpu_facts['processor_cores'] = socket_values[0]
else:
cpu_facts['processor_cores'] = 1
core_values = list(cores.values())
if core_values:
cpu_facts['processor_threads_per_core'] = core_values[0] // cpu_facts['processor_cores']
else:
cpu_facts['processor_threads_per_core'] = 1 // cpu_facts['processor_cores']
cpu_facts['processor_vcpus'] = (cpu_facts['processor_threads_per_core'] *
cpu_facts['processor_count'] * cpu_facts['processor_cores'])
cpu_facts['processor_nproc'] = processor_occurrence
# if the number of processors available to the module's
# thread cannot be determined, the processor count
# reported by /proc will be the default (as previously defined)
try:
cpu_facts['processor_nproc'] = len(
os.sched_getaffinity(0)
)
except AttributeError:
# In Python < 3.3, os.sched_getaffinity() is not available
try:
cmd = get_bin_path('nproc')
except ValueError:
pass
else:
rc, out, _err = self.module.run_command(cmd)
if rc == 0:
cpu_facts['processor_nproc'] = int(out)
return cpu_facts
def get_dmi_facts(self):
''' learn dmi facts from system
Try /sys first for dmi related facts.
If that is not available, fall back to dmidecode executable '''
dmi_facts = {}
if os.path.exists('/sys/devices/virtual/dmi/id/product_name'):
# Use kernel DMI info, if available
# DMI SPEC -- https://www.dmtf.org/sites/default/files/standards/documents/DSP0134_3.2.0.pdf
FORM_FACTOR = ["Unknown", "Other", "Unknown", "Desktop",
"Low Profile Desktop", "Pizza Box", "Mini Tower", "Tower",
"Portable", "Laptop", "Notebook", "Hand Held", "Docking Station",
"All In One", "Sub Notebook", "Space-saving", "Lunch Box",
"Main Server Chassis", "Expansion Chassis", "Sub Chassis",
"Bus Expansion Chassis", "Peripheral Chassis", "RAID Chassis",
"Rack Mount Chassis", "Sealed-case PC", "Multi-system",
"CompactPCI", "AdvancedTCA", "Blade", "Blade Enclosure",
"Tablet", "Convertible", "Detachable", "IoT Gateway",
"Embedded PC", "Mini PC", "Stick PC"]
DMI_DICT = {
'bios_date': '/sys/devices/virtual/dmi/id/bios_date',
'bios_vendor': '/sys/devices/virtual/dmi/id/bios_vendor',
'bios_version': '/sys/devices/virtual/dmi/id/bios_version',
'board_asset_tag': '/sys/devices/virtual/dmi/id/board_asset_tag',
'board_name': '/sys/devices/virtual/dmi/id/board_name',
'board_serial': '/sys/devices/virtual/dmi/id/board_serial',
'board_vendor': '/sys/devices/virtual/dmi/id/board_vendor',
'board_version': '/sys/devices/virtual/dmi/id/board_version',
'chassis_asset_tag': '/sys/devices/virtual/dmi/id/chassis_asset_tag',
'chassis_serial': '/sys/devices/virtual/dmi/id/chassis_serial',
'chassis_vendor': '/sys/devices/virtual/dmi/id/chassis_vendor',
'chassis_version': '/sys/devices/virtual/dmi/id/chassis_version',
'form_factor': '/sys/devices/virtual/dmi/id/chassis_type',
'product_name': '/sys/devices/virtual/dmi/id/product_name',
'product_serial': '/sys/devices/virtual/dmi/id/product_serial',
'product_uuid': '/sys/devices/virtual/dmi/id/product_uuid',
'product_version': '/sys/devices/virtual/dmi/id/product_version',
'system_vendor': '/sys/devices/virtual/dmi/id/sys_vendor',
}
for (key, path) in DMI_DICT.items():
data = get_file_content(path)
if data is not None:
if key == 'form_factor':
try:
dmi_facts['form_factor'] = FORM_FACTOR[int(data)]
except IndexError:
dmi_facts['form_factor'] = 'unknown (%s)' % data
else:
dmi_facts[key] = data
else:
dmi_facts[key] = 'NA'
else:
# Fall back to using dmidecode, if available
dmi_bin = self.module.get_bin_path('dmidecode')
DMI_DICT = {
'bios_date': 'bios-release-date',
'bios_vendor': 'bios-vendor',
'bios_version': 'bios-version',
'board_asset_tag': 'baseboard-asset-tag',
'board_name': 'baseboard-product-name',
'board_serial': 'baseboard-serial-number',
'board_vendor': 'baseboard-manufacturer',
'board_version': 'baseboard-version',
'chassis_asset_tag': 'chassis-asset-tag',
'chassis_serial': 'chassis-serial-number',
'chassis_vendor': 'chassis-manufacturer',
'chassis_version': 'chassis-version',
'form_factor': 'chassis-type',
'product_name': 'system-product-name',
'product_serial': 'system-serial-number',
'product_uuid': 'system-uuid',
'product_version': 'system-version',
'system_vendor': 'system-manufacturer',
}
for (k, v) in DMI_DICT.items():
if dmi_bin is not None:
(rc, out, err) = self.module.run_command('%s -s %s' % (dmi_bin, v))
if rc == 0:
# Strip out commented lines (specific dmidecode output)
thisvalue = ''.join([line for line in out.splitlines() if not line.startswith('#')])
try:
json.dumps(thisvalue)
except UnicodeDecodeError:
thisvalue = "NA"
dmi_facts[k] = thisvalue
else:
dmi_facts[k] = 'NA'
else:
dmi_facts[k] = 'NA'
return dmi_facts
def _run_lsblk(self, lsblk_path):
# call lsblk and collect all uuids
# --exclude 2 makes lsblk ignore floppy disks, which are slower to answer than typical timeouts
# this uses the linux major device number
# for details see https://www.kernel.org/doc/Documentation/devices.txt
args = ['--list', '--noheadings', '--paths', '--output', 'NAME,UUID', '--exclude', '2']
cmd = [lsblk_path] + args
rc, out, err = self.module.run_command(cmd)
return rc, out, err
def _lsblk_uuid(self):
uuids = {}
lsblk_path = self.module.get_bin_path("lsblk")
if not lsblk_path:
return uuids
rc, out, err = self._run_lsblk(lsblk_path)
if rc != 0:
return uuids
# each line will be in format:
# <devicename><some whitespace><uuid>
# /dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
for lsblk_line in out.splitlines():
if not lsblk_line:
continue
line = lsblk_line.strip()
fields = line.rsplit(None, 1)
if len(fields) < 2:
continue
device_name, uuid = fields[0].strip(), fields[1].strip()
if device_name in uuids:
continue
uuids[device_name] = uuid
return uuids
def _udevadm_uuid(self, device):
# fallback for versions of lsblk <= 2.23 that don't have --paths, see _run_lsblk() above
uuid = 'N/A'
udevadm_path = self.module.get_bin_path('udevadm')
if not udevadm_path:
return uuid
cmd = [udevadm_path, 'info', '--query', 'property', '--name', device]
rc, out, err = self.module.run_command(cmd)
if rc != 0:
return uuid
# a snippet of the output of the udevadm command below will be:
# ...
# ID_FS_TYPE=ext4
# ID_FS_USAGE=filesystem
# ID_FS_UUID=57b1a3e7-9019-4747-9809-7ec52bba9179
# ...
m = re.search('ID_FS_UUID=(.*)\n', out)
if m:
uuid = m.group(1)
return uuid
def _run_findmnt(self, findmnt_path):
args = ['--list', '--noheadings', '--notruncate']
cmd = [findmnt_path] + args
rc, out, err = self.module.run_command(cmd, errors='surrogate_then_replace')
return rc, out, err
def _find_bind_mounts(self):
bind_mounts = set()
findmnt_path = self.module.get_bin_path("findmnt")
if not findmnt_path:
return bind_mounts
rc, out, err = self._run_findmnt(findmnt_path)
if rc != 0:
return bind_mounts
# find bind mounts, in case /etc/mtab is a symlink to /proc/mounts
for line in out.splitlines():
fields = line.split()
# fields[0] is the TARGET, fields[1] is the SOURCE
if len(fields) < 2:
continue
# bind mounts will have a [/directory_name] in the SOURCE column
if self.BIND_MOUNT_RE.match(fields[1]):
bind_mounts.add(fields[0])
return bind_mounts
def _mtab_entries(self):
mtab_file = '/etc/mtab'
if not os.path.exists(mtab_file):
mtab_file = '/proc/mounts'
mtab = get_file_content(mtab_file, '')
mtab_entries = []
for line in mtab.splitlines():
fields = line.split()
if len(fields) < 4:
continue
mtab_entries.append(fields)
return mtab_entries
@staticmethod
def _replace_octal_escapes_helper(match):
# Convert to integer using base8 and then convert to character
return chr(int(match.group()[1:], 8))
def _replace_octal_escapes(self, value):
return self.OCTAL_ESCAPE_RE.sub(self._replace_octal_escapes_helper, value)
def get_mount_info(self, mount, device, uuids):
mount_size = get_mount_size(mount)
# _udevadm_uuid is a fallback for versions of lsblk <= 2.23 that don't have --paths
# see _run_lsblk() above
# https://github.com/ansible/ansible/issues/36077
uuid = uuids.get(device, self._udevadm_uuid(device))
return mount_size, uuid
def get_mount_facts(self):
mounts = []
# gather system lists
bind_mounts = self._find_bind_mounts()
uuids = self._lsblk_uuid()
mtab_entries = self._mtab_entries()
# start threads to query each mount
results = {}
pool = ThreadPool(processes=min(len(mtab_entries), cpu_count()))
maxtime = timeout.GATHER_TIMEOUT or timeout.DEFAULT_GATHER_TIMEOUT
for fields in mtab_entries:
# Transform octal escape sequences
fields = [self._replace_octal_escapes(field) for field in fields]
device, mount, fstype, options = fields[0], fields[1], fields[2], fields[3]
dump, passno = int(fields[4]), int(fields[5])
if not device.startswith(('/', '\\')) and ':/' not in device or fstype == 'none':
continue
mount_info = {'mount': mount,
'device': device,
'fstype': fstype,
'options': options,
'dump': dump,
'passno': passno}
if mount in bind_mounts:
# only add if not already there, we might have a plain /etc/mtab
if not self.MTAB_BIND_MOUNT_RE.match(options):
mount_info['options'] += ",bind"
results[mount] = {'info': mount_info,
'extra': pool.apply_async(self.get_mount_info, (mount, device, uuids)),
'timelimit': time.time() + maxtime}
pool.close() # done with new workers, start gc
# wait for workers and get results
while results:
for mount in list(results):
done = False
res = results[mount]['extra']
try:
if res.ready():
done = True
if res.successful():
mount_size, uuid = res.get()
if mount_size:
results[mount]['info'].update(mount_size)
results[mount]['info']['uuid'] = uuid or 'N/A'
else:
# failed, try to find out why, if 'res.successful' we know there are no exceptions
results[mount]['info']['note'] = 'Could not get extra information: %s.' % (to_text(res.get()))
elif time.time() > results[mount]['timelimit']:
done = True
self.module.warn("Timeout exceeded when getting mount info for %s" % mount)
results[mount]['info']['note'] = 'Could not get extra information due to timeout'
except Exception as e:
import traceback
done = True
results[mount]['info'] = 'N/A'
self.module.warn("Error prevented getting extra info for mount %s: [%s] %s." % (mount, type(e), to_text(e)))
self.module.debug(traceback.format_exc())
if done:
# move results outside and make loop only handle pending
mounts.append(results[mount]['info'])
del results[mount]
# avoid cpu churn, sleep between retrying for loop with remaining mounts
time.sleep(0.1)
return {'mounts': mounts}
def get_device_links(self, link_dir):
if not os.path.exists(link_dir):
return {}
try:
retval = collections.defaultdict(set)
for entry in os.listdir(link_dir):
try:
target = os.path.basename(os.readlink(os.path.join(link_dir, entry)))
retval[target].add(entry)
except OSError:
continue
return dict((k, list(sorted(v))) for (k, v) in iteritems(retval))
except OSError:
return {}
def get_all_device_owners(self):
try:
retval = collections.defaultdict(set)
for path in glob.glob('/sys/block/*/slaves/*'):
elements = path.split('/')
device = elements[3]
target = elements[5]
retval[target].add(device)
return dict((k, list(sorted(v))) for (k, v) in iteritems(retval))
except OSError:
return {}
def get_all_device_links(self):
return {
'ids': self.get_device_links('/dev/disk/by-id'),
'uuids': self.get_device_links('/dev/disk/by-uuid'),
'labels': self.get_device_links('/dev/disk/by-label'),
'masters': self.get_all_device_owners(),
}
def get_holders(self, block_dev_dict, sysdir):
block_dev_dict['holders'] = []
if os.path.isdir(sysdir + "/holders"):
for folder in os.listdir(sysdir + "/holders"):
if not folder.startswith("dm-"):
continue
name = get_file_content(sysdir + "/holders/" + folder + "/dm/name")
if name:
block_dev_dict['holders'].append(name)
else:
block_dev_dict['holders'].append(folder)
def _get_sg_inq_serial(self, sg_inq, block):
device = "/dev/%s" % (block)
rc, drivedata, err = self.module.run_command([sg_inq, device])
if rc == 0:
serial = re.search(r"(?:Unit serial|Serial) number:\s+(\w+)", drivedata)
if serial:
return serial.group(1)
def get_device_facts(self):
device_facts = {}
device_facts['devices'] = {}
lspci = self.module.get_bin_path('lspci')
if lspci:
rc, pcidata, err = self.module.run_command([lspci, '-D'], errors='surrogate_then_replace')
else:
pcidata = None
try:
block_devs = os.listdir("/sys/block")
except OSError:
return device_facts
devs_wwn = {}
try:
devs_by_id = os.listdir("/dev/disk/by-id")
except OSError:
pass
else:
for link_name in devs_by_id:
if link_name.startswith("wwn-"):
try:
wwn_link = os.readlink(os.path.join("/dev/disk/by-id", link_name))
except OSError:
continue
devs_wwn[os.path.basename(wwn_link)] = link_name[4:]
links = self.get_all_device_links()
device_facts['device_links'] = links
for block in block_devs:
virtual = 1
sysfs_no_links = 0
try:
path = os.readlink(os.path.join("/sys/block/", block))
except OSError:
e = sys.exc_info()[1]
if e.errno == errno.EINVAL:
path = block
sysfs_no_links = 1
else:
continue
sysdir = os.path.join("/sys/block", path)
if sysfs_no_links == 1:
for folder in os.listdir(sysdir):
if "device" in folder:
virtual = 0
break
d = {}
d['virtual'] = virtual
d['links'] = {}
for (link_type, link_values) in iteritems(links):
d['links'][link_type] = link_values.get(block, [])
diskname = os.path.basename(sysdir)
for key in ['vendor', 'model', 'sas_address', 'sas_device_handle']:
d[key] = get_file_content(sysdir + "/device/" + key)
sg_inq = self.module.get_bin_path('sg_inq')
# we can get NVMe device's serial number from /sys/block/<name>/device/serial
serial_path = "/sys/block/%s/device/serial" % (block)
if sg_inq:
serial = self._get_sg_inq_serial(sg_inq, block)
if serial:
d['serial'] = serial
else:
serial = get_file_content(serial_path)
if serial:
d['serial'] = serial
for key, test in [('removable', '/removable'),
('support_discard', '/queue/discard_granularity'),
]:
d[key] = get_file_content(sysdir + test)
if diskname in devs_wwn:
d['wwn'] = devs_wwn[diskname]
d['partitions'] = {}
for folder in os.listdir(sysdir):
m = re.search("(" + diskname + r"[p]?\d+)", folder)
if m:
part = {}
partname = m.group(1)
part_sysdir = sysdir + "/" + partname
part['links'] = {}
for (link_type, link_values) in iteritems(links):
part['links'][link_type] = link_values.get(partname, [])
part['start'] = get_file_content(part_sysdir + "/start", 0)
part['sectors'] = get_file_content(part_sysdir + "/size", 0)
part['sectorsize'] = get_file_content(part_sysdir + "/queue/logical_block_size")
if not part['sectorsize']:
part['sectorsize'] = get_file_content(part_sysdir + "/queue/hw_sector_size", 512)
part['size'] = bytes_to_human((float(part['sectors']) * 512.0))
part['uuid'] = get_partition_uuid(partname)
self.get_holders(part, part_sysdir)
d['partitions'][partname] = part
d['rotational'] = get_file_content(sysdir + "/queue/rotational")
d['scheduler_mode'] = ""
scheduler = get_file_content(sysdir + "/queue/scheduler")
if scheduler is not None:
m = re.match(r".*?(\[(.*)\])", scheduler)
if m:
d['scheduler_mode'] = m.group(2)
d['sectors'] = get_file_content(sysdir + "/size")
if not d['sectors']:
d['sectors'] = 0
d['sectorsize'] = get_file_content(sysdir + "/queue/logical_block_size")
if not d['sectorsize']:
d['sectorsize'] = get_file_content(sysdir + "/queue/hw_sector_size", 512)
d['size'] = bytes_to_human(float(d['sectors']) * 512.0)
d['host'] = ""
# domains are numbered (0 to ffff), bus (0 to ff), slot (0 to 1f), and function (0 to 7).
m = re.match(r".+/([a-f0-9]{4}:[a-f0-9]{2}:[0|1][a-f0-9]\.[0-7])/", sysdir)
if m and pcidata:
pciid = m.group(1)
did = re.escape(pciid)
m = re.search("^" + did + r"\s(.*)$", pcidata, re.MULTILINE)
if m:
d['host'] = m.group(1)
self.get_holders(d, sysdir)
device_facts['devices'][diskname] = d
return device_facts
def get_uptime_facts(self):
uptime_facts = {}
uptime_file_content = get_file_content('/proc/uptime')
if uptime_file_content:
uptime_seconds_string = uptime_file_content.split(' ')[0]
uptime_facts['uptime_seconds'] = int(float(uptime_seconds_string))
return uptime_facts
def _find_mapper_device_name(self, dm_device):
dm_prefix = '/dev/dm-'
mapper_device = dm_device
if dm_device.startswith(dm_prefix):
dmsetup_cmd = self.module.get_bin_path('dmsetup', True)
mapper_prefix = '/dev/mapper/'
rc, dm_name, err = self.module.run_command("%s info -C --noheadings -o name %s" % (dmsetup_cmd, dm_device))
if rc == 0:
mapper_device = mapper_prefix + dm_name.rstrip()
return mapper_device
def get_lvm_facts(self):
""" Get LVM Facts if running as root and lvm utils are available """
lvm_facts = {'lvm': 'N/A'}
if os.getuid() == 0 and self.module.get_bin_path('vgs'):
lvm_util_options = '--noheadings --nosuffix --units g --separator ,'
vgs_path = self.module.get_bin_path('vgs')
# vgs fields: VG #PV #LV #SN Attr VSize VFree
vgs = {}
if vgs_path:
rc, vg_lines, err = self.module.run_command('%s %s' % (vgs_path, lvm_util_options))
for vg_line in vg_lines.splitlines():
items = vg_line.strip().split(',')
vgs[items[0]] = {'size_g': items[-2],
'free_g': items[-1],
'num_lvs': items[2],
'num_pvs': items[1]}
lvs_path = self.module.get_bin_path('lvs')
# lvs fields:
# LV VG Attr LSize Pool Origin Data% Move Log Copy% Convert
lvs = {}
if lvs_path:
rc, lv_lines, err = self.module.run_command('%s %s' % (lvs_path, lvm_util_options))
for lv_line in lv_lines.splitlines():
items = lv_line.strip().split(',')
lvs[items[0]] = {'size_g': items[3], 'vg': items[1]}
pvs_path = self.module.get_bin_path('pvs')
# pvs fields: PV VG #Fmt #Attr PSize PFree
pvs = {}
if pvs_path:
rc, pv_lines, err = self.module.run_command('%s %s' % (pvs_path, lvm_util_options))
for pv_line in pv_lines.splitlines():
items = pv_line.strip().split(',')
pvs[self._find_mapper_device_name(items[0])] = {
'size_g': items[4],
'free_g': items[5],
'vg': items[1]}
lvm_facts['lvm'] = {'lvs': lvs, 'vgs': vgs, 'pvs': pvs}
return lvm_facts
class LinuxHardwareCollector(HardwareCollector):
_platform = 'Linux'
_fact_class = LinuxHardware
required_facts = set(['platform'])
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,710 |
Configurable sampling/transfer of control-side task context metadata to targets
|
### Summary
We're often asked how to include arbitrary control-side contextual metadata with task invocations, and to include that metadata in target-side task log messages. e.g.: sending an AWX/Controller Job ID to the target hosts on each module invocation that occurred from that job, and logging it in the module-generated syslog/Windows Application Event Log entries for future correlation with the owning job.
I've not seen any consensus on precisely *which* data to include; one person's "critical forensic correlation data" is another's "unacceptable disclosure of sensitive execution detail". Seems like we'd need a generic facility to specify environment vars and/or hostvars to sample on the control host to be included with task invocations (under a reserved dictionary arg), and adjust the module logging APIs to include them.
My initial thought is to define a new core config element (defaulting to none) that allows the user to define a templated expression that would be rendered as part of each task's templating under a host context. The rendered result would be sent to modules as a new reserved internal module var. The module logging APIs would then include this value verbatim, when present. Other module code would also have access to the value, which could be used for anything. The new config would be settable either via ansible.cfg or an envvar, making it easier for AWX/Controller to later provide a mechanism to configure it for jobs using core versions that support it, while older versions would just silently ignore it.
Maybe something like:
```
ANSIBLE_ADDITIONAL_TASK_CONTEXT='{{awx_job_id}}'
```
When this config is non-empty, the defined template would be rendered for each task/host invocation, and its result included in a new `_ansible_additional_task_context` reserved module var. The resulting value, as with any Ansible template expression, could be of arbitrary complexity (eg, returning a data structure instead of just a scalar). The module logging APIs would include the serialized value verbatim in log messages when it is present, eg "ansible_additional_task_context=(whatever the value was)".
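As a hypothetical sketch of the target-side half of the idea (the `_ansible_additional_task_context` name and the log format are assumptions from this proposal, not an existing API), a module-side logging helper might do something like:
```python
# Hypothetical sketch: append the controller-supplied context verbatim to a
# syslog entry when the reserved module var is present.
import json
import syslog

def log_with_task_context(msg, module_params):
    context = module_params.get('_ansible_additional_task_context')
    if context is not None:
        msg = '%s ansible_additional_task_context=%s' % (msg, json.dumps(context))
    syslog.openlog('ansible-example-module', 0, syslog.LOG_USER)
    syslog.syslog(syslog.LOG_INFO, msg)

log_with_task_context('Module invoked', {'_ansible_additional_task_context': 4242})
```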
### Issue Type
Feature Idea
### Component Name
module invocation and logging
### Additional Information
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81710
|
https://github.com/ansible/ansible/pull/81711
|
4208bdbbcd994251579409ad533b40c9b0543550
|
1dd0d6fad70d7d4f423dac41822da65ff9ec95ef
| 2023-09-18T16:35:01Z |
python
| 2023-11-30T18:12:55Z |
changelogs/fragments/log_id.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,710 |
Configurable sampling/transfer of control-side task context metadata to targets
|
### Summary
We're often asked how to include arbitrary control-side contextual metadata with task invocations, and to include that metadata in target-side task log messages. e.g.: sending an AWX/Controller Job ID to the target hosts on each module invocation that occurred from that job, and logging it in the module-generated syslog/Windows Application Event Log entries for future correlation with the owning job.
I've not seen any consensus on precisely *which* data to include; one person's "critical forensic correlation data" is another's "unacceptable disclosure of sensitive execution detail". Seems like we'd need a generic facility to specify environment vars and/or hostvars to sample on the control host to be included with task invocations (under a reserved dictionary arg), and adjust the module logging APIs to include them.
My initial thought is to define a new core config element (defaulting to none) that allows the user to define a templated expression that would be rendered as part of each task's templating under a host context. The rendered result would be sent to modules as a new reserved internal module var. The module logging APIs would then include this value verbatim, when present. Other module code would also have access to the value, which could be used for anything. The new config would be settable either via ansible.cfg or an envvar, making it easier for AWX/Controller to later provide a mechanism to configure it for jobs using core versions that support it, while older versions would just silently ignore it.
Maybe something like:
```
ANSIBLE_ADDITIONAL_TASK_CONTEXT='{{awx_job_id}}'
```
When this config is non-empty, the defined template would be rendered for each task/host invocation, and its result included in a new `_ansible_additional_task_context` reserved module var. The resulting value, as with any Ansible template expression, could be of arbitrary complexity (eg, returning a data structure instead of just a scalar). The module logging APIs would include the serialized value verbatim in log messages when it is present, eg "ansible_additional_task_context=(whatever the value was)".
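On the controller side, the per-host rendering could look roughly like the following sketch (plain Jinja2 is used here purely for illustration; Ansible's actual templar and variable precedence are not shown):
```python
# Hypothetical sketch: render the configured expression against one host's
# variables and stash the result under the proposed reserved module var name.
from jinja2 import Environment

def render_task_context(expr, hostvars):
    if not expr:
        return None
    return Environment().from_string(expr).render(**hostvars)

module_vars = {}
rendered = render_task_context('{{ awx_job_id }}', {'awx_job_id': 1234})
if rendered is not None:
    module_vars['_ansible_additional_task_context'] = rendered
print(module_vars)
```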
### Issue Type
Feature Idea
### Component Name
module invocation and logging
### Additional Information
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81710
|
https://github.com/ansible/ansible/pull/81711
|
4208bdbbcd994251579409ad533b40c9b0543550
|
1dd0d6fad70d7d4f423dac41822da65ff9ec95ef
| 2023-09-18T16:35:01Z |
python
| 2023-11-30T18:12:55Z |
lib/ansible/config/base.yml
|
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
ANSIBLE_HOME:
name: The Ansible home path
description:
- The default root path for Ansible config files on the controller.
default: ~/.ansible
env:
- name: ANSIBLE_HOME
ini:
- key: home
section: defaults
type: path
version_added: '2.14'
ANSIBLE_CONNECTION_PATH:
name: Path of ansible-connection script
default: null
description:
- Specify where to look for the ansible-connection script. This location will be checked before searching $PATH.
- If null, ansible will start with the same directory as the ansible script.
type: path
env: [{name: ANSIBLE_CONNECTION_PATH}]
ini:
- {key: ansible_connection_path, section: persistent_connection}
yaml: {key: persistent_connection.ansible_connection_path}
version_added: "2.8"
ANSIBLE_COW_SELECTION:
name: Cowsay filter selection
default: default
description: This allows you to choose a specific cowsay stencil for the banners or use 'random' to cycle through them.
env: [{name: ANSIBLE_COW_SELECTION}]
ini:
- {key: cow_selection, section: defaults}
ANSIBLE_COW_ACCEPTLIST:
name: Cowsay filter acceptance list
default: ['bud-frogs', 'bunny', 'cheese', 'daemon', 'default', 'dragon', 'elephant-in-snake', 'elephant', 'eyes', 'hellokitty', 'kitty', 'luke-koala', 'meow', 'milk', 'moofasa', 'moose', 'ren', 'sheep', 'small', 'stegosaurus', 'stimpy', 'supermilker', 'three-eyes', 'turkey', 'turtle', 'tux', 'udder', 'vader-koala', 'vader', 'www']
description: Accept a list of cowsay templates that are 'safe' to use; set to an empty list if you want to enable all installed templates.
env:
- name: ANSIBLE_COW_ACCEPTLIST
version_added: '2.11'
ini:
- key: cowsay_enabled_stencils
section: defaults
version_added: '2.11'
type: list
ANSIBLE_FORCE_COLOR:
name: Force color output
default: False
description: This option forces color mode even when running without a TTY or the "nocolor" setting is True.
env: [{name: ANSIBLE_FORCE_COLOR}]
ini:
- {key: force_color, section: defaults}
type: boolean
yaml: {key: display.force_color}
ANSIBLE_NOCOLOR:
name: Suppress color output
default: False
description: This setting allows suppressing colorizing output, which is used to give a better indication of failure and status information.
env:
- name: ANSIBLE_NOCOLOR
# this is generic convention for CLI programs
- name: NO_COLOR
version_added: '2.11'
ini:
- {key: nocolor, section: defaults}
type: boolean
yaml: {key: display.nocolor}
ANSIBLE_NOCOWS:
name: Suppress cowsay output
default: False
description: If you have cowsay installed but want to avoid the 'cows' (why????), use this.
env: [{name: ANSIBLE_NOCOWS}]
ini:
- {key: nocows, section: defaults}
type: boolean
yaml: {key: display.i_am_no_fun}
ANSIBLE_COW_PATH:
name: Set path to cowsay command
default: null
description: Specify a custom cowsay path or swap in your cowsay implementation of choice.
env: [{name: ANSIBLE_COW_PATH}]
ini:
- {key: cowpath, section: defaults}
type: string
yaml: {key: display.cowpath}
ANSIBLE_PIPELINING:
name: Connection pipelining
default: False
description:
- This is a global option; each connection plugin can override it either by having more specific options or by not supporting pipelining at all.
- Pipelining, if supported by the connection plugin, reduces the number of network operations required to execute a module on the remote server,
by executing many Ansible modules without actual file transfer.
- It can result in a very significant performance improvement when enabled.
- "However this conflicts with privilege escalation (become). For example, when using 'sudo:' operations you must first
disable 'requiretty' in /etc/sudoers on all managed hosts, which is why it is disabled by default."
- This setting will be disabled if ``ANSIBLE_KEEP_REMOTE_FILES`` is enabled.
env:
- name: ANSIBLE_PIPELINING
ini:
- section: defaults
key: pipelining
- section: connection
key: pipelining
type: boolean
ANY_ERRORS_FATAL:
name: Make Task failures fatal
default: False
description: Sets the default value for the any_errors_fatal keyword; if True, task failures will be considered fatal errors.
env:
- name: ANSIBLE_ANY_ERRORS_FATAL
ini:
- section: defaults
key: any_errors_fatal
type: boolean
yaml: {key: errors.any_task_errors_fatal}
version_added: "2.4"
BECOME_ALLOW_SAME_USER:
name: Allow becoming the same user
default: False
description:
- When ``False`` (default), Ansible will skip using become if the remote user is the same as the become user, as this is normally a redundant operation.
In other words, root sudo to root.
- If ``True``, this forces Ansible to use the become plugin anyway, as there are cases in which this is needed.
env: [{name: ANSIBLE_BECOME_ALLOW_SAME_USER}]
ini:
- {key: become_allow_same_user, section: privilege_escalation}
type: boolean
yaml: {key: privilege_escalation.become_allow_same_user}
BECOME_PASSWORD_FILE:
name: Become password file
default: ~
description:
- 'The password file to use for the become plugin. ``--become-password-file``.'
- If executable, it will be run and the resulting stdout will be used as the password.
env: [{name: ANSIBLE_BECOME_PASSWORD_FILE}]
ini:
- {key: become_password_file, section: defaults}
type: path
version_added: '2.12'
AGNOSTIC_BECOME_PROMPT:
name: Display an agnostic become prompt
default: True
type: boolean
description: Display an agnostic become prompt instead of displaying a prompt containing the command line supplied become method.
env: [{name: ANSIBLE_AGNOSTIC_BECOME_PROMPT}]
ini:
- {key: agnostic_become_prompt, section: privilege_escalation}
yaml: {key: privilege_escalation.agnostic_become_prompt}
version_added: "2.5"
CACHE_PLUGIN:
name: Persistent Cache plugin
default: memory
description: Chooses which cache plugin to use; the default 'memory' is ephemeral.
env: [{name: ANSIBLE_CACHE_PLUGIN}]
ini:
- {key: fact_caching, section: defaults}
yaml: {key: facts.cache.plugin}
CACHE_PLUGIN_CONNECTION:
name: Cache Plugin URI
default: ~
description: Defines connection or path information for the cache plugin.
env: [{name: ANSIBLE_CACHE_PLUGIN_CONNECTION}]
ini:
- {key: fact_caching_connection, section: defaults}
yaml: {key: facts.cache.uri}
CACHE_PLUGIN_PREFIX:
name: Cache Plugin table prefix
default: ansible_facts
description: Prefix to use for cache plugin files/tables.
env: [{name: ANSIBLE_CACHE_PLUGIN_PREFIX}]
ini:
- {key: fact_caching_prefix, section: defaults}
yaml: {key: facts.cache.prefix}
CACHE_PLUGIN_TIMEOUT:
name: Cache Plugin expiration timeout
default: 86400
description: Expiration timeout for the cache plugin data.
env: [{name: ANSIBLE_CACHE_PLUGIN_TIMEOUT}]
ini:
- {key: fact_caching_timeout, section: defaults}
type: integer
yaml: {key: facts.cache.timeout}
COLLECTIONS_SCAN_SYS_PATH:
name: Scan PYTHONPATH for installed collections
description: A boolean to enable or disable scanning the sys.path for installed collections.
default: true
type: boolean
env:
- {name: ANSIBLE_COLLECTIONS_SCAN_SYS_PATH}
ini:
- {key: collections_scan_sys_path, section: defaults}
COLLECTIONS_PATHS:
name: An ordered list of root paths for loading installed Ansible collections content.
description: >
Colon-separated paths in which Ansible will search for collections content.
Collections must be in nested *subdirectories*, not directly in these directories.
For example, if ``COLLECTIONS_PATHS`` includes ``'{{ ANSIBLE_HOME ~ "/collections" }}'``,
and you want to add ``my.collection`` to that directory, it must be saved as
``'{{ ANSIBLE_HOME ~ "/collections/ansible_collections/my/collection" }}'``.
default: '{{ ANSIBLE_HOME ~ "/collections:/usr/share/ansible/collections" }}'
type: pathspec
env:
- name: ANSIBLE_COLLECTIONS_PATHS
deprecated:
why: does not fit var naming standard, use the singular form ANSIBLE_COLLECTIONS_PATH instead
version: "2.19"
- name: ANSIBLE_COLLECTIONS_PATH
version_added: '2.10'
ini:
- key: collections_paths
section: defaults
deprecated:
why: does not fit var naming standard, use the singular form collections_path instead
version: "2.19"
- key: collections_path
section: defaults
version_added: '2.10'
COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH:
name: Defines behavior when loading a collection that does not support the current Ansible version
description:
- When a collection is loaded that does not support the running Ansible version (with the collection metadata key `requires_ansible`).
env: [{name: ANSIBLE_COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH}]
ini: [{key: collections_on_ansible_version_mismatch, section: defaults}]
choices: &basic_error
error: issue a 'fatal' error and stop the play
warning: issue a warning but continue
ignore: just continue silently
default: warning
COLOR_CHANGED:
name: Color for 'changed' task status
default: yellow
description: Defines the color to use on 'Changed' task status.
env: [{name: ANSIBLE_COLOR_CHANGED}]
ini:
- {key: changed, section: colors}
COLOR_CONSOLE_PROMPT:
name: "Color for ansible-console's prompt task status"
default: white
description: Defines the default color to use for ansible-console.
env: [{name: ANSIBLE_COLOR_CONSOLE_PROMPT}]
ini:
- {key: console_prompt, section: colors}
version_added: "2.7"
COLOR_DEBUG:
name: Color for debug statements
default: dark gray
description: Defines the color to use when emitting debug messages.
env: [{name: ANSIBLE_COLOR_DEBUG}]
ini:
- {key: debug, section: colors}
COLOR_DEPRECATE:
name: Color for deprecation messages
default: purple
description: Defines the color to use when emitting deprecation messages.
env: [{name: ANSIBLE_COLOR_DEPRECATE}]
ini:
- {key: deprecate, section: colors}
COLOR_DIFF_ADD:
name: Color for diff added display
default: green
description: Defines the color to use when showing added lines in diffs.
env: [{name: ANSIBLE_COLOR_DIFF_ADD}]
ini:
- {key: diff_add, section: colors}
yaml: {key: display.colors.diff.add}
COLOR_DIFF_LINES:
name: Color for diff lines display
default: cyan
description: Defines the color to use when showing diffs.
env: [{name: ANSIBLE_COLOR_DIFF_LINES}]
ini:
- {key: diff_lines, section: colors}
COLOR_DIFF_REMOVE:
name: Color for diff removed display
default: red
description: Defines the color to use when showing removed lines in diffs.
env: [{name: ANSIBLE_COLOR_DIFF_REMOVE}]
ini:
- {key: diff_remove, section: colors}
COLOR_ERROR:
name: Color for error messages
default: red
description: Defines the color to use when emitting error messages.
env: [{name: ANSIBLE_COLOR_ERROR}]
ini:
- {key: error, section: colors}
yaml: {key: colors.error}
COLOR_HIGHLIGHT:
name: Color for highlighting
default: white
description: Defines the color to use for highlighting.
env: [{name: ANSIBLE_COLOR_HIGHLIGHT}]
ini:
- {key: highlight, section: colors}
COLOR_OK:
name: Color for 'ok' task status
default: green
description: Defines the color to use when showing 'OK' task status.
env: [{name: ANSIBLE_COLOR_OK}]
ini:
- {key: ok, section: colors}
COLOR_SKIP:
name: Color for 'skip' task status
default: cyan
description: Defines the color to use when showing 'Skipped' task status.
env: [{name: ANSIBLE_COLOR_SKIP}]
ini:
- {key: skip, section: colors}
COLOR_UNREACHABLE:
name: Color for 'unreachable' host state
default: bright red
description: Defines the color to use on 'Unreachable' status.
env: [{name: ANSIBLE_COLOR_UNREACHABLE}]
ini:
- {key: unreachable, section: colors}
COLOR_VERBOSE:
name: Color for verbose messages
default: blue
description: Defines the color to use when emitting verbose messages. In other words, those that show with '-v's.
env: [{name: ANSIBLE_COLOR_VERBOSE}]
ini:
- {key: verbose, section: colors}
COLOR_WARN:
name: Color for warning messages
default: bright purple
description: Defines the color to use when emitting warning messages.
env: [{name: ANSIBLE_COLOR_WARN}]
ini:
- {key: warn, section: colors}
CONNECTION_PASSWORD_FILE:
name: Connection password file
default: ~
description: 'The password file to use for the connection plugin. ``--connection-password-file``.'
env: [{name: ANSIBLE_CONNECTION_PASSWORD_FILE}]
ini:
- {key: connection_password_file, section: defaults}
type: path
version_added: '2.12'
COVERAGE_REMOTE_OUTPUT:
name: Sets the output directory and filename prefix to generate coverage run info.
description:
- Sets the output directory on the remote host to generate coverage reports into.
- Currently only used for remote coverage on PowerShell modules.
- This is for internal use only.
env:
- {name: _ANSIBLE_COVERAGE_REMOTE_OUTPUT}
vars:
- {name: _ansible_coverage_remote_output}
type: str
version_added: '2.9'
COVERAGE_REMOTE_PATHS:
name: Sets the list of paths to run coverage for.
description:
- A list of paths for files on the Ansible controller to run coverage for when executing on the remote host.
- Only files that match the path glob will have their coverage collected.
- Multiple path globs can be specified and are separated by ``:``.
- Currently only used for remote coverage on PowerShell modules.
- This is for internal use only.
default: '*'
env:
- {name: _ANSIBLE_COVERAGE_REMOTE_PATH_FILTER}
type: str
version_added: '2.9'
ACTION_WARNINGS:
name: Toggle action warnings
default: True
description:
- By default, Ansible will issue a warning when a warning is received from a task action (module or action plugin).
- These warnings can be silenced by adjusting this setting to False.
env: [{name: ANSIBLE_ACTION_WARNINGS}]
ini:
- {key: action_warnings, section: defaults}
type: boolean
version_added: "2.5"
LOCALHOST_WARNING:
name: Warning when using implicit inventory with only localhost
default: True
description:
- By default, Ansible will issue a warning when there are no hosts in the
inventory.
- These warnings can be silenced by adjusting this setting to False.
env: [{name: ANSIBLE_LOCALHOST_WARNING}]
ini:
- {key: localhost_warning, section: defaults}
type: boolean
version_added: "2.6"
LOG_VERBOSITY:
name: Default log verbosity
description:
- This will set log verbosity if higher than the normal display verbosity, otherwise it will match that.
env: [{name: ANSIBLE_LOG_VERBOSITY}]
ini:
- {key: log_verbosity, section: defaults}
type: int
version_added: "2.17"
INVENTORY_UNPARSED_WARNING:
name: Warning when no inventory files can be parsed, resulting in an implicit inventory with only localhost
default: True
description:
- By default, Ansible will issue a warning when no inventory was loaded and notes that
it will use an implicit localhost-only inventory.
- These warnings can be silenced by adjusting this setting to False.
env: [{name: ANSIBLE_INVENTORY_UNPARSED_WARNING}]
ini:
- {key: inventory_unparsed_warning, section: inventory}
type: boolean
version_added: "2.14"
DOC_FRAGMENT_PLUGIN_PATH:
name: documentation fragment plugins path
default: '{{ ANSIBLE_HOME ~ "/plugins/doc_fragments:/usr/share/ansible/plugins/doc_fragments" }}'
description: Colon-separated paths in which Ansible will search for Documentation Fragments Plugins.
env: [{name: ANSIBLE_DOC_FRAGMENT_PLUGINS}]
ini:
- {key: doc_fragment_plugins, section: defaults}
type: pathspec
DEFAULT_ACTION_PLUGIN_PATH:
name: Action plugins path
default: '{{ ANSIBLE_HOME ~ "/plugins/action:/usr/share/ansible/plugins/action" }}'
description: Colon-separated paths in which Ansible will search for Action Plugins.
env: [{name: ANSIBLE_ACTION_PLUGINS}]
ini:
- {key: action_plugins, section: defaults}
type: pathspec
yaml: {key: plugins.action.path}
DEFAULT_ALLOW_UNSAFE_LOOKUPS:
name: Allow unsafe lookups
default: False
description:
- "When enabled, this option allows lookup plugins (whether used in variables as ``{{lookup('foo')}}`` or as a loop as with_foo)
to return data that is not marked 'unsafe'."
- By default, such data is marked as unsafe to prevent the templating engine from evaluating any jinja2 templating language,
as this could represent a security risk. This option is provided to allow for backward compatibility,
however, users should first consider adding allow_unsafe=True to any lookups that may be expected to contain data that may be run
through the templating engine late.
env: []
ini:
- {key: allow_unsafe_lookups, section: defaults}
type: boolean
version_added: "2.2.3"
DEFAULT_ASK_PASS:
name: Ask for the login password
default: False
description:
- This controls whether an Ansible playbook should prompt for a login password.
If using SSH keys for authentication, you probably do not need to change this setting.
env: [{name: ANSIBLE_ASK_PASS}]
ini:
- {key: ask_pass, section: defaults}
type: boolean
yaml: {key: defaults.ask_pass}
DEFAULT_ASK_VAULT_PASS:
name: Ask for the vault password(s)
default: False
description:
- This controls whether an Ansible playbook should prompt for a vault password.
env: [{name: ANSIBLE_ASK_VAULT_PASS}]
ini:
- {key: ask_vault_pass, section: defaults}
type: boolean
DEFAULT_BECOME:
name: Enable privilege escalation (become)
default: False
description: Toggles the use of privilege escalation, allowing you to 'become' another user after login.
env: [{name: ANSIBLE_BECOME}]
ini:
- {key: become, section: privilege_escalation}
type: boolean
DEFAULT_BECOME_ASK_PASS:
name: Ask for the privilege escalation (become) password
default: False
description: Toggle to prompt for privilege escalation password.
env: [{name: ANSIBLE_BECOME_ASK_PASS}]
ini:
- {key: become_ask_pass, section: privilege_escalation}
type: boolean
DEFAULT_BECOME_METHOD:
name: Choose privilege escalation method
default: 'sudo'
description: Privilege escalation method to use when `become` is enabled.
env: [{name: ANSIBLE_BECOME_METHOD}]
ini:
- {section: privilege_escalation, key: become_method}
DEFAULT_BECOME_EXE:
name: Choose 'become' executable
default: ~
description: 'executable to use for privilege escalation, otherwise Ansible will depend on PATH.'
env: [{name: ANSIBLE_BECOME_EXE}]
ini:
- {key: become_exe, section: privilege_escalation}
DEFAULT_BECOME_FLAGS:
name: Set 'become' executable options
default: ''
description: Flags to pass to the privilege escalation executable.
env: [{name: ANSIBLE_BECOME_FLAGS}]
ini:
- {key: become_flags, section: privilege_escalation}
BECOME_PLUGIN_PATH:
name: Become plugins path
default: '{{ ANSIBLE_HOME ~ "/plugins/become:/usr/share/ansible/plugins/become" }}'
description: Colon-separated paths in which Ansible will search for Become Plugins.
env: [{name: ANSIBLE_BECOME_PLUGINS}]
ini:
- {key: become_plugins, section: defaults}
type: pathspec
version_added: "2.8"
DEFAULT_BECOME_USER:
# FIXME: should really be blank and make -u passing optional depending on it
name: Set the user you 'become' via privilege escalation
default: root
description: The user your login/remote user 'becomes' when using privilege escalation, most systems will use 'root' when no user is specified.
env: [{name: ANSIBLE_BECOME_USER}]
ini:
- {key: become_user, section: privilege_escalation}
yaml: {key: become.user}
DEFAULT_CACHE_PLUGIN_PATH:
name: Cache Plugins Path
default: '{{ ANSIBLE_HOME ~ "/plugins/cache:/usr/share/ansible/plugins/cache" }}'
description: Colon-separated paths in which Ansible will search for Cache Plugins.
env: [{name: ANSIBLE_CACHE_PLUGINS}]
ini:
- {key: cache_plugins, section: defaults}
type: pathspec
DEFAULT_CALLBACK_PLUGIN_PATH:
name: Callback Plugins Path
default: '{{ ANSIBLE_HOME ~ "/plugins/callback:/usr/share/ansible/plugins/callback" }}'
description: Colon-separated paths in which Ansible will search for Callback Plugins.
env: [{name: ANSIBLE_CALLBACK_PLUGINS}]
ini:
- {key: callback_plugins, section: defaults}
type: pathspec
yaml: {key: plugins.callback.path}
CALLBACKS_ENABLED:
name: Enable callback plugins that require it.
default: []
description:
- "List of enabled callbacks, not all callbacks need enabling,
but many of those shipped with Ansible do as we don't want them activated by default."
env:
- name: ANSIBLE_CALLBACKS_ENABLED
version_added: '2.11'
ini:
- key: callbacks_enabled
section: defaults
version_added: '2.11'
type: list
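# A minimal illustrative sketch of enabling extra callbacks; the plugin names assume the
# ansible.posix collection is installed and are only an example, not a recommendation:
#   [defaults]
#   callbacks_enabled = ansible.posix.timer, ansible.posix.profile_tasks
# or, equivalently: export ANSIBLE_CALLBACKS_ENABLED="ansible.posix.timer,ansible.posix.profile_tasks"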
DEFAULT_CLICONF_PLUGIN_PATH:
name: Cliconf Plugins Path
default: '{{ ANSIBLE_HOME ~ "/plugins/cliconf:/usr/share/ansible/plugins/cliconf" }}'
description: Colon-separated paths in which Ansible will search for Cliconf Plugins.
env: [{name: ANSIBLE_CLICONF_PLUGINS}]
ini:
- {key: cliconf_plugins, section: defaults}
type: pathspec
DEFAULT_CONNECTION_PLUGIN_PATH:
name: Connection Plugins Path
default: '{{ ANSIBLE_HOME ~ "/plugins/connection:/usr/share/ansible/plugins/connection" }}'
description: Colon-separated paths in which Ansible will search for Connection Plugins.
env: [{name: ANSIBLE_CONNECTION_PLUGINS}]
ini:
- {key: connection_plugins, section: defaults}
type: pathspec
yaml: {key: plugins.connection.path}
DEFAULT_DEBUG:
name: Debug mode
default: False
description:
- "Toggles debug output in Ansible. This is *very* verbose and can hinder
multiprocessing. Debug output can also include secret information
despite no_log settings being enabled, which means debug mode should not be used in
production."
env: [{name: ANSIBLE_DEBUG}]
ini:
- {key: debug, section: defaults}
type: boolean
DEFAULT_EXECUTABLE:
name: Target shell executable
default: /bin/sh
description:
- "This indicates the command to use to spawn a shell under, which is required for Ansible's execution needs on a target.
Users may need to change this in rare instances when shell usage is constrained, but in most cases, it may be left as is."
env: [{name: ANSIBLE_EXECUTABLE}]
ini:
- {key: executable, section: defaults}
DEFAULT_FACT_PATH:
name: local fact path
description:
- "This option allows you to globally configure a custom path for 'local_facts' for the implied :ref:`ansible_collections.ansible.builtin.setup_module` task when using fact gathering."
- "If not set, it will fall back to the default from the ``ansible.builtin.setup`` module: ``/etc/ansible/facts.d``."
- "This does **not** affect user defined tasks that use the ``ansible.builtin.setup`` module."
- The real action being created by the implicit task is currently ``ansible.legacy.gather_facts`` module, which then calls the configured fact modules,
by default this will be ``ansible.builtin.setup`` for POSIX systems but other platforms might have different defaults.
env: [{name: ANSIBLE_FACT_PATH}]
ini:
- {key: fact_path, section: defaults}
type: string
deprecated:
# TODO: when removing set playbook/play.py to default=None
why: the module_defaults keyword is a more generic version and can apply to all calls to the
M(ansible.builtin.gather_facts) or M(ansible.builtin.setup) actions
version: "2.18"
alternatives: module_defaults
DEFAULT_FILTER_PLUGIN_PATH:
name: Jinja2 Filter Plugins Path
default: '{{ ANSIBLE_HOME ~ "/plugins/filter:/usr/share/ansible/plugins/filter" }}'
description: Colon-separated paths in which Ansible will search for Jinja2 Filter Plugins.
env: [{name: ANSIBLE_FILTER_PLUGINS}]
ini:
- {key: filter_plugins, section: defaults}
type: pathspec
DEFAULT_FORCE_HANDLERS:
name: Force handlers to run after failure
default: False
description:
- This option controls if notified handlers run on a host even if a failure occurs on that host.
- When false, the handlers will not run if a failure has occurred on a host.
- This can also be set per play or on the command line. See Handlers and Failure for more details.
env: [{name: ANSIBLE_FORCE_HANDLERS}]
ini:
- {key: force_handlers, section: defaults}
type: boolean
version_added: "1.9.1"
DEFAULT_FORKS:
name: Number of task forks
default: 5
description: Maximum number of forks Ansible will use to execute tasks on target hosts.
env: [{name: ANSIBLE_FORKS}]
ini:
- {key: forks, section: defaults}
type: integer
DEFAULT_GATHERING:
name: Gathering behaviour
default: 'implicit'
description:
- This setting controls the default policy of fact gathering (facts discovered about remote systems).
- "This option can be useful for those wishing to save fact gathering time. Both 'smart' and 'explicit' will use the cache plugin."
env: [{name: ANSIBLE_GATHERING}]
ini:
- key: gathering
section: defaults
version_added: "1.6"
choices:
implicit: "the cache plugin will be ignored and facts will be gathered per play unless 'gather_facts: False' is set."
explicit: facts will not be gathered unless directly requested in the play.
smart: each new host that has no facts discovered will be scanned, but if the same host is addressed in multiple plays it will not be contacted again in the run.
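# Example sketch of opting into explicit gathering; plays then request facts themselves
# with 'gather_facts: true' where needed:
#   [defaults]
#   gathering = explicit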
DEFAULT_GATHER_SUBSET:
name: Gather facts subset
description:
- Set the `gather_subset` option for the :ref:`ansible_collections.ansible.builtin.setup_module` task in the implicit fact gathering.
See the module documentation for specifics.
- "It does **not** apply to user defined ``ansible.builtin.setup`` tasks."
env: [{name: ANSIBLE_GATHER_SUBSET}]
ini:
- key: gather_subset
section: defaults
version_added: "2.1"
type: list
deprecated:
# TODO: when removing set playbook/play.py to default=None
why: the module_defaults keyword is a more generic version and can apply to all calls to the
M(ansible.builtin.gather_facts) or M(ansible.builtin.setup) actions
version: "2.18"
alternatives: module_defaults
DEFAULT_GATHER_TIMEOUT:
name: Gather facts timeout
description:
- Set the timeout in seconds for the implicit fact gathering, see the module documentation for specifics.
- "It does **not** apply to user defined :ref:`ansible_collections.ansible.builtin.setup_module` tasks."
env: [{name: ANSIBLE_GATHER_TIMEOUT}]
ini:
- {key: gather_timeout, section: defaults}
type: integer
deprecated:
# TODO: when removing set playbook/play.py to default=None
why: the module_defaults keyword is a more generic version and can apply to all calls to the
M(ansible.builtin.gather_facts) or M(ansible.builtin.setup) actions
version: "2.18"
alternatives: module_defaults
DEFAULT_HASH_BEHAVIOUR:
name: Hash merge behaviour
default: replace
type: string
choices:
replace: Any variable that is defined more than once is overwritten using the order from variable precedence rules (highest wins).
merge: Any dictionary variable will be recursively merged with new definitions across the different variable definition sources.
description:
- This setting controls how duplicate definitions of dictionary variables (aka hash, map, associative array) are handled in Ansible.
- This does not affect variables whose values are scalars (integers, strings) or arrays.
- "**WARNING**, changing this setting is not recommended as this is fragile and makes your content (plays, roles, collections) nonportable,
leading to continual confusion and misuse. Don't change this setting unless you think you have an absolute need for it."
- We recommend avoiding reusing variable names and relying on the ``combine`` filter and ``vars`` and ``varnames`` lookups
to create merged versions of the individual variables. In our experience, this is rarely needed and is a sign that too much
complexity has been introduced into the data structures and plays.
- For some uses you can also look into custom vars_plugins to merge on input, even substituting the default ``host_group_vars``
that is in charge of parsing the ``host_vars/`` and ``group_vars/`` directories. Most users of this setting are only interested in inventory scope,
but the setting itself affects all sources and makes debugging even harder.
- All playbooks and roles in the official examples repos assume the default for this setting.
- Changing the setting to ``merge`` applies across variable sources, but many sources will internally still overwrite the variables.
For example ``include_vars`` will dedupe variables internally before updating Ansible, with 'last defined' overwriting previous definitions in the same file.
- The Ansible project recommends you **avoid ``merge`` for new projects.**
- It is the intention of the Ansible developers to eventually deprecate and remove this setting, but it is being kept as some users do heavily rely on it.
env: [{name: ANSIBLE_HASH_BEHAVIOUR}]
ini:
- {key: hash_behaviour, section: defaults}
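# A sketch of the recommended alternative to 'merge' mentioned above, using the ``combine``
# filter (the variable names are hypothetical):
#   merged_settings: "{{ default_settings | combine(host_settings, recursive=True) }}"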
DEFAULT_HOST_LIST:
name: Inventory Source
default: /etc/ansible/hosts
description: Comma-separated list of Ansible inventory sources
env:
- name: ANSIBLE_INVENTORY
expand_relative_paths: True
ini:
- key: inventory
section: defaults
type: pathlist
yaml: {key: defaults.inventory}
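# Illustrative example of multiple comma-separated inventory sources (the paths are hypothetical):
#   [defaults]
#   inventory = /etc/ansible/hosts,./inventories/staging
# or: export ANSIBLE_INVENTORY=./inventories/staging,./inventories/shared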
DEFAULT_HTTPAPI_PLUGIN_PATH:
name: HttpApi Plugins Path
default: '{{ ANSIBLE_HOME ~ "/plugins/httpapi:/usr/share/ansible/plugins/httpapi" }}'
description: Colon-separated paths in which Ansible will search for HttpApi Plugins.
env: [{name: ANSIBLE_HTTPAPI_PLUGINS}]
ini:
- {key: httpapi_plugins, section: defaults}
type: pathspec
DEFAULT_INTERNAL_POLL_INTERVAL:
name: Internal poll interval
default: 0.001
env: []
ini:
- {key: internal_poll_interval, section: defaults}
type: float
version_added: "2.2"
description:
- This sets the interval (in seconds) of Ansible internal processes polling each other.
Lower values improve performance with large playbooks at the expense of extra CPU load.
Higher values are more suitable for Ansible usage in automation scenarios
when UI responsiveness is not required but CPU usage might be a concern.
- "The default corresponds to the value hardcoded in Ansible <= 2.1"
DEFAULT_INVENTORY_PLUGIN_PATH:
name: Inventory Plugins Path
default: '{{ ANSIBLE_HOME ~ "/plugins/inventory:/usr/share/ansible/plugins/inventory" }}'
description: Colon-separated paths in which Ansible will search for Inventory Plugins.
env: [{name: ANSIBLE_INVENTORY_PLUGINS}]
ini:
- {key: inventory_plugins, section: defaults}
type: pathspec
DEFAULT_JINJA2_EXTENSIONS:
name: Enabled Jinja2 extensions
default: []
description:
- This is a developer-specific feature that allows enabling additional Jinja2 extensions.
- "See the Jinja2 documentation for details. If you do not know what these do, you probably don't need to change this setting :)"
env: [{name: ANSIBLE_JINJA2_EXTENSIONS}]
ini:
- {key: jinja2_extensions, section: defaults}
DEFAULT_JINJA2_NATIVE:
name: Use Jinja2's NativeEnvironment for templating
default: False
description: This option preserves variable types during template operations.
env: [{name: ANSIBLE_JINJA2_NATIVE}]
ini:
- {key: jinja2_native, section: defaults}
type: boolean
yaml: {key: jinja2_native}
version_added: 2.7
DEFAULT_KEEP_REMOTE_FILES:
name: Keep remote files
default: False
description:
- Enables/disables the cleaning up of the temporary files Ansible used to execute the tasks on the remote.
- If this option is enabled it will disable ``ANSIBLE_PIPELINING``.
env: [{name: ANSIBLE_KEEP_REMOTE_FILES}]
ini:
- {key: keep_remote_files, section: defaults}
type: boolean
DEFAULT_LIBVIRT_LXC_NOSECLABEL:
# TODO: move to plugin
name: No security label on Lxc
default: False
description:
- "This setting causes libvirt to connect to LXC containers by passing ``--noseclabel`` parameter to ``virsh`` command.
This is necessary when running on systems which do not have SELinux."
env:
- name: ANSIBLE_LIBVIRT_LXC_NOSECLABEL
ini:
- {key: libvirt_lxc_noseclabel, section: selinux}
type: boolean
version_added: "2.1"
DEFAULT_LOAD_CALLBACK_PLUGINS:
name: Load callbacks for adhoc
default: False
description:
- Controls whether callback plugins are loaded when running /usr/bin/ansible.
This may be used to log activity from the command line, send notifications, and so on.
Callback plugins are always loaded for ``ansible-playbook``.
env: [{name: ANSIBLE_LOAD_CALLBACK_PLUGINS}]
ini:
- {key: bin_ansible_callbacks, section: defaults}
type: boolean
version_added: "1.8"
DEFAULT_LOCAL_TMP:
name: Controller temporary directory
default: '{{ ANSIBLE_HOME ~ "/tmp" }}'
description: Temporary directory for Ansible to use on the controller.
env: [{name: ANSIBLE_LOCAL_TEMP}]
ini:
- {key: local_tmp, section: defaults}
type: tmppath
DEFAULT_LOG_PATH:
name: Ansible log file path
default: ~
description: File to which Ansible will log on the controller. When empty logging is disabled.
env: [{name: ANSIBLE_LOG_PATH}]
ini:
- {key: log_path, section: defaults}
type: path
DEFAULT_LOG_FILTER:
name: Name filters for python logger
default: []
description: List of logger names to filter out of the log file.
env: [{name: ANSIBLE_LOG_FILTER}]
ini:
- {key: log_filter, section: defaults}
type: list
DEFAULT_LOOKUP_PLUGIN_PATH:
name: Lookup Plugins Path
description: Colon-separated paths in which Ansible will search for Lookup Plugins.
default: '{{ ANSIBLE_HOME ~ "/plugins/lookup:/usr/share/ansible/plugins/lookup" }}'
env: [{name: ANSIBLE_LOOKUP_PLUGINS}]
ini:
- {key: lookup_plugins, section: defaults}
type: pathspec
yaml: {key: defaults.lookup_plugins}
DEFAULT_MANAGED_STR:
name: Ansible managed
default: 'Ansible managed'
description: Sets the macro for the 'ansible_managed' variable available for :ref:`ansible_collections.ansible.builtin.template_module` and :ref:`ansible_collections.ansible.windows.win_template_module`. This is only relevant to those two modules.
env: []
ini:
- {key: ansible_managed, section: defaults}
yaml: {key: defaults.ansible_managed}
DEFAULT_MODULE_ARGS:
name: Adhoc default arguments
default: ~
description:
- This sets the default arguments to pass to the ``ansible`` adhoc binary if no ``-a`` is specified.
env: [{name: ANSIBLE_MODULE_ARGS}]
ini:
- {key: module_args, section: defaults}
DEFAULT_MODULE_COMPRESSION:
name: Python module compression
default: ZIP_DEFLATED
description: Compression scheme to use when transferring Python modules to the target.
env: []
ini:
- {key: module_compression, section: defaults}
# vars:
# - name: ansible_module_compression
DEFAULT_MODULE_NAME:
name: Default adhoc module
default: command
description: "Module to use with the ``ansible`` AdHoc command, if none is specified via ``-m``."
env: []
ini:
- {key: module_name, section: defaults}
DEFAULT_MODULE_PATH:
name: Modules Path
description: Colon-separated paths in which Ansible will search for Modules.
default: '{{ ANSIBLE_HOME ~ "/plugins/modules:/usr/share/ansible/plugins/modules" }}'
env: [{name: ANSIBLE_LIBRARY}]
ini:
- {key: library, section: defaults}
type: pathspec
DEFAULT_MODULE_UTILS_PATH:
name: Module Utils Path
description: Colon-separated paths in which Ansible will search for Module utils files, which are shared by modules.
default: '{{ ANSIBLE_HOME ~ "/plugins/module_utils:/usr/share/ansible/plugins/module_utils" }}'
env: [{name: ANSIBLE_MODULE_UTILS}]
ini:
- {key: module_utils, section: defaults}
type: pathspec
DEFAULT_NETCONF_PLUGIN_PATH:
name: Netconf Plugins Path
default: '{{ ANSIBLE_HOME ~ "/plugins/netconf:/usr/share/ansible/plugins/netconf" }}'
description: Colon-separated paths in which Ansible will search for Netconf Plugins.
env: [{name: ANSIBLE_NETCONF_PLUGINS}]
ini:
- {key: netconf_plugins, section: defaults}
type: pathspec
DEFAULT_NO_LOG:
name: No log
default: False
description: "Toggle Ansible's display and logging of task details, mainly used to avoid security disclosures."
env: [{name: ANSIBLE_NO_LOG}]
ini:
- {key: no_log, section: defaults}
type: boolean
DEFAULT_NO_TARGET_SYSLOG:
name: No syslog on target
default: False
description:
- Toggle Ansible logging to syslog on the target when it executes tasks. On Windows hosts, this will prevent newer
style PowerShell modules from writing to the event log.
env: [{name: ANSIBLE_NO_TARGET_SYSLOG}]
ini:
- {key: no_target_syslog, section: defaults}
vars:
- name: ansible_no_target_syslog
version_added: '2.10'
type: boolean
yaml: {key: defaults.no_target_syslog}
DEFAULT_NULL_REPRESENTATION:
name: Represent a null
default: ~
description: What templating should return as a 'null' value. When not set it will let Jinja2 decide.
env: [{name: ANSIBLE_NULL_REPRESENTATION}]
ini:
- {key: null_representation, section: defaults}
type: raw
DEFAULT_POLL_INTERVAL:
name: Async poll interval
default: 15
description:
- For asynchronous tasks in Ansible (covered in Asynchronous Actions and Polling),
this is how often to check back on the status of those tasks when an explicit poll interval is not supplied.
The default is a reasonably moderate 15 seconds which is a tradeoff between checking in frequently and
providing a quick turnaround when something may have completed.
env: [{name: ANSIBLE_POLL_INTERVAL}]
ini:
- {key: poll_interval, section: defaults}
type: integer
DEFAULT_PRIVATE_KEY_FILE:
name: Private key file
default: ~
description:
- For connections that use a certificate or key file to authenticate rather than an agent or passwords,
you can set the default value here to avoid re-specifying ``--private-key`` with every invocation.
env: [{name: ANSIBLE_PRIVATE_KEY_FILE}]
ini:
- {key: private_key_file, section: defaults}
type: path
DEFAULT_PRIVATE_ROLE_VARS:
name: Private role variables
default: False
description:
- Makes role variables inaccessible from other roles.
- This was introduced as a way to reset role variables to default values if
a role is used more than once in a playbook.
- Starting in version '2.17' M(ansible.builtin.include_role) and M(ansible.builtin.import_role) can override this via the C(public) parameter.
env: [{name: ANSIBLE_PRIVATE_ROLE_VARS}]
ini:
- {key: private_role_vars, section: defaults}
type: boolean
yaml: {key: defaults.private_role_vars}
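# Sketch of the per-include override mentioned above (the role name is hypothetical; the
# 'public' parameter requires the versions noted in the description):
#   - ansible.builtin.include_role:
#       name: myrole
#       public: false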
DEFAULT_REMOTE_PORT:
name: Remote port
default: ~
description: Port to use in remote connections, when blank it will use the connection plugin default.
env: [{name: ANSIBLE_REMOTE_PORT}]
ini:
- {key: remote_port, section: defaults}
type: integer
yaml: {key: defaults.remote_port}
DEFAULT_REMOTE_USER:
name: Login/Remote User
description:
- Sets the login user for the target machines
- "When blank it uses the connection plugin's default, normally the user currently executing Ansible."
env: [{name: ANSIBLE_REMOTE_USER}]
ini:
- {key: remote_user, section: defaults}
DEFAULT_ROLES_PATH:
name: Roles path
default: '{{ ANSIBLE_HOME ~ "/roles:/usr/share/ansible/roles:/etc/ansible/roles" }}'
description: Colon-separated paths in which Ansible will search for Roles.
env: [{name: ANSIBLE_ROLES_PATH}]
expand_relative_paths: True
ini:
- {key: roles_path, section: defaults}
type: pathspec
yaml: {key: defaults.roles_path}
DEFAULT_SELINUX_SPECIAL_FS:
name: Problematic file systems
default: fuse, nfs, vboxsf, ramfs, 9p, vfat
description:
- "Some filesystems do not support safe operations and/or return inconsistent errors,
this setting makes Ansible 'tolerate' those in the list without causing fatal errors."
- Data corruption may occur and writes are not always verified when a filesystem is in the list.
env:
- name: ANSIBLE_SELINUX_SPECIAL_FS
version_added: "2.9"
ini:
- {key: special_context_filesystems, section: selinux}
type: list
DEFAULT_STDOUT_CALLBACK:
name: Main display callback plugin
default: default
description:
- "Set the main callback used to display Ansible output. You can only have one at a time."
- You can have many other callbacks, but just one can be in charge of stdout.
- See :ref:`callback_plugins` for a list of available options.
env: [{name: ANSIBLE_STDOUT_CALLBACK}]
ini:
- {key: stdout_callback, section: defaults}
EDITOR:
name: editor application to use
default: vi
description:
- for the cases in which Ansible needs to return a file within an editor, this chooses the application to use.
ini:
- section: defaults
key: editor
version_added: '2.15'
env:
- name: ANSIBLE_EDITOR
version_added: '2.15'
- name: EDITOR
ENABLE_TASK_DEBUGGER:
name: Whether to enable the task debugger
default: False
description:
- Whether or not to enable the task debugger; this previously was done as a strategy plugin.
- Now all strategy plugins can inherit this behavior. The debugger defaults to activating when
a task fails or a host is unreachable. Use the debugger keyword for more flexibility.
type: boolean
env: [{name: ANSIBLE_ENABLE_TASK_DEBUGGER}]
ini:
- {key: enable_task_debugger, section: defaults}
version_added: "2.5"
TASK_DEBUGGER_IGNORE_ERRORS:
name: Whether a failed task with ignore_errors=True will still invoke the debugger
default: True
description:
- This option defines whether the task debugger will be invoked on a failed task when ignore_errors=True
is specified.
- When True, the debugger honors ignore_errors and is not invoked for those failures; when False, the debugger is invoked even when ignore_errors is set.
type: boolean
env: [{name: ANSIBLE_TASK_DEBUGGER_IGNORE_ERRORS}]
ini:
- {key: task_debugger_ignore_errors, section: defaults}
version_added: "2.7"
DEFAULT_STRATEGY:
name: Implied strategy
default: 'linear'
description: Set the default strategy used for plays.
env: [{name: ANSIBLE_STRATEGY}]
ini:
- {key: strategy, section: defaults}
version_added: "2.3"
DEFAULT_STRATEGY_PLUGIN_PATH:
name: Strategy Plugins Path
description: Colon-separated paths in which Ansible will search for Strategy Plugins.
default: '{{ ANSIBLE_HOME ~ "/plugins/strategy:/usr/share/ansible/plugins/strategy" }}'
env: [{name: ANSIBLE_STRATEGY_PLUGINS}]
ini:
- {key: strategy_plugins, section: defaults}
type: pathspec
DEFAULT_SU:
default: False
description: 'Toggle the use of "su" for tasks.'
env: [{name: ANSIBLE_SU}]
ini:
- {key: su, section: defaults}
type: boolean
yaml: {key: defaults.su}
DEFAULT_SYSLOG_FACILITY:
name: syslog facility
default: LOG_USER
description: Syslog facility to use when Ansible logs to the remote target.
env: [{name: ANSIBLE_SYSLOG_FACILITY}]
ini:
- {key: syslog_facility, section: defaults}
DEFAULT_TERMINAL_PLUGIN_PATH:
name: Terminal Plugins Path
default: '{{ ANSIBLE_HOME ~ "/plugins/terminal:/usr/share/ansible/plugins/terminal" }}'
description: Colon-separated paths in which Ansible will search for Terminal Plugins.
env: [{name: ANSIBLE_TERMINAL_PLUGINS}]
ini:
- {key: terminal_plugins, section: defaults}
type: pathspec
DEFAULT_TEST_PLUGIN_PATH:
name: Jinja2 Test Plugins Path
description: Colon-separated paths in which Ansible will search for Jinja2 Test Plugins.
default: '{{ ANSIBLE_HOME ~ "/plugins/test:/usr/share/ansible/plugins/test" }}'
env: [{name: ANSIBLE_TEST_PLUGINS}]
ini:
- {key: test_plugins, section: defaults}
type: pathspec
DEFAULT_TIMEOUT:
name: Connection timeout
default: 10
description: This is the default timeout for connection plugins to use.
env: [{name: ANSIBLE_TIMEOUT}]
ini:
- {key: timeout, section: defaults}
type: integer
DEFAULT_TRANSPORT:
name: Connection plugin
default: ssh
description:
- Can be any connection plugin available to your ansible installation.
- There is also a (DEPRECATED) special 'smart' option that will toggle between 'ssh' and 'paramiko' depending on controller OS and ssh versions.
env: [{name: ANSIBLE_TRANSPORT}]
ini:
- {key: transport, section: defaults}
DEFAULT_UNDEFINED_VAR_BEHAVIOR:
name: Jinja2 fail on undefined
default: True
version_added: "1.3"
description:
- When True, this causes ansible templating to fail steps that reference variable names that are likely typoed.
- "Otherwise, any '{{ template_expression }}' that contains undefined variables will be rendered in a template or ansible action line exactly as written."
env: [{name: ANSIBLE_ERROR_ON_UNDEFINED_VARS}]
ini:
- {key: error_on_undefined_vars, section: defaults}
type: boolean
DEFAULT_VARS_PLUGIN_PATH:
name: Vars Plugins Path
default: '{{ ANSIBLE_HOME ~ "/plugins/vars:/usr/share/ansible/plugins/vars" }}'
description: Colon-separated paths in which Ansible will search for Vars Plugins.
env: [{name: ANSIBLE_VARS_PLUGINS}]
ini:
- {key: vars_plugins, section: defaults}
type: pathspec
# TODO: unused?
#DEFAULT_VAR_COMPRESSION_LEVEL:
# default: 0
# description: 'TODO: write it'
# env: [{name: ANSIBLE_VAR_COMPRESSION_LEVEL}]
# ini:
# - {key: var_compression_level, section: defaults}
# type: integer
# yaml: {key: defaults.var_compression_level}
DEFAULT_VAULT_ID_MATCH:
name: Force vault id match
default: False
description: 'If true, decrypting vaults with a vault id will only try the password from the matching vault-id.'
env: [{name: ANSIBLE_VAULT_ID_MATCH}]
ini:
- {key: vault_id_match, section: defaults}
yaml: {key: defaults.vault_id_match}
DEFAULT_VAULT_IDENTITY:
name: Vault id label
default: default
description: 'The label to use for the default vault id label in cases where a vault id label is not provided.'
env: [{name: ANSIBLE_VAULT_IDENTITY}]
ini:
- {key: vault_identity, section: defaults}
yaml: {key: defaults.vault_identity}
VAULT_ENCRYPT_SALT:
name: Vault salt to use for encryption
default: ~
description: 'The salt to use for the vault encryption. If it is not provided, a random salt will be used.'
env: [{name: ANSIBLE_VAULT_ENCRYPT_SALT}]
ini:
- {key: vault_encrypt_salt, section: defaults}
version_added: '2.15'
DEFAULT_VAULT_ENCRYPT_IDENTITY:
name: Vault id to use for encryption
description: 'The vault_id to use for encrypting by default. If multiple vault_ids are provided, this specifies which to use for encryption. The ``--encrypt-vault-id`` CLI option overrides the configured value.'
env: [{name: ANSIBLE_VAULT_ENCRYPT_IDENTITY}]
ini:
- {key: vault_encrypt_identity, section: defaults}
yaml: {key: defaults.vault_encrypt_identity}
DEFAULT_VAULT_IDENTITY_LIST:
name: Default vault ids
default: []
description: 'A list of vault-ids to use by default. Equivalent to multiple ``--vault-id`` args. Vault-ids are tried in order.'
env: [{name: ANSIBLE_VAULT_IDENTITY_LIST}]
ini:
- {key: vault_identity_list, section: defaults}
type: list
yaml: {key: defaults.vault_identity_list}
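# Illustrative example; the labels and password file paths are hypothetical:
#   [defaults]
#   vault_identity_list = dev@~/.vault_pass_dev, prod@~/.vault_pass_prod
# This is equivalent to passing '--vault-id dev@~/.vault_pass_dev --vault-id prod@~/.vault_pass_prod'.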
DEFAULT_VAULT_PASSWORD_FILE:
name: Vault password file
default: ~
description:
- 'The vault password file to use. Equivalent to ``--vault-password-file`` or ``--vault-id``.'
- If executable, it will be run and the resulting stdout will be used as the password.
env: [{name: ANSIBLE_VAULT_PASSWORD_FILE}]
ini:
- {key: vault_password_file, section: defaults}
type: path
yaml: {key: defaults.vault_password_file}
DEFAULT_VERBOSITY:
name: Verbosity
default: 0
description: Sets the default verbosity, equivalent to the number of ``-v`` passed in the command line.
env: [{name: ANSIBLE_VERBOSITY}]
ini:
- {key: verbosity, section: defaults}
type: integer
DEPRECATION_WARNINGS:
name: Deprecation messages
default: True
description: "Toggle to control the showing of deprecation warnings"
env: [{name: ANSIBLE_DEPRECATION_WARNINGS}]
ini:
- {key: deprecation_warnings, section: defaults}
type: boolean
DEVEL_WARNING:
name: Running devel warning
default: True
description: Toggle to control showing warnings related to running devel.
env: [{name: ANSIBLE_DEVEL_WARNING}]
ini:
- {key: devel_warning, section: defaults}
type: boolean
DIFF_ALWAYS:
name: Show differences
default: False
description: Configuration toggle to tell modules to show differences when in 'changed' status, equivalent to ``--diff``.
env: [{name: ANSIBLE_DIFF_ALWAYS}]
ini:
- {key: always, section: diff}
type: bool
DIFF_CONTEXT:
name: Difference context
default: 3
description: Number of lines of context to show when displaying the differences between files.
env: [{name: ANSIBLE_DIFF_CONTEXT}]
ini:
- {key: context, section: diff}
type: integer
DISPLAY_ARGS_TO_STDOUT:
name: Show task arguments
default: False
description:
- "Normally ``ansible-playbook`` will print a header for each task that is run.
These headers will contain the name: field from the task if you specified one.
If you didn't then ``ansible-playbook`` uses the task's action to help you tell which task is presently running.
Sometimes you run many of the same action and so you want more information about the task to differentiate it from others of the same action.
If you set this variable to True in the config then ``ansible-playbook`` will also include the task's arguments in the header."
- "This setting defaults to False because there is a chance that you have sensitive values in your parameters and
you do not want those to be printed."
- "If you set this to True you should be sure that you have secured your environment's stdout
(no one can shoulder surf your screen and you aren't saving stdout to an insecure file) or
made sure that all of your playbooks explicitly added the ``no_log: True`` parameter to tasks that have sensitive values
:ref:`keep_secret_data` for more information."
env: [{name: ANSIBLE_DISPLAY_ARGS_TO_STDOUT}]
ini:
- {key: display_args_to_stdout, section: defaults}
type: boolean
version_added: "2.1"
DISPLAY_SKIPPED_HOSTS:
name: Show skipped results
default: True
description: "Toggle to control displaying skipped task/host entries in a task in the default callback."
env:
- name: ANSIBLE_DISPLAY_SKIPPED_HOSTS
ini:
- {key: display_skipped_hosts, section: defaults}
type: boolean
DOCSITE_ROOT_URL:
name: Root docsite URL
default: https://docs.ansible.com/ansible-core/
description: Root docsite URL used to generate docs URLs in warning/error text;
must be an absolute URL with a valid scheme and trailing slash.
ini:
- {key: docsite_root_url, section: defaults}
version_added: "2.8"
DUPLICATE_YAML_DICT_KEY:
name: Controls ansible behaviour when finding duplicate keys in YAML.
default: warn
description:
- By default, Ansible will issue a warning when a duplicate dict key is encountered in YAML.
- These warnings can be silenced by setting this option to 'ignore', or escalated to a fatal error by setting it to 'error'.
env: [{name: ANSIBLE_DUPLICATE_YAML_DICT_KEY}]
ini:
- {key: duplicate_dict_key, section: defaults}
type: string
choices: &basic_error2
error: issue a 'fatal' error and stop the play
warn: issue a warning but continue
ignore: just continue silently
version_added: "2.9"
ERROR_ON_MISSING_HANDLER:
name: Missing handler error
default: True
description: "Toggle to allow missing handlers to become a warning instead of an error when notifying."
env: [{name: ANSIBLE_ERROR_ON_MISSING_HANDLER}]
ini:
- {key: error_on_missing_handler, section: defaults}
type: boolean
CONNECTION_FACTS_MODULES:
name: Map of connections to fact modules
default:
# use ansible.legacy names on unqualified facts modules to allow library/ overrides
asa: ansible.legacy.asa_facts
cisco.asa.asa: cisco.asa.asa_facts
eos: ansible.legacy.eos_facts
arista.eos.eos: arista.eos.eos_facts
frr: ansible.legacy.frr_facts
frr.frr.frr: frr.frr.frr_facts
ios: ansible.legacy.ios_facts
cisco.ios.ios: cisco.ios.ios_facts
iosxr: ansible.legacy.iosxr_facts
cisco.iosxr.iosxr: cisco.iosxr.iosxr_facts
junos: ansible.legacy.junos_facts
junipernetworks.junos.junos: junipernetworks.junos.junos_facts
nxos: ansible.legacy.nxos_facts
cisco.nxos.nxos: cisco.nxos.nxos_facts
vyos: ansible.legacy.vyos_facts
vyos.vyos.vyos: vyos.vyos.vyos_facts
exos: ansible.legacy.exos_facts
extreme.exos.exos: extreme.exos.exos_facts
slxos: ansible.legacy.slxos_facts
extreme.slxos.slxos: extreme.slxos.slxos_facts
voss: ansible.legacy.voss_facts
extreme.voss.voss: extreme.voss.voss_facts
ironware: ansible.legacy.ironware_facts
community.network.ironware: community.network.ironware_facts
description: "Which modules to run during a play's fact gathering stage based on connection"
type: dict
FACTS_MODULES:
name: Gather Facts Modules
default:
- smart
description:
- "Which modules to run during a play's fact gathering stage, using the default of 'smart' will try to figure it out based on connection type."
- "If adding your own modules but you still want to use the default Ansible facts, you will want to include 'setup'
or corresponding network module to the list (if you add 'smart', Ansible will also figure it out)."
- "This does not affect explicit calls to the 'setup' module, but does always affect the 'gather_facts' action (implicit or explicit)."
env: [{name: ANSIBLE_FACTS_MODULES}]
ini:
- {key: facts_modules, section: defaults}
type: list
vars:
- name: ansible_facts_modules
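# Illustrative example keeping the default facts while adding a custom facts module
# (the second module name is hypothetical):
#   [defaults]
#   facts_modules = setup, my_namespace.my_collection.custom_facts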
GALAXY_IGNORE_CERTS:
name: Galaxy validate certs
description:
- If set to yes, ansible-galaxy will not validate TLS certificates.
This can be useful for testing against a server with a self-signed certificate.
env: [{name: ANSIBLE_GALAXY_IGNORE}]
ini:
- {key: ignore_certs, section: galaxy}
type: boolean
GALAXY_SERVER_TIMEOUT:
name: Default timeout to use for API calls
description:
- The default timeout for Galaxy API calls. Galaxy servers that don't configure a specific timeout will fall back to this value.
env: [{name: ANSIBLE_GALAXY_SERVER_TIMEOUT}]
default: 60
ini:
- {key: server_timeout, section: galaxy}
type: int
GALAXY_ROLE_SKELETON:
name: Galaxy role skeleton directory
description: Role skeleton directory to use as a template for the ``init`` action in ``ansible-galaxy``/``ansible-galaxy role``, same as ``--role-skeleton``.
env: [{name: ANSIBLE_GALAXY_ROLE_SKELETON}]
ini:
- {key: role_skeleton, section: galaxy}
type: path
GALAXY_ROLE_SKELETON_IGNORE:
name: Galaxy role skeleton ignore
default: ["^.git$", "^.*/.git_keep$"]
description: patterns of files to ignore inside a Galaxy role or collection skeleton directory.
env: [{name: ANSIBLE_GALAXY_ROLE_SKELETON_IGNORE}]
ini:
- {key: role_skeleton_ignore, section: galaxy}
type: list
GALAXY_COLLECTION_SKELETON:
name: Galaxy collection skeleton directory
description: Collection skeleton directory to use as a template for the ``init`` action in ``ansible-galaxy collection``, same as ``--collection-skeleton``.
env: [{name: ANSIBLE_GALAXY_COLLECTION_SKELETON}]
ini:
- {key: collection_skeleton, section: galaxy}
type: path
GALAXY_COLLECTION_SKELETON_IGNORE:
name: Galaxy collection skeleton ignore
default: ["^.git$", "^.*/.git_keep$"]
description: patterns of files to ignore inside a Galaxy collection skeleton directory.
env: [{name: ANSIBLE_GALAXY_COLLECTION_SKELETON_IGNORE}]
ini:
- {key: collection_skeleton_ignore, section: galaxy}
type: list
GALAXY_COLLECTIONS_PATH_WARNING:
name: "ansible-galaxy collection install collections path warnings"
description: "whether ``ansible-galaxy collection install`` should warn about ``--collections-path`` missing from configured :ref:`collections_paths`."
default: true
type: bool
env: [{name: ANSIBLE_GALAXY_COLLECTIONS_PATH_WARNING}]
ini:
- {key: collections_path_warning, section: galaxy}
version_added: "2.16"
# TODO: unused?
#GALAXY_SCMS:
# name: Galaxy SCMS
# default: git, hg
# description: Available galaxy source control management systems.
# env: [{name: ANSIBLE_GALAXY_SCMS}]
# ini:
# - {key: scms, section: galaxy}
# type: list
GALAXY_SERVER:
default: https://galaxy.ansible.com
description: "URL to prepend when roles don't specify the full URI, assume they are referencing this server as the source."
env: [{name: ANSIBLE_GALAXY_SERVER}]
ini:
- {key: server, section: galaxy}
yaml: {key: galaxy.server}
GALAXY_SERVER_LIST:
description:
- A list of Galaxy servers to use when installing a collection.
- The value corresponds to the config ini header ``[galaxy_server.{{item}}]`` which defines the server details.
- 'See :ref:`galaxy_server_config` for more details on how to define a Galaxy server.'
- The order of servers in this list is used as the order in which a collection is resolved.
- Setting this config option will ignore the :ref:`galaxy_server` config option.
env: [{name: ANSIBLE_GALAXY_SERVER_LIST}]
ini:
- {key: server_list, section: galaxy}
type: list
version_added: "2.9"
GALAXY_TOKEN_PATH:
default: '{{ ANSIBLE_HOME ~ "/galaxy_token" }}'
description: "Local path to galaxy access token file"
env: [{name: ANSIBLE_GALAXY_TOKEN_PATH}]
ini:
- {key: token_path, section: galaxy}
type: path
version_added: "2.9"
GALAXY_DISPLAY_PROGRESS:
default: ~
description:
- Some steps in ``ansible-galaxy`` display a progress wheel which can cause issues on certain displays or when
outputting the stdout to a file.
- This config option controls whether the display wheel is shown or not.
- The default is to show the display wheel if stdout has a tty.
env: [{name: ANSIBLE_GALAXY_DISPLAY_PROGRESS}]
ini:
- {key: display_progress, section: galaxy}
type: bool
version_added: "2.10"
GALAXY_CACHE_DIR:
default: '{{ ANSIBLE_HOME ~ "/galaxy_cache" }}'
description:
- The directory that stores cached responses from a Galaxy server.
- This is only used by the ``ansible-galaxy collection install`` and ``download`` commands.
- Cache files inside this dir will be ignored if they are world writable.
env:
- name: ANSIBLE_GALAXY_CACHE_DIR
ini:
- section: galaxy
key: cache_dir
type: path
version_added: '2.11'
GALAXY_DISABLE_GPG_VERIFY:
default: false
type: bool
env:
- name: ANSIBLE_GALAXY_DISABLE_GPG_VERIFY
ini:
- section: galaxy
key: disable_gpg_verify
description:
- Disable GPG signature verification during collection installation.
version_added: '2.13'
GALAXY_GPG_KEYRING:
type: path
env:
- name: ANSIBLE_GALAXY_GPG_KEYRING
ini:
- section: galaxy
key: gpg_keyring
description:
- Configure the keyring used for GPG signature verification during collection installation and verification.
version_added: '2.13'
GALAXY_IGNORE_INVALID_SIGNATURE_STATUS_CODES:
type: list
env:
- name: ANSIBLE_GALAXY_IGNORE_SIGNATURE_STATUS_CODES
ini:
- section: galaxy
key: ignore_signature_status_codes
description:
- A list of GPG status codes to ignore during GPG signature verification.
See L(https://github.com/gpg/gnupg/blob/master/doc/DETAILS#general-status-codes) for status code descriptions.
- If fewer signatures successfully verify the collection than `GALAXY_REQUIRED_VALID_SIGNATURE_COUNT`,
signature verification will fail even if all error codes are ignored.
choices:
- EXPSIG
- EXPKEYSIG
- REVKEYSIG
- BADSIG
- ERRSIG
- NO_PUBKEY
- MISSING_PASSPHRASE
- BAD_PASSPHRASE
- NODATA
- UNEXPECTED
- ERROR
- FAILURE
- BADARMOR
- KEYEXPIRED
- KEYREVOKED
- NO_SECKEY
GALAXY_REQUIRED_VALID_SIGNATURE_COUNT:
type: str
default: 1
env:
- name: ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT
ini:
- section: galaxy
key: required_valid_signature_count
description:
- The number of signatures that must be successful during GPG signature verification while installing or verifying collections.
- This should be a positive integer, or 'all' to indicate that all signatures must successfully validate the collection.
- Prepend '+' to the value to fail if no valid signatures are found for the collection.
HOST_KEY_CHECKING:
# NOTE: constant not in use by ssh/paramiko plugins anymore, but they do support the same configuration sources
# TODO: check non ssh connection plugins for use/migration
name: Toggle host/key check
default: True
description:
- Set this to "False" if you want to avoid host key checking by the underlying connection plugin Ansible uses to connect to the host.
- Please read the documentation of the specific connection plugin used for details.
env: [{name: ANSIBLE_HOST_KEY_CHECKING}]
ini:
- {key: host_key_checking, section: defaults}
type: boolean
HOST_PATTERN_MISMATCH:
name: Control host pattern mismatch behaviour
default: 'warning'
description: This setting changes the behaviour of mismatched host patterns; it allows you to force a fatal error, issue a warning, or just ignore it.
env: [{name: ANSIBLE_HOST_PATTERN_MISMATCH}]
ini:
- {key: host_pattern_mismatch, section: inventory}
choices:
<<: *basic_error
version_added: "2.8"
INTERPRETER_PYTHON:
name: Python interpreter path (or automatic discovery behavior) used for module execution
default: auto
env: [{name: ANSIBLE_PYTHON_INTERPRETER}]
ini:
- {key: interpreter_python, section: defaults}
vars:
- {name: ansible_python_interpreter}
version_added: "2.8"
description:
- Path to the Python interpreter to be used for module execution on remote targets, or an automatic discovery mode.
Supported discovery modes are ``auto`` (the default), ``auto_silent``, ``auto_legacy``, and ``auto_legacy_silent``.
All discovery modes employ a lookup table to use the included system Python (on distributions known to include one),
falling back to a fixed ordered list of well-known Python interpreter locations if a platform-specific default is not
available. The fallback behavior will issue a warning that the interpreter should be set explicitly (since interpreters
installed later may change which one is used). This warning behavior can be disabled by setting ``auto_silent`` or
``auto_legacy_silent``. The value of ``auto_legacy`` provides all the same behavior, but for backward-compatibility
with older Ansible releases that always defaulted to ``/usr/bin/python``, will use that interpreter if present.
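# Illustrative ways to pin or quiet interpreter discovery (the host name and path are hypothetical):
#   in inventory:  appserver1 ansible_python_interpreter=/usr/bin/python3.11
#   globally:      export ANSIBLE_PYTHON_INTERPRETER=auto_silent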
_INTERPRETER_PYTHON_DISTRO_MAP:
name: Mapping of known included platform pythons for various Linux distros
default:
redhat:
'6': /usr/bin/python
'8': /usr/libexec/platform-python
'9': /usr/bin/python3
debian:
'8': /usr/bin/python
'10': /usr/bin/python3
fedora:
'23': /usr/bin/python3
ubuntu:
'14': /usr/bin/python
'16': /usr/bin/python3
version_added: "2.8"
# FUTURE: add inventory override once we're sure it can't be abused by a rogue target
# FUTURE: add a platform layer to the map so we could use for, eg, freebsd/macos/etc?
INTERPRETER_PYTHON_FALLBACK:
name: Ordered list of Python interpreters to check for in discovery
default:
- python3.12
- python3.11
- python3.10
- python3.9
- python3.8
- python3.7
- /usr/bin/python3
- /usr/libexec/platform-python
- /usr/bin/python
- python
vars:
- name: ansible_interpreter_python_fallback
type: list
version_added: "2.8"
TRANSFORM_INVALID_GROUP_CHARS:
name: Transform invalid characters in group names
default: 'never'
description:
- Make ansible transform invalid characters in group names supplied by inventory sources.
env: [{name: ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS}]
ini:
- {key: force_valid_group_names, section: defaults}
type: string
choices:
always: it will replace any invalid characters with '_' (underscore) and warn the user
never: it will allow for the group name but warn about the issue
ignore: it does the same as 'never', without issuing a warning
silently: it does the same as 'always', without issuing a warning
version_added: '2.8'
INVALID_TASK_ATTRIBUTE_FAILED:
name: Controls whether invalid attributes for a task result in errors instead of warnings
default: True
description: If 'false', invalid attributes for a task will result in warnings instead of errors.
type: boolean
env:
- name: ANSIBLE_INVALID_TASK_ATTRIBUTE_FAILED
ini:
- key: invalid_task_attribute_failed
section: defaults
version_added: "2.7"
INVENTORY_ANY_UNPARSED_IS_FAILED:
name: Controls whether any unparsable inventory source is a fatal error
default: False
description: >
If 'true', it is a fatal error when any given inventory source
cannot be successfully parsed by any available inventory plugin;
otherwise, this situation only attracts a warning.
type: boolean
env: [{name: ANSIBLE_INVENTORY_ANY_UNPARSED_IS_FAILED}]
ini:
- {key: any_unparsed_is_failed, section: inventory}
version_added: "2.7"
INVENTORY_CACHE_ENABLED:
name: Inventory caching enabled
default: False
description:
- Toggle to turn on inventory caching.
- This setting has been moved to the individual inventory plugins as a plugin option :ref:`inventory_plugins`.
- The existing configuration settings are still accepted with the inventory plugin adding additional options from inventory configuration.
- This message will be removed in 2.16.
env: [{name: ANSIBLE_INVENTORY_CACHE}]
ini:
- {key: cache, section: inventory}
type: bool
INVENTORY_CACHE_PLUGIN:
name: Inventory cache plugin
description:
- The plugin for caching inventory.
- This setting has been moved to the individual inventory plugins as a plugin option :ref:`inventory_plugins`.
- The existing configuration settings are still accepted with the inventory plugin adding additional options from inventory and fact cache configuration.
- This message will be removed in 2.16.
env: [{name: ANSIBLE_INVENTORY_CACHE_PLUGIN}]
ini:
- {key: cache_plugin, section: inventory}
INVENTORY_CACHE_PLUGIN_CONNECTION:
name: Inventory cache plugin URI to override the defaults section
description:
- The inventory cache connection.
- This setting has been moved to the individual inventory plugins as a plugin option :ref:`inventory_plugins`.
- The existing configuration settings are still accepted with the inventory plugin adding additional options from inventory and fact cache configuration.
- This message will be removed in 2.16.
env: [{name: ANSIBLE_INVENTORY_CACHE_CONNECTION}]
ini:
- {key: cache_connection, section: inventory}
INVENTORY_CACHE_PLUGIN_PREFIX:
name: Inventory cache plugin table prefix
description:
- The table prefix for the cache plugin.
- This setting has been moved to the individual inventory plugins as a plugin option :ref:`inventory_plugins`.
- The existing configuration settings are still accepted with the inventory plugin adding additional options from inventory and fact cache configuration.
- This message will be removed in 2.16.
env: [{name: ANSIBLE_INVENTORY_CACHE_PLUGIN_PREFIX}]
default: ansible_inventory_
ini:
- {key: cache_prefix, section: inventory}
INVENTORY_CACHE_TIMEOUT:
name: Inventory cache plugin expiration timeout
description:
- Expiration timeout for the inventory cache plugin data.
- This setting has been moved to the individual inventory plugins as a plugin option :ref:`inventory_plugins`.
- The existing configuration settings are still accepted with the inventory plugin adding additional options from inventory and fact cache configuration.
- This message will be removed in 2.16.
default: 3600
env: [{name: ANSIBLE_INVENTORY_CACHE_TIMEOUT}]
ini:
- {key: cache_timeout, section: inventory}
INVENTORY_ENABLED:
name: Active Inventory plugins
default: ['host_list', 'script', 'auto', 'yaml', 'ini', 'toml']
description: List of enabled inventory plugins, it also determines the order in which they are used.
env: [{name: ANSIBLE_INVENTORY_ENABLED}]
ini:
- {key: enable_plugins, section: inventory}
type: list
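# Example of restricting inventory parsing to specific plugins; the last entry assumes the
# amazon.aws collection is installed:
#   [inventory]
#   enable_plugins = ini, yaml, amazon.aws.aws_ec2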
INVENTORY_EXPORT:
name: Set ansible-inventory into export mode
default: False
description: Controls if ansible-inventory will accurately reflect Ansible's view into inventory or if it is optimized for exporting.
env: [{name: ANSIBLE_INVENTORY_EXPORT}]
ini:
- {key: export, section: inventory}
type: bool
INVENTORY_IGNORE_EXTS:
name: Inventory ignore extensions
default: "{{(REJECT_EXTS + ('.orig', '.ini', '.cfg', '.retry'))}}"
description: List of extensions to ignore when using a directory as an inventory source.
env: [{name: ANSIBLE_INVENTORY_IGNORE}]
ini:
- {key: inventory_ignore_extensions, section: defaults}
- {key: ignore_extensions, section: inventory}
type: list
INVENTORY_IGNORE_PATTERNS:
name: Inventory ignore patterns
default: []
description: List of patterns to ignore when using a directory as an inventory source.
env: [{name: ANSIBLE_INVENTORY_IGNORE_REGEX}]
ini:
- {key: inventory_ignore_patterns, section: defaults}
- {key: ignore_patterns, section: inventory}
type: list
INVENTORY_UNPARSED_IS_FAILED:
name: Unparsed Inventory failure
default: False
description: >
If 'true' it is a fatal error if every single potential inventory
source fails to parse, otherwise, this situation will only attract a
warning.
env: [{name: ANSIBLE_INVENTORY_UNPARSED_FAILED}]
ini:
- {key: unparsed_is_failed, section: inventory}
type: bool
MAX_FILE_SIZE_FOR_DIFF:
name: Diff maximum file size
default: 104448
description: Maximum size of files to be considered for diff display.
env: [{name: ANSIBLE_MAX_DIFF_SIZE}]
ini:
- {key: max_diff_size, section: defaults}
type: int
NETWORK_GROUP_MODULES:
name: Network module families
default: [eos, nxos, ios, iosxr, junos, enos, ce, vyos, sros, dellos9, dellos10, dellos6, asa, aruba, aireos, bigip, ironware, onyx, netconf, exos, voss, slxos]
description: 'TODO: write it'
env:
- name: ANSIBLE_NETWORK_GROUP_MODULES
ini:
- {key: network_group_modules, section: defaults}
type: list
yaml: {key: defaults.network_group_modules}
INJECT_FACTS_AS_VARS:
default: True
description:
- Facts are available inside the `ansible_facts` variable; this setting also pushes them as their own vars in the main namespace.
- Unlike inside the `ansible_facts` dictionary, these will have an `ansible_` prefix.
env: [{name: ANSIBLE_INJECT_FACT_VARS}]
ini:
- {key: inject_facts_as_vars, section: defaults}
type: boolean
version_added: "2.5"
MODULE_IGNORE_EXTS:
name: Module ignore extensions
default: "{{(REJECT_EXTS + ('.yaml', '.yml', '.ini'))}}"
description:
- List of extensions to ignore when looking for modules to load.
- This is for rejecting script and binary module fallback extensions.
env: [{name: ANSIBLE_MODULE_IGNORE_EXTS}]
ini:
- {key: module_ignore_exts, section: defaults}
type: list
MODULE_STRICT_UTF8_RESPONSE:
name: Module strict UTF-8 response
description:
- Controls whether module responses are evaluated for containing non-UTF-8 data.
- Disabling this may result in unexpected behavior.
- Only ansible-core should evaluate this configuration.
env: [{name: ANSIBLE_MODULE_STRICT_UTF8_RESPONSE}]
ini:
- {key: module_strict_utf8_response, section: defaults}
type: bool
default: True
OLD_PLUGIN_CACHE_CLEARING:
description: Previously Ansible would only clear some of the plugin loading caches when loading new roles; this led to some behaviors in which a plugin loaded in previous plays would be unexpectedly 'sticky'. This setting allows the user to return to that behavior.
env: [{name: ANSIBLE_OLD_PLUGIN_CACHE_CLEAR}]
ini:
- {key: old_plugin_cache_clear, section: defaults}
type: boolean
default: False
version_added: "2.8"
PAGER:
name: pager application to use
default: less
description:
- for the cases in which Ansible needs to return output in a pageable fashion, this chooses the application to use.
ini:
- section: defaults
key: pager
version_added: '2.15'
env:
- name: ANSIBLE_PAGER
version_added: '2.15'
- name: PAGER
PARAMIKO_HOST_KEY_AUTO_ADD:
default: False
description: 'TODO: write it'
env: [{name: ANSIBLE_PARAMIKO_HOST_KEY_AUTO_ADD}]
ini:
- {key: host_key_auto_add, section: paramiko_connection}
type: boolean
deprecated:
why: This option was moved to the plugin itself
version: "2.20"
alternatives: Use the option from the plugin itself.
PARAMIKO_LOOK_FOR_KEYS:
name: look for keys
default: True
description: 'TODO: write it'
env: [{name: ANSIBLE_PARAMIKO_LOOK_FOR_KEYS}]
ini:
- {key: look_for_keys, section: paramiko_connection}
type: boolean
deprecated:
why: This option was moved to the plugin itself
version: "2.20"
alternatives: Use the option from the plugin itself.
PERSISTENT_CONTROL_PATH_DIR:
name: Persistence socket path
default: '{{ ANSIBLE_HOME ~ "/pc" }}'
description: Path to the socket to be used by the connection persistence system.
env: [{name: ANSIBLE_PERSISTENT_CONTROL_PATH_DIR}]
ini:
- {key: control_path_dir, section: persistent_connection}
type: path
PERSISTENT_CONNECT_TIMEOUT:
name: Persistence timeout
default: 30
description: This controls how long the persistent connection will remain idle before it is destroyed.
env: [{name: ANSIBLE_PERSISTENT_CONNECT_TIMEOUT}]
ini:
- {key: connect_timeout, section: persistent_connection}
type: integer
PERSISTENT_CONNECT_RETRY_TIMEOUT:
name: Persistence connection retry timeout
default: 15
description: This controls the retry timeout for persistent connection to connect to the local domain socket.
env: [{name: ANSIBLE_PERSISTENT_CONNECT_RETRY_TIMEOUT}]
ini:
- {key: connect_retry_timeout, section: persistent_connection}
type: integer
PERSISTENT_COMMAND_TIMEOUT:
name: Persistence command timeout
default: 30
description: This controls the amount of time to wait for a response from a remote device before timing out a persistent connection.
env: [{name: ANSIBLE_PERSISTENT_COMMAND_TIMEOUT}]
ini:
- {key: command_timeout, section: persistent_connection}
type: int
PLAYBOOK_DIR:
name: playbook dir override for non-playbook CLIs (ala --playbook-dir)
version_added: "2.9"
description:
- A number of non-playbook CLIs have a ``--playbook-dir`` argument; this sets the default value for it.
env: [{name: ANSIBLE_PLAYBOOK_DIR}]
ini: [{key: playbook_dir, section: defaults}]
type: path
PLAYBOOK_VARS_ROOT:
name: playbook vars files root
default: top
version_added: "2.4.1"
description:
- This sets which playbook dirs will be used as a root to process vars plugins, which includes finding host_vars/group_vars.
env: [{name: ANSIBLE_PLAYBOOK_VARS_ROOT}]
ini:
- {key: playbook_vars_root, section: defaults}
choices:
top: follows the traditional behavior of using the top playbook in the chain to find the root directory.
bottom: follows the 2.4.0 behavior of using the current playbook to find the root directory.
all: examines from the first parent to the current playbook.
PLUGIN_FILTERS_CFG:
name: Config file for limiting valid plugins
default: null
version_added: "2.5.0"
description:
- "A path to configuration for filtering which plugins installed on the system are allowed to be used."
- "See :ref:`plugin_filtering_config` for details of the filter file's format."
- " The default is /etc/ansible/plugin_filters.yml"
ini:
- key: plugin_filters_cfg
section: defaults
type: path
PYTHON_MODULE_RLIMIT_NOFILE:
name: Adjust maximum file descriptor soft limit during Python module execution
description:
- Attempts to set RLIMIT_NOFILE soft limit to the specified value when executing Python modules (can speed up subprocess usage on
Python 2.x. See https://bugs.python.org/issue11284). The value will be limited by the existing hard limit. Default
value of 0 does not attempt to adjust existing system-defined limits.
default: 0
env:
- {name: ANSIBLE_PYTHON_MODULE_RLIMIT_NOFILE}
ini:
- {key: python_module_rlimit_nofile, section: defaults}
vars:
- {name: ansible_python_module_rlimit_nofile}
version_added: '2.8'
RETRY_FILES_ENABLED:
name: Retry files
default: False
description: This controls whether a failed Ansible playbook should create a .retry file.
env: [{name: ANSIBLE_RETRY_FILES_ENABLED}]
ini:
- {key: retry_files_enabled, section: defaults}
type: bool
RETRY_FILES_SAVE_PATH:
name: Retry files path
default: ~
description:
- This sets the path in which Ansible will save .retry files when a playbook fails and retry files are enabled.
- This file will be overwritten after each run with the list of failed hosts from all plays.
env: [{name: ANSIBLE_RETRY_FILES_SAVE_PATH}]
ini:
- {key: retry_files_save_path, section: defaults}
type: path
RUN_VARS_PLUGINS:
name: When should vars plugins run relative to inventory
default: demand
description:
- This setting can be used to optimize vars_plugin usage depending on the user's inventory size and play selection.
env: [{name: ANSIBLE_RUN_VARS_PLUGINS}]
ini:
- {key: run_vars_plugins, section: defaults}
type: str
choices:
demand: will run vars_plugins relative to inventory sources anytime vars are 'demanded' by tasks.
start: will run vars_plugins relative to inventory sources after importing that inventory source.
version_added: "2.10"
SHOW_CUSTOM_STATS:
name: Display custom stats
default: False
description: 'This adds the custom stats set via the set_stats plugin to the default output.'
env: [{name: ANSIBLE_SHOW_CUSTOM_STATS}]
ini:
- {key: show_custom_stats, section: defaults}
type: bool
STRING_TYPE_FILTERS:
name: Filters to preserve strings
default: [string, to_json, to_nice_json, to_yaml, to_nice_yaml, ppretty, json]
description:
- "This list of filters avoids 'type conversion' when templating variables."
- Useful when you want to avoid conversion into lists or dictionaries for JSON strings, for example.
env: [{name: ANSIBLE_STRING_TYPE_FILTERS}]
ini:
- {key: dont_type_filters, section: jinja2}
type: list
SYSTEM_WARNINGS:
name: System warnings
default: True
description:
- Allows disabling of warnings related to potential issues on the system running Ansible itself (not on the managed hosts).
- These may include warnings about third-party packages or other conditions that should be resolved if possible.
env: [{name: ANSIBLE_SYSTEM_WARNINGS}]
ini:
- {key: system_warnings, section: defaults}
type: boolean
TAGS_RUN:
name: Run Tags
default: []
type: list
  description: Default list of tags to run in your plays; Skip Tags has precedence over it.
env: [{name: ANSIBLE_RUN_TAGS}]
ini:
- {key: run, section: tags}
version_added: "2.5"
TAGS_SKIP:
name: Skip Tags
default: []
type: list
  description: Default list of tags to skip in your plays; has precedence over Run Tags.
env: [{name: ANSIBLE_SKIP_TAGS}]
ini:
- {key: skip, section: tags}
version_added: "2.5"
TASK_TIMEOUT:
name: Task Timeout
default: 0
description:
- Set the maximum time (in seconds) that a task can run for.
- If set to 0 (the default) there is no timeout.
env: [{name: ANSIBLE_TASK_TIMEOUT}]
ini:
- {key: task_timeout, section: defaults}
type: integer
version_added: '2.10'
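# Illustrative example only (not part of the TASK_TIMEOUT definition above): the option
# can be supplied through the environment variable or the ini key listed above, e.g.
#   ANSIBLE_TASK_TIMEOUT=300 ansible-playbook site.yml
# or, in ansible.cfg:
#   [defaults]
#   task_timeout = 300
# The value 300 and the playbook name are placeholders chosen for this example.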
WORKER_SHUTDOWN_POLL_COUNT:
name: Worker Shutdown Poll Count
default: 0
description:
- The maximum number of times to check Task Queue Manager worker processes to verify they have exited cleanly.
- After this limit is reached any worker processes still running will be terminated.
- This is for internal use only.
env: [{name: ANSIBLE_WORKER_SHUTDOWN_POLL_COUNT}]
type: integer
version_added: '2.10'
WORKER_SHUTDOWN_POLL_DELAY:
name: Worker Shutdown Poll Delay
default: 0.1
description:
- The number of seconds to sleep between polling loops when checking Task Queue Manager worker processes to verify they have exited cleanly.
- This is for internal use only.
env: [{name: ANSIBLE_WORKER_SHUTDOWN_POLL_DELAY}]
type: float
version_added: '2.10'
USE_PERSISTENT_CONNECTIONS:
name: Persistence
default: False
description: Toggles the use of persistence for connections.
env: [{name: ANSIBLE_USE_PERSISTENT_CONNECTIONS}]
ini:
- {key: use_persistent_connections, section: defaults}
type: boolean
VARIABLE_PLUGINS_ENABLED:
name: Vars plugin enabled list
default: ['host_group_vars']
description: Accept list for variable plugins that require it.
env: [{name: ANSIBLE_VARS_ENABLED}]
ini:
- {key: vars_plugins_enabled, section: defaults}
type: list
version_added: "2.10"
VARIABLE_PRECEDENCE:
name: Group variable precedence
default: ['all_inventory', 'groups_inventory', 'all_plugins_inventory', 'all_plugins_play', 'groups_plugins_inventory', 'groups_plugins_play']
  description: Allows changing the group variable precedence merge order.
env: [{name: ANSIBLE_PRECEDENCE}]
ini:
- {key: precedence, section: defaults}
type: list
version_added: "2.4"
WIN_ASYNC_STARTUP_TIMEOUT:
name: Windows Async Startup Timeout
default: 5
description:
- For asynchronous tasks in Ansible (covered in Asynchronous Actions and Polling),
this is how long, in seconds, to wait for the task spawned by Ansible to connect back to the named pipe used
on Windows systems. The default is 5 seconds. This can be too low on slower systems, or systems under heavy load.
- This is not the total time an async command can run for, but is a separate timeout to wait for an async command to
start. The task will only start to be timed against its async_timeout once it has connected to the pipe, so the
overall maximum duration the task can take will be extended by the amount specified here.
env: [{name: ANSIBLE_WIN_ASYNC_STARTUP_TIMEOUT}]
ini:
- {key: win_async_startup_timeout, section: defaults}
type: integer
vars:
- {name: ansible_win_async_startup_timeout}
version_added: '2.10'
YAML_FILENAME_EXTENSIONS:
name: Valid YAML extensions
default: [".yml", ".yaml", ".json"]
description:
- "Check all of these extensions when looking for 'variable' files which should be YAML or JSON or vaulted versions of these."
- 'This affects vars_files, include_vars, inventory and vars plugins among others.'
env:
- name: ANSIBLE_YAML_FILENAME_EXT
ini:
- section: defaults
key: yaml_valid_extensions
type: list
NETCONF_SSH_CONFIG:
  description: This variable is used to enable a bastion/jump host with the netconf connection. If set to True, the bastion/jump
    host SSH settings should be present in the ~/.ssh/config file; alternatively, it can be set
    to a custom SSH configuration file path from which to read the bastion/jump host settings.
env: [{name: ANSIBLE_NETCONF_SSH_CONFIG}]
ini:
- {key: ssh_config, section: netconf_connection}
yaml: {key: netconf_connection.ssh_config}
default: null
STRING_CONVERSION_ACTION:
version_added: '2.8'
description:
- Action to take when a module parameter value is converted to a string (this does not affect variables).
For string parameters, values such as '1.00', "['a', 'b',]", and 'yes', 'y', etc.
will be converted by the YAML parser unless fully quoted.
- Valid options are 'error', 'warn', and 'ignore'.
- Since 2.8, this option defaults to 'warn' but will change to 'error' in 2.12.
default: 'warn'
env:
- name: ANSIBLE_STRING_CONVERSION_ACTION
ini:
- section: defaults
key: string_conversion_action
type: string
deprecated:
why: This option is no longer used in the Ansible Core code base.
version: "2.19"
alternatives: There is no alternative at the moment. A different mechanism would have to be implemented in the current code base.
VALIDATE_ACTION_GROUP_METADATA:
version_added: '2.12'
description:
- A toggle to disable validating a collection's 'metadata' entry for a module_defaults action group.
Metadata containing unexpected fields or value types will produce a warning when this is True.
default: True
env: [{name: ANSIBLE_VALIDATE_ACTION_GROUP_METADATA}]
ini:
- section: defaults
key: validate_action_group_metadata
type: bool
VERBOSE_TO_STDERR:
version_added: '2.8'
description:
- Force 'verbose' option to use stderr instead of stdout
default: False
env:
- name: ANSIBLE_VERBOSE_TO_STDERR
ini:
- section: defaults
key: verbose_to_stderr
type: bool
...
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,710 |
Configurable sampling/transfer of control-side task context metadata to targets
|
### Summary
We're often asked how to include arbitrary control-side contextual metadata with task invocations, and to include that metadata in target-side task log messages. e.g.: sending an AWX/Controller Job ID to the target hosts on each module invocation that occurred from that job, and logging it in the module-generated syslog/Windows Application Event Log entries for future correlation with the owning job.
I've not seen any consensus on precisely *which* data to include; one person's "critical forensic correlation data" is another's "unacceptable disclosure of sensitive execution detail". Seems like we'd need a generic facility to specify environment vars and/or hostvars to sample on the control host to be included with task invocations (under a reserved dictionary arg), and adjust the module logging APIs to include them.
My initial thought is to define a new core config element (defaulting to none) that allows the user to define a templated expression that would be rendered as part of each task's templating under a host context. The rendered result would be sent to modules as a new reserved internal module var. The module logging APIs would then include this value verbatim, when present. Other module code would also have access to the value, which could be used for anything. The new config would be settable either via ansible.cfg or an envvar, making it easier for AWX/Controller to later provide a mechanism to configure it for jobs using core versions that support it, while older versions would just silently ignore it.
Maybe something like:
```
ANSIBLE_ADDITIONAL_TASK_CONTEXT='{{awx_job_id}}'
```
When this config is non-empty, the defined template would be rendered for each task/host invocation, and its result included in a new `_ansible_additional_task_context` reserved module var. The resulting value, as with any Ansible template expression, could be of arbitrary complexity (eg, returning a data structure instead of just a scalar). The module logging APIs would include the serialized value verbatim in log messages when it is present, eg "ansible_additional_task_context=(whatever the value was)".
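To make the target-side half of this more concrete, here is a minimal, non-authoritative sketch of how a module-side logging path could append the rendered value. The helper name and its placement are hypothetical; only the reserved var name `_ansible_additional_task_context` comes from the proposal above:
```python
import json

# Hypothetical sketch only -- neither this helper nor its placement exists in
# ansible-core today; "_ansible_additional_task_context" is the reserved module
# var name proposed above, assumed to already carry the control-side rendered value.
def append_task_context(log_msg, module_params):
    context = module_params.get('_ansible_additional_task_context')
    if context is not None:
        # Include the rendered value verbatim; it may be a scalar or a structure.
        log_msg = '%s ansible_additional_task_context=%s' % (log_msg, json.dumps(context))
    return log_msg
```
Whichever shape this ends up taking, core would only transport and log the value verbatim; interpreting it would remain entirely up to the user or AWX/Controller.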
### Issue Type
Feature Idea
### Component Name
module invocation and logging
### Additional Information
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81710
|
https://github.com/ansible/ansible/pull/81711
|
4208bdbbcd994251579409ad533b40c9b0543550
|
1dd0d6fad70d7d4f423dac41822da65ff9ec95ef
| 2023-09-18T16:35:01Z |
python
| 2023-11-30T18:12:55Z |
lib/ansible/module_utils/basic.py
|
# Copyright (c), Michael DeHaan <[email protected]>, 2012-2013
# Copyright (c), Toshio Kuratomi <[email protected]> 2016
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
from __future__ import annotations
import json
import sys
# Used for determining if the system is running a new enough python version
# and should only restrict on our documented minimum versions
_PY_MIN = (3, 7)
if sys.version_info < _PY_MIN:
print(json.dumps(dict(
failed=True,
msg=f"ansible-core requires a minimum of Python version {'.'.join(map(str, _PY_MIN))}. Current version: {''.join(sys.version.splitlines())}",
)))
sys.exit(1)
# Ansible modules can be written in any language.
# The functions available here can be used to do many common tasks,
# to simplify development of Python modules.
import __main__
import atexit
import errno
import datetime
import grp
import fcntl
import locale
import os
import pwd
import platform
import re
import select
import selectors
import shlex
import shutil
import signal
import stat
import subprocess
import tempfile
import time
import traceback
import types
from itertools import chain, repeat
try:
import syslog
HAS_SYSLOG = True
except ImportError:
HAS_SYSLOG = False
try:
from systemd import journal, daemon as systemd_daemon
# Makes sure that systemd.journal has method sendv()
# Double check that journal has method sendv (some packages don't)
# check if the system is running under systemd
has_journal = hasattr(journal, 'sendv') and systemd_daemon.booted()
except (ImportError, AttributeError):
# AttributeError would be caused from use of .booted() if wrong systemd
has_journal = False
HAVE_SELINUX = False
try:
from ansible.module_utils.compat import selinux
HAVE_SELINUX = True
except ImportError:
pass
# Python2 & 3 way to get NoneType
NoneType = type(None)
from ._text import to_native, to_bytes, to_text
from ansible.module_utils.common.text.converters import (
jsonify,
container_to_bytes as json_dict_unicode_to_bytes,
container_to_text as json_dict_bytes_to_unicode,
)
from ansible.module_utils.common.arg_spec import ModuleArgumentSpecValidator
from ansible.module_utils.common.text.formatters import (
lenient_lowercase,
bytes_to_human,
human_to_bytes,
SIZE_RANGES,
)
import hashlib
def _get_available_hash_algorithms():
"""Return a dictionary of available hash function names and their associated function."""
try:
# Algorithms available in Python 2.7.9+ and Python 3.2+
# https://docs.python.org/2.7/library/hashlib.html#hashlib.algorithms_available
# https://docs.python.org/3.2/library/hashlib.html#hashlib.algorithms_available
algorithm_names = hashlib.algorithms_available
except AttributeError:
# Algorithms in Python 2.7.x (used only for Python 2.7.0 through 2.7.8)
# https://docs.python.org/2.7/library/hashlib.html#hashlib.hashlib.algorithms
algorithm_names = set(hashlib.algorithms)
algorithms = {}
for algorithm_name in algorithm_names:
algorithm_func = getattr(hashlib, algorithm_name, None)
if algorithm_func:
try:
# Make sure the algorithm is actually available for use.
# Not all algorithms listed as available are actually usable.
# For example, md5 is not available in FIPS mode.
algorithm_func()
except Exception:
pass
else:
algorithms[algorithm_name] = algorithm_func
return algorithms
AVAILABLE_HASH_ALGORITHMS = _get_available_hash_algorithms()
from ansible.module_utils.six.moves.collections_abc import (
KeysView,
Mapping, MutableMapping,
Sequence, MutableSequence,
Set, MutableSet,
)
from ansible.module_utils.common.locale import get_best_parsable_locale
from ansible.module_utils.common.process import get_bin_path
from ansible.module_utils.common.file import (
_PERM_BITS as PERM_BITS,
_EXEC_PERM_BITS as EXEC_PERM_BITS,
_DEFAULT_PERM as DEFAULT_PERM,
is_executable,
format_attributes,
get_flags_from_attributes,
FILE_ATTRIBUTES,
)
from ansible.module_utils.common.sys_info import (
get_distribution,
get_distribution_version,
get_platform_subclass,
)
from ansible.module_utils.common.parameters import (
env_fallback,
remove_values,
sanitize_keys,
DEFAULT_TYPE_VALIDATORS,
PASS_VARS,
PASS_BOOLS,
)
from ansible.module_utils.errors import AnsibleFallbackNotFound, AnsibleValidationErrorMultiple, UnsupportedError
from ansible.module_utils.six import (
PY2,
PY3,
b,
binary_type,
integer_types,
iteritems,
string_types,
text_type,
)
from ansible.module_utils.six.moves import map, reduce, shlex_quote
from ansible.module_utils.common.validation import (
check_missing_parameters,
safe_eval,
)
from ansible.module_utils.common._utils import get_all_subclasses as _get_all_subclasses
from ansible.module_utils.parsing.convert_bool import BOOLEANS, BOOLEANS_FALSE, BOOLEANS_TRUE, boolean
from ansible.module_utils.common.warnings import (
deprecate,
get_deprecation_messages,
get_warning_messages,
warn,
)
# Note: When getting Sequence from collections, it matches with strings. If
# this matters, make sure to check for strings before checking for sequencetype
SEQUENCETYPE = frozenset, KeysView, Sequence
PASSWORD_MATCH = re.compile(r'^(?:.+[-_\s])?pass(?:[-_\s]?(?:word|phrase|wrd|wd)?)(?:[-_\s].+)?$', re.I)
imap = map
try:
# Python 2
unicode # type: ignore[used-before-def] # pylint: disable=used-before-assignment
except NameError:
# Python 3
unicode = text_type
try:
# Python 2
basestring # type: ignore[used-before-def,has-type] # pylint: disable=used-before-assignment
except NameError:
# Python 3
basestring = string_types
# End of deprecated names
# Internal global holding passed in params. This is consulted in case
# multiple AnsibleModules are created. Otherwise each AnsibleModule would
# attempt to read from stdin. Other code should not use this directly as it
# is an internal implementation detail
_ANSIBLE_ARGS = None
FILE_COMMON_ARGUMENTS = dict(
    # Common file arguments: options for setting metadata (mode, ownership, permissions in general) on
    # created files (these are used by set_fs_attributes_if_different and included in
    # load_file_common_arguments)
mode=dict(type='raw'),
owner=dict(type='str'),
group=dict(type='str'),
seuser=dict(type='str'),
serole=dict(type='str'),
selevel=dict(type='str'),
setype=dict(type='str'),
attributes=dict(type='str', aliases=['attr']),
unsafe_writes=dict(type='bool', default=False, fallback=(env_fallback, ['ANSIBLE_UNSAFE_WRITES'])), # should be available to any module using atomic_move
)
PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
# Used for parsing symbolic file perms
MODE_OPERATOR_RE = re.compile(r'[+=-]')
USERS_RE = re.compile(r'^[ugo]+$')
PERMS_RE = re.compile(r'^[rwxXstugo]*$')
#
# Deprecated functions
#
def get_platform():
'''
**Deprecated** Use :py:func:`platform.system` directly.
:returns: Name of the platform the module is running on in a native string
Returns a native string that labels the platform ("Linux", "Solaris", etc). Currently, this is
the result of calling :py:func:`platform.system`.
'''
return platform.system()
# End deprecated functions
#
# Compat shims
#
def load_platform_subclass(cls, *args, **kwargs):
"""**Deprecated**: Use ansible.module_utils.common.sys_info.get_platform_subclass instead"""
platform_cls = get_platform_subclass(cls)
return super(cls, platform_cls).__new__(platform_cls)
def get_all_subclasses(cls):
"""**Deprecated**: Use ansible.module_utils.common._utils.get_all_subclasses instead"""
return list(_get_all_subclasses(cls))
# End compat shims
def heuristic_log_sanitize(data, no_log_values=None):
''' Remove strings that look like passwords from log messages '''
# Currently filters:
# user:pass@foo/whatever and http://username:pass@wherever/foo
# This code has false positives and consumes parts of logs that are
# not passwds
# begin: start of a passwd containing string
# end: end of a passwd containing string
# sep: char between user and passwd
# prev_begin: where in the overall string to start a search for
# a passwd
# sep_search_end: where in the string to end a search for the sep
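    # Illustrative example of the overall effect (value chosen for this comment only):
    #   heuristic_log_sanitize('http://user:secret@host/path')
    #   -> 'http://user:********@host/path'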
data = to_native(data)
output = []
begin = len(data)
prev_begin = begin
sep = 1
while sep:
# Find the potential end of a passwd
try:
end = data.rindex('@', 0, begin)
except ValueError:
# No passwd in the rest of the data
output.insert(0, data[0:begin])
break
# Search for the beginning of a passwd
sep = None
sep_search_end = end
while not sep:
# URL-style username+password
try:
begin = data.rindex('://', 0, sep_search_end)
except ValueError:
# No url style in the data, check for ssh style in the
# rest of the string
begin = 0
# Search for separator
try:
sep = data.index(':', begin + 3, end)
except ValueError:
# No separator; choices:
if begin == 0:
# Searched the whole string so there's no password
# here. Return the remaining data
output.insert(0, data[0:prev_begin])
break
# Search for a different beginning of the password field.
sep_search_end = begin
continue
if sep:
# Password was found; remove it.
output.insert(0, data[end:prev_begin])
output.insert(0, '********')
output.insert(0, data[begin:sep + 1])
prev_begin = begin
output = ''.join(output)
if no_log_values:
output = remove_values(output, no_log_values)
return output
def _load_params():
    ''' read the module's parameters and store them globally.
    This function may be needed for certain very dynamic custom modules which
    want to process the parameters that are being handed to the module. Since
    this is so closely tied to the implementation of modules, we cannot
    guarantee API stability for it (it may change between versions); however, we
    will try not to break it gratuitously. It is certainly more future-proof
    to call this function and consume its outputs than to implement the logic
    inside it as a copy in your own code.
'''
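    # Illustrative usage sketch (hypothetical module code, not part of this file's
    # control flow): a very dynamic custom module could peek at the raw parameters
    # before building its AnsibleModule, e.g.
    #   params = _load_params()
    #   if params.get('state') == 'absent':
    #       ...construct a reduced argument_spec...
    # 'state' here is just a placeholder parameter name.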
global _ANSIBLE_ARGS
if _ANSIBLE_ARGS is not None:
buffer = _ANSIBLE_ARGS
else:
# debug overrides to read args from file or cmdline
# Avoid tracebacks when locale is non-utf8
# We control the args and we pass them as utf8
if len(sys.argv) > 1:
if os.path.isfile(sys.argv[1]):
fd = open(sys.argv[1], 'rb')
buffer = fd.read()
fd.close()
else:
buffer = sys.argv[1]
if PY3:
buffer = buffer.encode('utf-8', errors='surrogateescape')
# default case, read from stdin
else:
if PY2:
buffer = sys.stdin.read()
else:
buffer = sys.stdin.buffer.read()
_ANSIBLE_ARGS = buffer
try:
params = json.loads(buffer.decode('utf-8'))
except ValueError:
# This helper is used too early for fail_json to work.
print('\n{"msg": "Error: Module unable to decode stdin/parameters as valid JSON. Unable to parse what parameters were passed", "failed": true}')
sys.exit(1)
if PY2:
params = json_dict_unicode_to_bytes(params)
try:
return params['ANSIBLE_MODULE_ARGS']
except KeyError:
# This helper does not have access to fail_json so we have to print
# json output on our own.
print('\n{"msg": "Error: Module unable to locate ANSIBLE_MODULE_ARGS in JSON data from stdin. Unable to figure out what parameters were passed", '
'"failed": true}')
sys.exit(1)
def missing_required_lib(library, reason=None, url=None):
hostname = platform.node()
msg = "Failed to import the required Python library (%s) on %s's Python %s." % (library, hostname, sys.executable)
if reason:
msg += " This is required %s." % reason
if url:
msg += " See %s for more info." % url
msg += (" Please read the module documentation and install it in the appropriate location."
" If the required library is installed, but Ansible is using the wrong Python interpreter,"
" please consult the documentation on ansible_python_interpreter")
return msg
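# Illustrative usage sketch (hypothetical module code): guard an optional import and
# report it via fail_json if the dependency turns out to be required, e.g.
#   try:
#       import requests
#       HAS_REQUESTS = True
#   except ImportError:
#       HAS_REQUESTS = False
#   ...
#   if not HAS_REQUESTS:
#       module.fail_json(msg=missing_required_lib('requests'))
# 'requests' is only an example library name.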
class AnsibleModule(object):
def __init__(self, argument_spec, bypass_checks=False, no_log=False,
mutually_exclusive=None, required_together=None,
required_one_of=None, add_file_common_args=False,
supports_check_mode=False, required_if=None, required_by=None):
'''
Common code for quickly building an ansible module in Python
(although you can write modules with anything that can return JSON).
See :ref:`developing_modules_general` for a general introduction
and :ref:`developing_program_flow_modules` for more detailed explanation.
'''
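        # Illustrative minimal usage from a module's main() (hypothetical names, shown
        # only as a sketch of the constructor arguments documented above):
        #   module = AnsibleModule(
        #       argument_spec=dict(name=dict(type='str', required=True)),
        #       supports_check_mode=True,
        #   )
        #   module.exit_json(changed=False, name=module.params['name'])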
self._name = os.path.basename(__file__) # initialize name until we can parse from options
self.argument_spec = argument_spec
self.supports_check_mode = supports_check_mode
self.check_mode = False
self.bypass_checks = bypass_checks
self.no_log = no_log
self.mutually_exclusive = mutually_exclusive
self.required_together = required_together
self.required_one_of = required_one_of
self.required_if = required_if
self.required_by = required_by
self.cleanup_files = []
self._debug = False
self._diff = False
self._socket_path = None
self._shell = None
self._syslog_facility = 'LOG_USER'
self._verbosity = 0
# May be used to set modifications to the environment for any
# run_command invocation
self.run_command_environ_update = {}
self._clean = {}
self._string_conversion_action = ''
self.aliases = {}
self._legal_inputs = []
self._options_context = list()
self._tmpdir = None
if add_file_common_args:
for k, v in FILE_COMMON_ARGUMENTS.items():
if k not in self.argument_spec:
self.argument_spec[k] = v
# Save parameter values that should never be logged
self.no_log_values = set()
# check the locale as set by the current environment, and reset to
# a known valid (LANG=C) if it's an invalid/unavailable locale
self._check_locale()
self._load_params()
self._set_internal_properties()
self.validator = ModuleArgumentSpecValidator(self.argument_spec,
self.mutually_exclusive,
self.required_together,
self.required_one_of,
self.required_if,
self.required_by,
)
self.validation_result = self.validator.validate(self.params)
self.params.update(self.validation_result.validated_parameters)
self.no_log_values.update(self.validation_result._no_log_values)
self.aliases.update(self.validation_result._aliases)
try:
error = self.validation_result.errors[0]
if isinstance(error, UnsupportedError) and self._ignore_unknown_opts:
error = None
except IndexError:
error = None
# Fail for validation errors, even in check mode
if error:
msg = self.validation_result.errors.msg
if isinstance(error, UnsupportedError):
msg = "Unsupported parameters for ({name}) {kind}: {msg}".format(name=self._name, kind='module', msg=msg)
self.fail_json(msg=msg)
if self.check_mode and not self.supports_check_mode:
self.exit_json(skipped=True, msg="remote module (%s) does not support check mode" % self._name)
# This is for backwards compatibility only.
self._CHECK_ARGUMENT_TYPES_DISPATCHER = DEFAULT_TYPE_VALIDATORS
if not self.no_log:
self._log_invocation()
# selinux state caching
self._selinux_enabled = None
self._selinux_mls_enabled = None
self._selinux_initial_context = None
# finally, make sure we're in a sane working dir
self._set_cwd()
@property
def tmpdir(self):
# if _ansible_tmpdir was not set and we have a remote_tmp,
# the module needs to create it and clean it up once finished.
# otherwise we create our own module tmp dir from the system defaults
if self._tmpdir is None:
basedir = None
if self._remote_tmp is not None:
basedir = os.path.expanduser(os.path.expandvars(self._remote_tmp))
if basedir is not None and not os.path.exists(basedir):
try:
os.makedirs(basedir, mode=0o700)
except (OSError, IOError) as e:
self.warn("Unable to use %s as temporary directory, "
"failing back to system: %s" % (basedir, to_native(e)))
basedir = None
else:
self.warn("Module remote_tmp %s did not exist and was "
"created with a mode of 0700, this may cause"
" issues when running as another user. To "
"avoid this, create the remote_tmp dir with "
"the correct permissions manually" % basedir)
basefile = "ansible-moduletmp-%s-" % time.time()
try:
tmpdir = tempfile.mkdtemp(prefix=basefile, dir=basedir)
except (OSError, IOError) as e:
self.fail_json(
msg="Failed to create remote module tmp path at dir %s "
"with prefix %s: %s" % (basedir, basefile, to_native(e))
)
if not self._keep_remote_files:
atexit.register(shutil.rmtree, tmpdir)
self._tmpdir = tmpdir
return self._tmpdir
def warn(self, warning):
warn(warning)
self.log('[WARNING] %s' % warning)
def deprecate(self, msg, version=None, date=None, collection_name=None):
if version is not None and date is not None:
raise AssertionError("implementation error -- version and date must not both be set")
deprecate(msg, version=version, date=date, collection_name=collection_name)
# For compatibility, we accept that neither version nor date is set,
# and treat that the same as if version would not have been set
if date is not None:
self.log('[DEPRECATION WARNING] %s %s' % (msg, date))
else:
self.log('[DEPRECATION WARNING] %s %s' % (msg, version))
def load_file_common_arguments(self, params, path=None):
'''
        many modules deal with files, this encapsulates common
        options that the file module accepts such that they are directly
        available to all modules and the modules can share code.
        Allows overriding the path/dest module argument by providing path.
'''
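        # Illustrative usage sketch (hypothetical module code): modules created with
        # add_file_common_args=True typically do something like
        #   file_args = module.load_file_common_arguments(module.params)
        #   changed = module.set_fs_attributes_if_different(file_args, changed)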
if path is None:
path = params.get('path', params.get('dest', None))
if path is None:
return {}
else:
path = os.path.expanduser(os.path.expandvars(path))
b_path = to_bytes(path, errors='surrogate_or_strict')
# if the path is a symlink, and we're following links, get
# the target of the link instead for testing
if params.get('follow', False) and os.path.islink(b_path):
b_path = os.path.realpath(b_path)
path = to_native(b_path)
mode = params.get('mode', None)
owner = params.get('owner', None)
group = params.get('group', None)
# selinux related options
seuser = params.get('seuser', None)
serole = params.get('serole', None)
setype = params.get('setype', None)
selevel = params.get('selevel', None)
secontext = [seuser, serole, setype]
if self.selinux_mls_enabled():
secontext.append(selevel)
default_secontext = self.selinux_default_context(path)
for i in range(len(default_secontext)):
if i is not None and secontext[i] == '_default':
secontext[i] = default_secontext[i]
attributes = params.get('attributes', None)
return dict(
path=path, mode=mode, owner=owner, group=group,
seuser=seuser, serole=serole, setype=setype,
selevel=selevel, secontext=secontext, attributes=attributes,
)
# Detect whether using selinux that is MLS-aware.
# While this means you can set the level/range with
# selinux.lsetfilecon(), it may or may not mean that you
# will get the selevel as part of the context returned
# by selinux.lgetfilecon().
def selinux_mls_enabled(self):
if self._selinux_mls_enabled is None:
self._selinux_mls_enabled = HAVE_SELINUX and selinux.is_selinux_mls_enabled() == 1
return self._selinux_mls_enabled
def selinux_enabled(self):
if self._selinux_enabled is None:
self._selinux_enabled = HAVE_SELINUX and selinux.is_selinux_enabled() == 1
return self._selinux_enabled
# Determine whether we need a placeholder for selevel/mls
def selinux_initial_context(self):
if self._selinux_initial_context is None:
self._selinux_initial_context = [None, None, None]
if self.selinux_mls_enabled():
self._selinux_initial_context.append(None)
return self._selinux_initial_context
# If selinux fails to find a default, return an array of None
def selinux_default_context(self, path, mode=0):
context = self.selinux_initial_context()
if not self.selinux_enabled():
return context
try:
ret = selinux.matchpathcon(to_native(path, errors='surrogate_or_strict'), mode)
except OSError:
return context
if ret[0] == -1:
return context
# Limit split to 4 because the selevel, the last in the list,
# may contain ':' characters
context = ret[1].split(':', 3)
return context
def selinux_context(self, path):
context = self.selinux_initial_context()
if not self.selinux_enabled():
return context
try:
ret = selinux.lgetfilecon_raw(to_native(path, errors='surrogate_or_strict'))
except OSError as e:
if e.errno == errno.ENOENT:
self.fail_json(path=path, msg='path %s does not exist' % path)
else:
self.fail_json(path=path, msg='failed to retrieve selinux context')
if ret[0] == -1:
return context
# Limit split to 4 because the selevel, the last in the list,
# may contain ':' characters
context = ret[1].split(':', 3)
return context
def user_and_group(self, path, expand=True):
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
st = os.lstat(b_path)
uid = st.st_uid
gid = st.st_gid
return (uid, gid)
def find_mount_point(self, path):
'''
Takes a path and returns its mount point
:param path: a string type with a filesystem path
:returns: the path to the mount point as a text type
'''
b_path = os.path.realpath(to_bytes(os.path.expanduser(os.path.expandvars(path)), errors='surrogate_or_strict'))
while not os.path.ismount(b_path):
b_path = os.path.dirname(b_path)
return to_text(b_path, errors='surrogate_or_strict')
def is_special_selinux_path(self, path):
"""
        Returns a tuple containing (True, selinux_context) if the given path is on an
        NFS or other 'special' filesystem mount point, otherwise the return will be (False, None).
"""
try:
f = open('/proc/mounts', 'r')
mount_data = f.readlines()
f.close()
except Exception:
return (False, None)
path_mount_point = self.find_mount_point(path)
for line in mount_data:
(device, mount_point, fstype, options, rest) = line.split(' ', 4)
if to_bytes(path_mount_point) == to_bytes(mount_point):
for fs in self._selinux_special_fs:
if fs in fstype:
special_context = self.selinux_context(path_mount_point)
return (True, special_context)
return (False, None)
def set_default_selinux_context(self, path, changed):
if not self.selinux_enabled():
return changed
context = self.selinux_default_context(path)
return self.set_context_if_different(path, context, False)
def set_context_if_different(self, path, context, changed, diff=None):
if not self.selinux_enabled():
return changed
if self.check_file_absent_if_check_mode(path):
return True
cur_context = self.selinux_context(path)
new_context = list(cur_context)
# Iterate over the current context instead of the
# argument context, which may have selevel.
(is_special_se, sp_context) = self.is_special_selinux_path(path)
if is_special_se:
new_context = sp_context
else:
for i in range(len(cur_context)):
if len(context) > i:
if context[i] is not None and context[i] != cur_context[i]:
new_context[i] = context[i]
elif context[i] is None:
new_context[i] = cur_context[i]
if cur_context != new_context:
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['secontext'] = cur_context
if 'after' not in diff:
diff['after'] = {}
diff['after']['secontext'] = new_context
try:
if self.check_mode:
return True
rc = selinux.lsetfilecon(to_native(path), ':'.join(new_context))
except OSError as e:
self.fail_json(path=path, msg='invalid selinux context: %s' % to_native(e),
new_context=new_context, cur_context=cur_context, input_was=context)
if rc != 0:
self.fail_json(path=path, msg='set selinux context failed')
changed = True
return changed
def set_owner_if_different(self, path, owner, changed, diff=None, expand=True):
if owner is None:
return changed
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
if self.check_file_absent_if_check_mode(b_path):
return True
orig_uid, orig_gid = self.user_and_group(b_path, expand)
try:
uid = int(owner)
except ValueError:
try:
uid = pwd.getpwnam(owner).pw_uid
except KeyError:
path = to_text(b_path)
self.fail_json(path=path, msg='chown failed: failed to look up user %s' % owner)
if orig_uid != uid:
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['owner'] = orig_uid
if 'after' not in diff:
diff['after'] = {}
diff['after']['owner'] = uid
if self.check_mode:
return True
try:
os.lchown(b_path, uid, -1)
except (IOError, OSError) as e:
path = to_text(b_path)
self.fail_json(path=path, msg='chown failed: %s' % (to_text(e)))
changed = True
return changed
def set_group_if_different(self, path, group, changed, diff=None, expand=True):
if group is None:
return changed
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
if self.check_file_absent_if_check_mode(b_path):
return True
orig_uid, orig_gid = self.user_and_group(b_path, expand)
try:
gid = int(group)
except ValueError:
try:
gid = grp.getgrnam(group).gr_gid
except KeyError:
path = to_text(b_path)
self.fail_json(path=path, msg='chgrp failed: failed to look up group %s' % group)
if orig_gid != gid:
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['group'] = orig_gid
if 'after' not in diff:
diff['after'] = {}
diff['after']['group'] = gid
if self.check_mode:
return True
try:
os.lchown(b_path, -1, gid)
except OSError:
path = to_text(b_path)
self.fail_json(path=path, msg='chgrp failed')
changed = True
return changed
def set_mode_if_different(self, path, mode, changed, diff=None, expand=True):
if mode is None:
return changed
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
if self.check_file_absent_if_check_mode(b_path):
return True
path_stat = os.lstat(b_path)
if not isinstance(mode, int):
try:
mode = int(mode, 8)
except Exception:
try:
mode = self._symbolic_mode_to_octal(path_stat, mode)
except Exception as e:
path = to_text(b_path)
self.fail_json(path=path,
msg="mode must be in octal or symbolic form",
details=to_native(e))
if mode != stat.S_IMODE(mode):
# prevent mode from having extra info or being invalid long number
path = to_text(b_path)
self.fail_json(path=path, msg="Invalid mode supplied, only permission info is allowed", details=mode)
prev_mode = stat.S_IMODE(path_stat.st_mode)
if prev_mode != mode:
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['mode'] = '0%03o' % prev_mode
if 'after' not in diff:
diff['after'] = {}
diff['after']['mode'] = '0%03o' % mode
if self.check_mode:
return True
# FIXME: comparison against string above will cause this to be executed
# every time
try:
if hasattr(os, 'lchmod'):
os.lchmod(b_path, mode)
else:
if not os.path.islink(b_path):
os.chmod(b_path, mode)
else:
# Attempt to set the perms of the symlink but be
# careful not to change the perms of the underlying
# file while trying
underlying_stat = os.stat(b_path)
os.chmod(b_path, mode)
new_underlying_stat = os.stat(b_path)
if underlying_stat.st_mode != new_underlying_stat.st_mode:
os.chmod(b_path, stat.S_IMODE(underlying_stat.st_mode))
except OSError as e:
if os.path.islink(b_path) and e.errno in (
errno.EACCES, # can't access symlink in sticky directory (stat)
errno.EPERM, # can't set mode on symbolic links (chmod)
errno.EROFS, # can't set mode on read-only filesystem
):
pass
elif e.errno in (errno.ENOENT, errno.ELOOP): # Can't set mode on broken symbolic links
pass
else:
raise
except Exception as e:
path = to_text(b_path)
self.fail_json(path=path, msg='chmod failed', details=to_native(e),
exception=traceback.format_exc())
path_stat = os.lstat(b_path)
new_mode = stat.S_IMODE(path_stat.st_mode)
if new_mode != prev_mode:
changed = True
return changed
def set_attributes_if_different(self, path, attributes, changed, diff=None, expand=True):
if attributes is None:
return changed
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
if self.check_file_absent_if_check_mode(b_path):
return True
existing = self.get_file_attributes(b_path, include_version=False)
attr_mod = '='
if attributes.startswith(('-', '+')):
attr_mod = attributes[0]
attributes = attributes[1:]
if existing.get('attr_flags', '') != attributes or attr_mod == '-':
attrcmd = self.get_bin_path('chattr')
if attrcmd:
attrcmd = [attrcmd, '%s%s' % (attr_mod, attributes), b_path]
changed = True
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['attributes'] = existing.get('attr_flags')
if 'after' not in diff:
diff['after'] = {}
diff['after']['attributes'] = '%s%s' % (attr_mod, attributes)
if not self.check_mode:
try:
rc, out, err = self.run_command(attrcmd)
if rc != 0 or err:
raise Exception("Error while setting attributes: %s" % (out + err))
except Exception as e:
self.fail_json(path=to_text(b_path), msg='chattr failed',
details=to_native(e), exception=traceback.format_exc())
return changed
def get_file_attributes(self, path, include_version=True):
output = {}
attrcmd = self.get_bin_path('lsattr', False)
if attrcmd:
flags = '-vd' if include_version else '-d'
attrcmd = [attrcmd, flags, path]
try:
rc, out, err = self.run_command(attrcmd)
if rc == 0:
res = out.split()
attr_flags_idx = 0
if include_version:
attr_flags_idx = 1
output['version'] = res[0].strip()
output['attr_flags'] = res[attr_flags_idx].replace('-', '').strip()
output['attributes'] = format_attributes(output['attr_flags'])
except Exception:
pass
return output
@classmethod
def _symbolic_mode_to_octal(cls, path_stat, symbolic_mode):
"""
        This enables symbolic chmod string parsing as stated in the chmod man-page.
        This includes things like: "u=rw-x+X,g=r-x+X,o=r-x+X"
"""
new_mode = stat.S_IMODE(path_stat.st_mode)
# Now parse all symbolic modes
for mode in symbolic_mode.split(','):
# Per single mode. This always contains a '+', '-' or '='
# Split it on that
permlist = MODE_OPERATOR_RE.split(mode)
# And find all the operators
opers = MODE_OPERATOR_RE.findall(mode)
            # The user(s) the mode change applies to is the first element in the
            # 'permlist' list. Take that and remove it from the list.
            # An empty user or 'a' means 'all'.
users = permlist.pop(0)
use_umask = (users == '')
if users == 'a' or users == '':
users = 'ugo'
# Check if there are illegal characters in the user list
# They can end up in 'users' because they are not split
if not USERS_RE.match(users):
raise ValueError("bad symbolic permission for mode: %s" % mode)
            # Now we have two lists of equal length, one contains the requested
            # permissions and one with the corresponding operators.
for idx, perms in enumerate(permlist):
# Check if there are illegal characters in the permissions
if not PERMS_RE.match(perms):
raise ValueError("bad symbolic permission for mode: %s" % mode)
for user in users:
mode_to_apply = cls._get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask, new_mode)
new_mode = cls._apply_operation_to_mode(user, opers[idx], mode_to_apply, new_mode)
return new_mode
@staticmethod
def _apply_operation_to_mode(user, operator, mode_to_apply, current_mode):
if operator == '=':
if user == 'u':
mask = stat.S_IRWXU | stat.S_ISUID
elif user == 'g':
mask = stat.S_IRWXG | stat.S_ISGID
elif user == 'o':
mask = stat.S_IRWXO | stat.S_ISVTX
# mask out u, g, or o permissions from current_mode and apply new permissions
inverse_mask = mask ^ PERM_BITS
new_mode = (current_mode & inverse_mask) | mode_to_apply
elif operator == '+':
new_mode = current_mode | mode_to_apply
elif operator == '-':
new_mode = current_mode - (current_mode & mode_to_apply)
return new_mode
@staticmethod
def _get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask, prev_mode=None):
if prev_mode is None:
prev_mode = stat.S_IMODE(path_stat.st_mode)
is_directory = stat.S_ISDIR(path_stat.st_mode)
has_x_permissions = (prev_mode & EXEC_PERM_BITS) > 0
apply_X_permission = is_directory or has_x_permissions
# Get the umask, if the 'user' part is empty, the effect is as if (a) were
# given, but bits that are set in the umask are not affected.
# We also need the "reversed umask" for masking
umask = os.umask(0)
os.umask(umask)
rev_umask = umask ^ PERM_BITS
# Permission bits constants documented at:
# https://docs.python.org/3/library/stat.html#stat.S_ISUID
if apply_X_permission:
X_perms = {
'u': {'X': stat.S_IXUSR},
'g': {'X': stat.S_IXGRP},
'o': {'X': stat.S_IXOTH},
}
else:
X_perms = {
'u': {'X': 0},
'g': {'X': 0},
'o': {'X': 0},
}
user_perms_to_modes = {
'u': {
'r': rev_umask & stat.S_IRUSR if use_umask else stat.S_IRUSR,
'w': rev_umask & stat.S_IWUSR if use_umask else stat.S_IWUSR,
'x': rev_umask & stat.S_IXUSR if use_umask else stat.S_IXUSR,
's': stat.S_ISUID,
't': 0,
'u': prev_mode & stat.S_IRWXU,
'g': (prev_mode & stat.S_IRWXG) << 3,
'o': (prev_mode & stat.S_IRWXO) << 6},
'g': {
'r': rev_umask & stat.S_IRGRP if use_umask else stat.S_IRGRP,
'w': rev_umask & stat.S_IWGRP if use_umask else stat.S_IWGRP,
'x': rev_umask & stat.S_IXGRP if use_umask else stat.S_IXGRP,
's': stat.S_ISGID,
't': 0,
'u': (prev_mode & stat.S_IRWXU) >> 3,
'g': prev_mode & stat.S_IRWXG,
'o': (prev_mode & stat.S_IRWXO) << 3},
'o': {
'r': rev_umask & stat.S_IROTH if use_umask else stat.S_IROTH,
'w': rev_umask & stat.S_IWOTH if use_umask else stat.S_IWOTH,
'x': rev_umask & stat.S_IXOTH if use_umask else stat.S_IXOTH,
's': 0,
't': stat.S_ISVTX,
'u': (prev_mode & stat.S_IRWXU) >> 6,
'g': (prev_mode & stat.S_IRWXG) >> 3,
'o': prev_mode & stat.S_IRWXO},
}
# Insert X_perms into user_perms_to_modes
for key, value in X_perms.items():
user_perms_to_modes[key].update(value)
def or_reduce(mode, perm):
return mode | user_perms_to_modes[user][perm]
return reduce(or_reduce, perms, 0)
def set_fs_attributes_if_different(self, file_args, changed, diff=None, expand=True):
# set modes owners and context as needed
changed = self.set_context_if_different(
file_args['path'], file_args['secontext'], changed, diff
)
changed = self.set_owner_if_different(
file_args['path'], file_args['owner'], changed, diff, expand
)
changed = self.set_group_if_different(
file_args['path'], file_args['group'], changed, diff, expand
)
changed = self.set_mode_if_different(
file_args['path'], file_args['mode'], changed, diff, expand
)
changed = self.set_attributes_if_different(
file_args['path'], file_args['attributes'], changed, diff, expand
)
return changed
def check_file_absent_if_check_mode(self, file_path):
return self.check_mode and not os.path.exists(file_path)
def set_directory_attributes_if_different(self, file_args, changed, diff=None, expand=True):
return self.set_fs_attributes_if_different(file_args, changed, diff, expand)
def set_file_attributes_if_different(self, file_args, changed, diff=None, expand=True):
return self.set_fs_attributes_if_different(file_args, changed, diff, expand)
def add_path_info(self, kwargs):
'''
for results that are files, supplement the info about the file
in the return path with stats about the file path.
'''
path = kwargs.get('path', kwargs.get('dest', None))
if path is None:
return kwargs
b_path = to_bytes(path, errors='surrogate_or_strict')
if os.path.exists(b_path):
(uid, gid) = self.user_and_group(path)
kwargs['uid'] = uid
kwargs['gid'] = gid
try:
user = pwd.getpwuid(uid)[0]
except KeyError:
user = str(uid)
try:
group = grp.getgrgid(gid)[0]
except KeyError:
group = str(gid)
kwargs['owner'] = user
kwargs['group'] = group
st = os.lstat(b_path)
kwargs['mode'] = '0%03o' % stat.S_IMODE(st[stat.ST_MODE])
# secontext not yet supported
if os.path.islink(b_path):
kwargs['state'] = 'link'
elif os.path.isdir(b_path):
kwargs['state'] = 'directory'
elif os.stat(b_path).st_nlink > 1:
kwargs['state'] = 'hard'
else:
kwargs['state'] = 'file'
if self.selinux_enabled():
kwargs['secontext'] = ':'.join(self.selinux_context(path))
kwargs['size'] = st[stat.ST_SIZE]
return kwargs
def _check_locale(self):
'''
Uses the locale module to test the currently set locale
(per the LANG and LC_CTYPE environment settings)
'''
try:
# setting the locale to '' uses the default locale
# as it would be returned by locale.getdefaultlocale()
locale.setlocale(locale.LC_ALL, '')
except locale.Error:
# fallback to the 'best' locale, per the function
# final fallback is 'C', which may cause unicode issues
# but is preferable to simply failing on unknown locale
best_locale = get_best_parsable_locale(self)
# need to set several since many tools choose to ignore documented precedence and scope
locale.setlocale(locale.LC_ALL, best_locale)
os.environ['LANG'] = best_locale
os.environ['LC_ALL'] = best_locale
os.environ['LC_MESSAGES'] = best_locale
except Exception as e:
self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" %
to_native(e), exception=traceback.format_exc())
def _set_internal_properties(self, argument_spec=None, module_parameters=None):
if argument_spec is None:
argument_spec = self.argument_spec
if module_parameters is None:
module_parameters = self.params
for k in PASS_VARS:
# handle setting internal properties from internal ansible vars
param_key = '_ansible_%s' % k
if param_key in module_parameters:
if k in PASS_BOOLS:
setattr(self, PASS_VARS[k][0], self.boolean(module_parameters[param_key]))
else:
setattr(self, PASS_VARS[k][0], module_parameters[param_key])
# clean up internal top level params:
if param_key in self.params:
del self.params[param_key]
else:
# use defaults if not already set
if not hasattr(self, PASS_VARS[k][0]):
setattr(self, PASS_VARS[k][0], PASS_VARS[k][1])
def safe_eval(self, value, locals=None, include_exceptions=False):
return safe_eval(value, locals, include_exceptions)
def _load_params(self):
''' read the input and set the params attribute.
This method is for backwards compatibility. The guts of the function
were moved out in 2.1 so that custom modules could read the parameters.
'''
# debug overrides to read args from file or cmdline
self.params = _load_params()
def _log_to_syslog(self, msg):
if HAS_SYSLOG:
try:
module = 'ansible-%s' % self._name
facility = getattr(syslog, self._syslog_facility, syslog.LOG_USER)
syslog.openlog(str(module), 0, facility)
syslog.syslog(syslog.LOG_INFO, msg)
except (TypeError, ValueError) as e:
self.fail_json(
msg='Failed to log to syslog (%s). To proceed anyway, '
'disable syslog logging by setting no_target_syslog '
'to True in your Ansible config.' % to_native(e),
exception=traceback.format_exc(),
msg_to_log=msg,
)
def debug(self, msg):
if self._debug:
self.log('[debug] %s' % msg)
def log(self, msg, log_args=None):
if not self.no_log:
if log_args is None:
log_args = dict()
module = 'ansible-%s' % self._name
if isinstance(module, binary_type):
module = module.decode('utf-8', 'replace')
# 6655 - allow for accented characters
if not isinstance(msg, (binary_type, text_type)):
raise TypeError("msg should be a string (got %s)" % type(msg))
# We want journal to always take text type
# syslog takes bytes on py2, text type on py3
if isinstance(msg, binary_type):
journal_msg = remove_values(msg.decode('utf-8', 'replace'), self.no_log_values)
else:
# TODO: surrogateescape is a danger here on Py3
journal_msg = remove_values(msg, self.no_log_values)
if PY3:
syslog_msg = journal_msg
else:
syslog_msg = journal_msg.encode('utf-8', 'replace')
if has_journal:
journal_args = [("MODULE", os.path.basename(__file__))]
for arg in log_args:
name, value = (arg.upper(), str(log_args[arg]))
if name in (
'PRIORITY', 'MESSAGE', 'MESSAGE_ID',
'CODE_FILE', 'CODE_LINE', 'CODE_FUNC',
'SYSLOG_FACILITY', 'SYSLOG_IDENTIFIER',
'SYSLOG_PID',
):
name = "_%s" % name
journal_args.append((name, value))
try:
if HAS_SYSLOG:
                        # If a syslog_facility was specified, convert it from the
                        # facility name to the facility code and set it as the
                        # SYSLOG_FACILITY argument of journal.send()
facility = getattr(syslog,
self._syslog_facility,
syslog.LOG_USER) >> 3
journal.send(MESSAGE=u"%s %s" % (module, journal_msg),
SYSLOG_FACILITY=facility,
**dict(journal_args))
else:
journal.send(MESSAGE=u"%s %s" % (module, journal_msg),
**dict(journal_args))
except IOError:
# fall back to syslog since logging to journal failed
self._log_to_syslog(syslog_msg)
else:
self._log_to_syslog(syslog_msg)
def _log_invocation(self):
''' log that ansible ran the module '''
# TODO: generalize a separate log function and make log_invocation use it
# Sanitize possible password argument when logging.
log_args = dict()
for param in self.params:
canon = self.aliases.get(param, param)
arg_opts = self.argument_spec.get(canon, {})
no_log = arg_opts.get('no_log', None)
# try to proactively capture password/passphrase fields
if no_log is None and PASSWORD_MATCH.search(param):
log_args[param] = 'NOT_LOGGING_PASSWORD'
self.warn('Module did not set no_log for %s' % param)
elif self.boolean(no_log):
log_args[param] = 'NOT_LOGGING_PARAMETER'
else:
param_val = self.params[param]
if not isinstance(param_val, (text_type, binary_type)):
param_val = str(param_val)
elif isinstance(param_val, text_type):
param_val = param_val.encode('utf-8')
log_args[param] = heuristic_log_sanitize(param_val, self.no_log_values)
msg = ['%s=%s' % (to_native(arg), to_native(val)) for arg, val in log_args.items()]
if msg:
msg = 'Invoked with %s' % ' '.join(msg)
else:
msg = 'Invoked'
self.log(msg, log_args=log_args)
def _set_cwd(self):
try:
cwd = os.getcwd()
if not os.access(cwd, os.F_OK | os.R_OK):
raise Exception()
return cwd
except Exception:
# we don't have access to the cwd, probably because of sudo.
# Try and move to a neutral location to prevent errors
for cwd in [self.tmpdir, os.path.expandvars('$HOME'), tempfile.gettempdir()]:
try:
if os.access(cwd, os.F_OK | os.R_OK):
os.chdir(cwd)
return cwd
except Exception:
pass
# we won't error here, as it may *not* be a problem,
# and we don't want to break modules unnecessarily
return None
def get_bin_path(self, arg, required=False, opt_dirs=None):
'''
Find system executable in PATH.
:param arg: The executable to find.
:param required: if executable is not found and required is ``True``, fail_json
:param opt_dirs: optional list of directories to search in addition to ``PATH``
:returns: if found return full path; otherwise return None
'''
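        # Illustrative usage sketch (hypothetical module code):
        #   git_path = module.get_bin_path('git', required=True)
        # would fail the module if 'git' cannot be found on PATH; 'git' is only an example.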
bin_path = None
try:
bin_path = get_bin_path(arg=arg, opt_dirs=opt_dirs)
except ValueError as e:
if required:
self.fail_json(msg=to_text(e))
else:
return bin_path
return bin_path
def boolean(self, arg):
'''Convert the argument to a boolean'''
if arg is None:
return arg
try:
return boolean(arg)
except TypeError as e:
self.fail_json(msg=to_native(e))
def jsonify(self, data):
try:
return jsonify(data)
except UnicodeError as e:
self.fail_json(msg=to_text(e))
def from_json(self, data):
return json.loads(data)
def add_cleanup_file(self, path):
if path not in self.cleanup_files:
self.cleanup_files.append(path)
def do_cleanup_files(self):
for path in self.cleanup_files:
self.cleanup(path)
def _return_formatted(self, kwargs):
self.add_path_info(kwargs)
if 'invocation' not in kwargs:
kwargs['invocation'] = {'module_args': self.params}
if 'warnings' in kwargs:
if isinstance(kwargs['warnings'], list):
for w in kwargs['warnings']:
self.warn(w)
else:
self.warn(kwargs['warnings'])
warnings = get_warning_messages()
if warnings:
kwargs['warnings'] = warnings
if 'deprecations' in kwargs:
if isinstance(kwargs['deprecations'], list):
for d in kwargs['deprecations']:
if isinstance(d, SEQUENCETYPE) and len(d) == 2:
self.deprecate(d[0], version=d[1])
elif isinstance(d, Mapping):
self.deprecate(d['msg'], version=d.get('version'), date=d.get('date'),
collection_name=d.get('collection_name'))
else:
self.deprecate(d) # pylint: disable=ansible-deprecated-no-version
else:
self.deprecate(kwargs['deprecations']) # pylint: disable=ansible-deprecated-no-version
deprecations = get_deprecation_messages()
if deprecations:
kwargs['deprecations'] = deprecations
# preserve bools/none from no_log
        # TODO: once python version on target high enough, use a dict comprehension
preserved = {}
for k, v in kwargs.items():
if v is None or isinstance(v, bool):
preserved[k] = v
# strip no_log collisions
kwargs = remove_values(kwargs, self.no_log_values)
# return preserved
kwargs.update(preserved)
print('\n%s' % self.jsonify(kwargs))
def exit_json(self, **kwargs):
''' return from the module, without error '''
self.do_cleanup_files()
self._return_formatted(kwargs)
sys.exit(0)
def fail_json(self, msg, **kwargs):
''' return from the module, with an error message '''
kwargs['failed'] = True
kwargs['msg'] = msg
# Add traceback if debug or high verbosity and it is missing
# NOTE: Badly named as exception, it really always has been a traceback
if 'exception' not in kwargs and sys.exc_info()[2] and (self._debug or self._verbosity >= 3):
if PY2:
# On Python 2 this is the last (stack frame) exception and as such may be unrelated to the failure
kwargs['exception'] = 'WARNING: The below traceback may *not* be related to the actual failure.\n' +\
''.join(traceback.format_tb(sys.exc_info()[2]))
else:
kwargs['exception'] = ''.join(traceback.format_tb(sys.exc_info()[2]))
self.do_cleanup_files()
self._return_formatted(kwargs)
sys.exit(1)
def fail_on_missing_params(self, required_params=None):
if not required_params:
return
try:
check_missing_parameters(self.params, required_params)
except TypeError as e:
self.fail_json(msg=to_native(e))
def digest_from_file(self, filename, algorithm):
''' Return hex digest of local file for a digest_method specified by name, or None if file is not present. '''
b_filename = to_bytes(filename, errors='surrogate_or_strict')
if not os.path.exists(b_filename):
return None
if os.path.isdir(b_filename):
self.fail_json(msg="attempted to take checksum of directory: %s" % filename)
# preserve old behaviour where the third parameter was a hash algorithm object
if hasattr(algorithm, 'hexdigest'):
digest_method = algorithm
else:
try:
digest_method = AVAILABLE_HASH_ALGORITHMS[algorithm]()
except KeyError:
self.fail_json(msg="Could not hash file '%s' with algorithm '%s'. Available algorithms: %s" %
(filename, algorithm, ', '.join(AVAILABLE_HASH_ALGORITHMS)))
blocksize = 64 * 1024
infile = open(os.path.realpath(b_filename), 'rb')
block = infile.read(blocksize)
while block:
digest_method.update(block)
block = infile.read(blocksize)
infile.close()
return digest_method.hexdigest()
def md5(self, filename):
''' Return MD5 hex digest of local file using digest_from_file().
Do not use this function unless you have no other choice for:
1) Optional backwards compatibility
2) Compatibility with a third party protocol
This function will not work on systems complying with FIPS-140-2.
Most uses of this function can use the module.sha1 function instead.
'''
if 'md5' not in AVAILABLE_HASH_ALGORITHMS:
raise ValueError('MD5 not available. Possibly running in FIPS mode')
return self.digest_from_file(filename, 'md5')
def sha1(self, filename):
''' Return SHA1 hex digest of local file using digest_from_file(). '''
return self.digest_from_file(filename, 'sha1')
def sha256(self, filename):
''' Return SHA-256 hex digest of local file using digest_from_file(). '''
return self.digest_from_file(filename, 'sha256')
def backup_local(self, fn):
'''make a date-marked backup of the specified file, returning the path to the backup, or an empty string if the source file does not exist'''
backupdest = ''
if os.path.exists(fn):
# backups named basename.PID.YYYY-MM-DD@HH:MM:SS~
ext = time.strftime("%Y-%m-%d@%H:%M:%S~", time.localtime(time.time()))
backupdest = '%s.%s.%s' % (fn, os.getpid(), ext)
try:
self.preserved_copy(fn, backupdest)
except (shutil.Error, IOError) as e:
self.fail_json(msg='Could not make backup of %s to %s: %s' % (fn, backupdest, to_native(e)))
return backupdest
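# Illustrative usage (not part of the original module; the path and resulting name
# are hypothetical, 'module' is assumed to be an AnsibleModule instance): the backup
# is a sibling copy named after the original plus PID and timestamp:
#
#     backup = module.backup_local('/etc/ssh/sshd_config')
#     # e.g. '/etc/ssh/sshd_config.1234.2023-09-18@16:35:01~', or '' if the file did not exist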
def cleanup(self, tmpfile):
if os.path.exists(tmpfile):
try:
os.unlink(tmpfile)
except OSError as e:
sys.stderr.write("could not cleanup %s: %s" % (tmpfile, to_native(e)))
def preserved_copy(self, src, dest):
"""Copy a file with preserved ownership, permissions and context"""
# shutil.copy2(src, dst)
# Similar to shutil.copy(), but metadata is copied as well - in fact,
# this is just shutil.copy() followed by copystat(). This is similar
# to the Unix command cp -p.
#
# shutil.copystat(src, dst)
# Copy the permission bits, last access time, last modification time,
# and flags from src to dst. The file contents, owner, and group are
# unaffected. src and dst are path names given as strings.
shutil.copy2(src, dest)
# Set the context
if self.selinux_enabled():
context = self.selinux_context(src)
self.set_context_if_different(dest, context, False)
# chown it
try:
dest_stat = os.stat(src)
tmp_stat = os.stat(dest)
if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
os.chown(dest, dest_stat.st_uid, dest_stat.st_gid)
except OSError as e:
if e.errno != errno.EPERM:
raise
# Set the attributes
current_attribs = self.get_file_attributes(src, include_version=False)
current_attribs = current_attribs.get('attr_flags', '')
self.set_attributes_if_different(dest, current_attribs, True)
def atomic_move(self, src, dest, unsafe_writes=False):
'''atomically move src to dest, copying attributes from dest; any failure results in fail_json.
It uses os.rename where possible, as that is an atomic operation; the rest of the function
works around limitations and corner cases, and ensures the selinux context is saved if possible'''
context = None
dest_stat = None
b_src = to_bytes(src, errors='surrogate_or_strict')
b_dest = to_bytes(dest, errors='surrogate_or_strict')
if os.path.exists(b_dest):
try:
dest_stat = os.stat(b_dest)
# copy mode and ownership
os.chmod(b_src, dest_stat.st_mode & PERM_BITS)
os.chown(b_src, dest_stat.st_uid, dest_stat.st_gid)
# try to copy flags if possible
if hasattr(os, 'chflags') and hasattr(dest_stat, 'st_flags'):
try:
os.chflags(b_src, dest_stat.st_flags)
except OSError as e:
for err in 'EOPNOTSUPP', 'ENOTSUP':
if hasattr(errno, err) and e.errno == getattr(errno, err):
break
else:
raise
except OSError as e:
if e.errno != errno.EPERM:
raise
if self.selinux_enabled():
context = self.selinux_context(dest)
else:
if self.selinux_enabled():
context = self.selinux_default_context(dest)
creating = not os.path.exists(b_dest)
try:
# Optimistically try a rename, solves some corner cases and can avoid useless work, throws exception if not atomic.
os.rename(b_src, b_dest)
except (IOError, OSError) as e:
if e.errno not in [errno.EPERM, errno.EXDEV, errno.EACCES, errno.ETXTBSY, errno.EBUSY]:
# only try workarounds for errno 18 (cross device), 1 (not permitted), 13 (permission denied),
# 26 (text file busy) and 16 (device or resource busy), which happen on vagrant synced folders and other 'exotic' non posix file systems
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, to_native(e)), exception=traceback.format_exc())
else:
# Use bytes here. In the shippable CI, this fails with
# a UnicodeError with surrogateescape'd strings for an unknown
# reason (doesn't happen in a local Ubuntu16.04 VM)
b_dest_dir = os.path.dirname(b_dest)
b_suffix = os.path.basename(b_dest)
error_msg = None
tmp_dest_name = None
try:
tmp_dest_fd, tmp_dest_name = tempfile.mkstemp(prefix=b'.ansible_tmp', dir=b_dest_dir, suffix=b_suffix)
except (OSError, IOError) as e:
error_msg = 'The destination directory (%s) is not writable by the current user. Error was: %s' % (os.path.dirname(dest), to_native(e))
finally:
if error_msg:
if unsafe_writes:
self._unsafe_writes(b_src, b_dest)
else:
self.fail_json(msg=error_msg, exception=traceback.format_exc())
if tmp_dest_name:
b_tmp_dest_name = to_bytes(tmp_dest_name, errors='surrogate_or_strict')
try:
try:
# close tmp file handle before file operations to prevent text file busy errors on vboxfs synced folders (windows host)
os.close(tmp_dest_fd)
# leaves tmp file behind when sudo and not root
try:
shutil.move(b_src, b_tmp_dest_name)
except OSError:
# cleanup will happen by 'rm' of tmpdir
# copy2 will preserve some metadata
shutil.copy2(b_src, b_tmp_dest_name)
if self.selinux_enabled():
self.set_context_if_different(
b_tmp_dest_name, context, False)
try:
tmp_stat = os.stat(b_tmp_dest_name)
if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
os.chown(b_tmp_dest_name, dest_stat.st_uid, dest_stat.st_gid)
except OSError as e:
if e.errno != errno.EPERM:
raise
try:
os.rename(b_tmp_dest_name, b_dest)
except (shutil.Error, OSError, IOError) as e:
if unsafe_writes and e.errno == errno.EBUSY:
self._unsafe_writes(b_tmp_dest_name, b_dest)
else:
self.fail_json(msg='Unable to make %s into %s, failed final rename from %s: %s' %
(src, dest, b_tmp_dest_name, to_native(e)), exception=traceback.format_exc())
except (shutil.Error, OSError, IOError) as e:
if unsafe_writes:
self._unsafe_writes(b_src, b_dest)
else:
self.fail_json(msg='Failed to replace file: %s to %s: %s' % (src, dest, to_native(e)), exception=traceback.format_exc())
finally:
self.cleanup(b_tmp_dest_name)
if creating:
# make sure the file has the correct permissions
# based on the current value of umask
umask = os.umask(0)
os.umask(umask)
os.chmod(b_dest, DEFAULT_PERM & ~umask)
try:
os.chown(b_dest, os.geteuid(), os.getegid())
except OSError:
# We're okay with trying our best here. If the user is not
# root (or old Unices) they won't be able to chown.
pass
if self.selinux_enabled():
# rename might not preserve context
self.set_context_if_different(dest, context, False)
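# Illustrative usage (not part of the original module; the destination path is
# hypothetical, 'module' is assumed to be an AnsibleModule instance): modules usually
# write a temporary file under module.tmpdir and then publish it with atomic_move:
#
#     fd, tmp_path = tempfile.mkstemp(dir=module.tmpdir)
#     with os.fdopen(fd, 'wb') as f:
#         f.write(b'new contents\n')
#     module.atomic_move(tmp_path, '/etc/myapp.conf', unsafe_writes=module.params.get('unsafe_writes', False))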
def _unsafe_writes(self, src, dest):
# sadly there are some situations where we cannot ensure atomicity, but only if
# the user insists and we get the appropriate error we update the file unsafely
try:
out_dest = in_src = None
try:
out_dest = open(dest, 'wb')
in_src = open(src, 'rb')
shutil.copyfileobj(in_src, out_dest)
finally: # assuring closed files in 2.4 compatible way
if out_dest:
out_dest.close()
if in_src:
in_src.close()
except (shutil.Error, OSError, IOError) as e:
self.fail_json(msg='Could not write data to file (%s) from (%s): %s' % (dest, src, to_native(e)),
exception=traceback.format_exc())
def _clean_args(self, args):
if not self._clean:
# create a printable version of the command for use in reporting later,
# which strips out things like passwords from the args list
to_clean_args = args
if PY2:
if isinstance(args, text_type):
to_clean_args = to_bytes(args)
else:
if isinstance(args, binary_type):
to_clean_args = to_text(args)
if isinstance(args, (text_type, binary_type)):
to_clean_args = shlex.split(to_clean_args)
clean_args = []
is_passwd = False
for arg in (to_native(a) for a in to_clean_args):
if is_passwd:
is_passwd = False
clean_args.append('********')
continue
if PASSWD_ARG_RE.match(arg):
sep_idx = arg.find('=')
if sep_idx > -1:
clean_args.append('%s=********' % arg[:sep_idx])
continue
else:
is_passwd = True
arg = heuristic_log_sanitize(arg, self.no_log_values)
clean_args.append(arg)
self._clean = ' '.join(shlex_quote(arg) for arg in clean_args)
return self._clean
def _restore_signal_handlers(self):
# Reset SIGPIPE to SIG_DFL, otherwise in Python2.7 it gets ignored in subprocesses.
if PY2 and sys.platform != 'win32':
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None,
use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None, encoding='utf-8', errors='surrogate_or_strict',
expand_user_and_vars=True, pass_fds=None, before_communicate_callback=None, ignore_invalid_cwd=True, handle_exceptions=True):
'''
Execute a command, returns rc, stdout, and stderr.
The mechanism of this method for reading stdout and stderr differs from
that of CPython subprocess.Popen.communicate, in that this method will
stop reading once the spawned command has exited and stdout and stderr
have been consumed, as opposed to waiting until stdout/stderr are
closed. This can be an important distinction, when taken into account
that a forked or backgrounded process may hold stdout or stderr open
for longer than the spawned command.
:arg args: is the command to run
* If args is a list, the command will be run with shell=False.
* If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False
* If args is a string and use_unsafe_shell=True it runs with shell=True.
:kw check_rc: Whether to call fail_json in case of non zero RC.
Default False
:kw close_fds: See documentation for subprocess.Popen(). Default True
:kw executable: See documentation for subprocess.Popen(). Default None
:kw data: If given, information to write to the stdin of the command
:kw binary_data: If False, append a newline to the data. Default False
:kw path_prefix: If given, additional path to find the command in.
This adds to the PATH environment variable so helper commands in
the same directory can also be found
:kw cwd: If given, working directory to run the command inside
:kw use_unsafe_shell: See `args` parameter. Default False
:kw prompt_regex: Regex string (not a compiled regex) which can be
used to detect prompts in the stdout which would otherwise cause
the execution to hang (especially if no input data is specified)
:kw environ_update: dictionary to *update* environ variables with
:kw umask: Umask to be used when running the command. Default None
:kw encoding: Since we return native strings, on python3 we need to
know the encoding to use to transform from bytes to text. If you
want to always get bytes back, use encoding=None. The default is
"utf-8". This does not affect transformation of strings given as
args.
:kw errors: Since we return native strings, on python3 we need to
transform stdout and stderr from bytes to text. If the bytes are
undecodable in the ``encoding`` specified, then use this error
handler to deal with them. The default is ``surrogate_or_strict``
which means that the bytes will be decoded using the
surrogateescape error handler if available (available on all
python3 versions we support) otherwise a UnicodeError traceback
will be raised. This does not affect transformations of strings
given as args.
:kw expand_user_and_vars: When ``use_unsafe_shell=False`` this argument
dictates whether ``~`` is expanded in paths and environment variables
are expanded before running the command. When ``True`` a string such as
``$SHELL`` will be expanded regardless of escaping. When ``False`` and
``use_unsafe_shell=False`` no path or variable expansion will be done.
:kw pass_fds: When running on Python 3 this argument
dictates which file descriptors should be passed
to an underlying ``Popen`` constructor. On Python 2, this will
set ``close_fds`` to False.
:kw before_communicate_callback: This function will be called
after the ``Popen`` object has been created
but before communicating with the process.
(The ``Popen`` object will be passed to the callback as its first argument)
:kw ignore_invalid_cwd: This flag indicates whether an invalid ``cwd``
(non-existent or not a directory) should be ignored or should raise
an exception.
:kw handle_exceptions: This flag indicates whether an exception will
be handled inline (resulting in a call to fail_json) or if the caller
should handle it.
:returns: A 3-tuple of return code (integer), stdout (native string),
and stderr (native string). On python2, stdout and stderr are both
byte strings. On python3, stdout and stderr are text strings converted
according to the encoding and errors parameters. If you want byte
strings on python3, use encoding=None to turn decoding to text off.
'''
# used by clean args later on
self._clean = None
if not isinstance(args, (list, binary_type, text_type)):
msg = "Argument 'args' to run_command must be list or string"
self.fail_json(rc=257, cmd=args, msg=msg)
shell = False
if use_unsafe_shell:
# stringify args for unsafe/direct shell usage
if isinstance(args, list):
args = b" ".join([to_bytes(shlex_quote(x), errors='surrogate_or_strict') for x in args])
else:
args = to_bytes(args, errors='surrogate_or_strict')
# not set explicitly, check if set by controller
if executable:
executable = to_bytes(executable, errors='surrogate_or_strict')
args = [executable, b'-c', args]
elif self._shell not in (None, '/bin/sh'):
args = [to_bytes(self._shell, errors='surrogate_or_strict'), b'-c', args]
else:
shell = True
else:
# ensure args are a list
if isinstance(args, (binary_type, text_type)):
# On python2.6 and below, shlex has problems with text type
# On python3, shlex needs a text type.
if PY2:
args = to_bytes(args, errors='surrogate_or_strict')
elif PY3:
args = to_text(args, errors='surrogateescape')
args = shlex.split(args)
# expand ``~`` in paths, and all environment vars
if expand_user_and_vars:
args = [to_bytes(os.path.expanduser(os.path.expandvars(x)), errors='surrogate_or_strict') for x in args if x is not None]
else:
args = [to_bytes(x, errors='surrogate_or_strict') for x in args if x is not None]
prompt_re = None
if prompt_regex:
if isinstance(prompt_regex, text_type):
if PY3:
prompt_regex = to_bytes(prompt_regex, errors='surrogateescape')
elif PY2:
prompt_regex = to_bytes(prompt_regex, errors='surrogate_or_strict')
try:
prompt_re = re.compile(prompt_regex, re.MULTILINE)
except re.error:
self.fail_json(msg="invalid prompt regular expression given to run_command")
rc = 0
msg = None
st_in = None
env = os.environ.copy()
# We can set this from both an attribute and per call
env.update(self.run_command_environ_update or {})
env.update(environ_update or {})
if path_prefix:
path = env.get('PATH', '')
if path:
env['PATH'] = "%s:%s" % (path_prefix, path)
else:
env['PATH'] = path_prefix
# If using test-module.py and explode, the remote lib path will resemble:
# /tmp/test_module_scratch/debug_dir/ansible/module_utils/basic.py
# If using ansible or ansible-playbook with a remote system:
# /tmp/ansible_vmweLQ/ansible_modlib.zip/ansible/module_utils/basic.py
# Clean out python paths set by ansiballz
if 'PYTHONPATH' in env:
pypaths = [x for x in env['PYTHONPATH'].split(':')
if x and
not x.endswith('/ansible_modlib.zip') and
not x.endswith('/debug_dir')]
if pypaths and any(pypaths):
env['PYTHONPATH'] = ':'.join(pypaths)
if data:
st_in = subprocess.PIPE
def preexec():
self._restore_signal_handlers()
if umask:
os.umask(umask)
kwargs = dict(
executable=executable,
shell=shell,
close_fds=close_fds,
stdin=st_in,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
preexec_fn=preexec,
env=env,
)
if PY3 and pass_fds:
kwargs["pass_fds"] = pass_fds
elif PY2 and pass_fds:
kwargs['close_fds'] = False
# make sure we're in the right working directory
if cwd:
cwd = to_bytes(os.path.abspath(os.path.expanduser(cwd)), errors='surrogate_or_strict')
if os.path.isdir(cwd):
kwargs['cwd'] = cwd
elif not ignore_invalid_cwd:
self.fail_json(msg="Provided cwd is not a valid directory: %s" % cwd)
try:
if self._debug:
self.log('Executing: ' + self._clean_args(args))
cmd = subprocess.Popen(args, **kwargs)
if before_communicate_callback:
before_communicate_callback(cmd)
stdout = b''
stderr = b''
# Mirror the CPython subprocess logic and preference for the selector to use.
# poll/select have the advantage of not requiring any extra file
# descriptor, unlike epoll/kqueue (also, they require a single
# syscall).
if hasattr(selectors, 'PollSelector'):
selector = selectors.PollSelector()
else:
selector = selectors.SelectSelector()
if data:
if not binary_data:
data += '\n'
if isinstance(data, text_type):
data = to_bytes(data)
selector.register(cmd.stdout, selectors.EVENT_READ)
selector.register(cmd.stderr, selectors.EVENT_READ)
if os.name == 'posix':
fcntl.fcntl(cmd.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(cmd.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
fcntl.fcntl(cmd.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(cmd.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
if data:
cmd.stdin.write(data)
cmd.stdin.close()
while True:
# A timeout of 1 is both a little short and a little long.
# With None we could deadlock, with a lower value we would
# waste cycles. As it is, this is a mild inconvenience if
# we need to exit, and likely doesn't waste too many cycles
events = selector.select(1)
stdout_changed = False
for key, event in events:
b_chunk = key.fileobj.read(32768)
if not b_chunk:
selector.unregister(key.fileobj)
elif key.fileobj == cmd.stdout:
stdout += b_chunk
stdout_changed = True
elif key.fileobj == cmd.stderr:
stderr += b_chunk
# if we're checking for prompts, do it now, but only if stdout
# actually changed since the last loop
if prompt_re and stdout_changed and prompt_re.search(stdout) and not data:
if encoding:
stdout = to_native(stdout, encoding=encoding, errors=errors)
return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
# break out if no pipes are left to read or the pipes are completely read
# and the process is terminated
if (not events or not selector.get_map()) and cmd.poll() is not None:
break
# No pipes are left to read but process is not yet terminated
# Only then it is safe to wait for the process to be finished
# NOTE: Actually cmd.poll() is always None here if no selectors are left
elif not selector.get_map() and cmd.poll() is None:
cmd.wait()
# The process is terminated. Since no pipes to read from are
# left, there is no need to call select() again.
break
cmd.stdout.close()
cmd.stderr.close()
selector.close()
rc = cmd.returncode
except (OSError, IOError) as e:
self.log("Error Executing CMD:%s Exception:%s" % (self._clean_args(args), to_native(e)))
if handle_exceptions:
self.fail_json(rc=e.errno, stdout=b'', stderr=b'', msg=to_native(e), cmd=self._clean_args(args))
else:
raise e
except Exception as e:
self.log("Error Executing CMD:%s Exception:%s" % (self._clean_args(args), to_native(traceback.format_exc())))
if handle_exceptions:
self.fail_json(rc=257, stdout=b'', stderr=b'', msg=to_native(e), exception=traceback.format_exc(), cmd=self._clean_args(args))
else:
raise e
if rc != 0 and check_rc:
msg = heuristic_log_sanitize(stderr.rstrip(), self.no_log_values)
self.fail_json(cmd=self._clean_args(args), rc=rc, stdout=stdout, stderr=stderr, msg=msg)
if encoding is not None:
return (rc, to_native(stdout, encoding=encoding, errors=errors),
to_native(stderr, encoding=encoding, errors=errors))
return (rc, stdout, stderr)
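# Illustrative usage (not part of the original module; the command and the
# 'username' variable are hypothetical, 'module' is assumed to be an AnsibleModule
# instance): callers typically pass a list so no shell is involved, then inspect
# the returned tuple:
#
#     rc, out, err = module.run_command(['/usr/bin/getent', 'passwd', username])
#     if rc != 0:
#         module.fail_json(msg='user lookup failed', rc=rc, stdout=out, stderr=err)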
def append_to_file(self, filename, str):
filename = os.path.expandvars(os.path.expanduser(filename))
fh = open(filename, 'a')
fh.write(str)
fh.close()
def bytes_to_human(self, size):
return bytes_to_human(size)
# for backwards compatibility
pretty_bytes = bytes_to_human
def human_to_bytes(self, number, isbits=False):
return human_to_bytes(number, isbits)
#
# Backwards compat
#
# In 2.0, moved from inside the module to the toplevel
is_executable = is_executable
@staticmethod
def get_buffer_size(fd):
try:
# 1032 == F_GETPIPE_SZ (fcntl constant to query the pipe buffer size)
buffer_size = fcntl.fcntl(fd, 1032)
except Exception:
try:
# not as exact as above, but should be good enough for most platforms that fail the previous call
buffer_size = select.PIPE_BUF
except Exception:
buffer_size = 9000  # use a sane default just in case
return buffer_size
def get_module_path():
return os.path.dirname(os.path.realpath(__file__))
def __getattr__(importable_name):
"""Inject import-time deprecation warnings.
Specifically, for ``literal_eval()``, ``_literal_eval()``
and ``get_exception()``.
"""
if importable_name == 'get_exception':
deprecate(
msg=f'The `ansible.module_utils.basic.'
f'{importable_name}` function is deprecated.',
version='2.19',
)
from ansible.module_utils.pycompat24 import get_exception
return get_exception
if importable_name in {'literal_eval', '_literal_eval'}:
deprecate(
msg=f'The `ansible.module_utils.basic.'
f'{importable_name}` function is deprecated.',
version='2.19',
)
from ast import literal_eval
return literal_eval
raise AttributeError(
f'cannot import name {importable_name !r}: '
f'module {__file__ !s} has no such attribute',
)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,710 |
Configurable sampling/transfer of control-side task context metadata to targets
|
### Summary
We're often asked how to include arbitrary control-side contextual metadata with task invocations, and to include that metadata in target-side task log messages. e.g.: sending an AWX/Controller Job ID to the target hosts on each module invocation that occurred from that job, and logging it in the module-generated syslog/Windows Application Event Log entries for future correlation with the owning job.
I've not seen any consensus on precisely *which* data to include; one person's "critical forensic correlation data" is another's "unacceptable disclosure of sensitive execution detail". Seems like we'd need a generic facility to specify environment vars and/or hostvars to sample on the control host to be included with task invocations (under a reserved dictionary arg), and adjust the module logging APIs to include them.
My initial thought is to define a new core config element (defaulting to none) that allows the user to define a templated expression that would be rendered as part of each task's templating under a host context. The rendered result would be sent to modules as a new reserved internal module var. The module logging APIs would then include this value verbatim, when present. Other module code would also have access to the value, which could be used for anything. The new config would be settable either via ansible.cfg or an envvar, making it easier for AWX/Controller to later provide a mechanism to configure it for jobs using core versions that support it, while older versions would just silently ignore it.
Maybe something like:
```
ANSIBLE_ADDITIONAL_TASK_CONTEXT='{{awx_job_id}}'
```
When this config is non-empty, the defined template would be rendered for each task/host invocation, and its result included in a new `_ansible_additional_task_context` reserved module var. The resulting value, as with any Ansible template expression, could be of arbitrary complexity (eg, returning a data structure instead of just a scalar). The module logging APIs would include the serialized value verbatim in log messages when it is present, eg "ansible_additional_task_context=(whatever the value was)".
### Issue Type
Feature Idea
### Component Name
module invocation and logging
### Additional Information
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81710
|
https://github.com/ansible/ansible/pull/81711
|
4208bdbbcd994251579409ad533b40c9b0543550
|
1dd0d6fad70d7d4f423dac41822da65ff9ec95ef
| 2023-09-18T16:35:01Z |
python
| 2023-11-30T18:12:55Z |
lib/ansible/module_utils/common/parameters.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Ansible Project
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
from __future__ import annotations
import datetime
import os
from collections import deque
from itertools import chain
from ansible.module_utils.common.collections import is_iterable
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.warnings import warn
from ansible.module_utils.errors import (
AliasError,
AnsibleFallbackNotFound,
AnsibleValidationErrorMultiple,
ArgumentTypeError,
ArgumentValueError,
ElementError,
MutuallyExclusiveError,
NoLogError,
RequiredByError,
RequiredError,
RequiredIfError,
RequiredOneOfError,
RequiredTogetherError,
SubParameterTypeError,
)
from ansible.module_utils.parsing.convert_bool import BOOLEANS_FALSE, BOOLEANS_TRUE
from ansible.module_utils.six.moves.collections_abc import (
KeysView,
Set,
Sequence,
Mapping,
MutableMapping,
MutableSet,
MutableSequence,
)
from ansible.module_utils.six import (
binary_type,
integer_types,
string_types,
text_type,
PY2,
PY3,
)
from ansible.module_utils.common.validation import (
check_mutually_exclusive,
check_required_arguments,
check_required_together,
check_required_one_of,
check_required_if,
check_required_by,
check_type_bits,
check_type_bool,
check_type_bytes,
check_type_dict,
check_type_float,
check_type_int,
check_type_jsonarg,
check_type_list,
check_type_path,
check_type_raw,
check_type_str,
)
# Python2 & 3 way to get NoneType
NoneType = type(None)
_ADDITIONAL_CHECKS = (
{'func': check_required_together, 'attr': 'required_together', 'err': RequiredTogetherError},
{'func': check_required_one_of, 'attr': 'required_one_of', 'err': RequiredOneOfError},
{'func': check_required_if, 'attr': 'required_if', 'err': RequiredIfError},
{'func': check_required_by, 'attr': 'required_by', 'err': RequiredByError},
)
# if adding boolean attribute, also add to PASS_BOOL
# some of this dupes defaults from controller config
PASS_VARS = {
'check_mode': ('check_mode', False),
'debug': ('_debug', False),
'diff': ('_diff', False),
'keep_remote_files': ('_keep_remote_files', False),
'ignore_unknown_opts': ('_ignore_unknown_opts', False),
'module_name': ('_name', None),
'no_log': ('no_log', False),
'remote_tmp': ('_remote_tmp', None),
'selinux_special_fs': ('_selinux_special_fs', ['fuse', 'nfs', 'vboxsf', 'ramfs', '9p', 'vfat']),
'shell_executable': ('_shell', '/bin/sh'),
'socket': ('_socket_path', None),
'string_conversion_action': ('_string_conversion_action', 'warn'),
'syslog_facility': ('_syslog_facility', 'INFO'),
'tmpdir': ('_tmpdir', None),
'verbosity': ('_verbosity', 0),
'version': ('ansible_version', '0.0'),
}
PASS_BOOLS = ('check_mode', 'debug', 'diff', 'keep_remote_files', 'ignore_unknown_opts', 'no_log')
DEFAULT_TYPE_VALIDATORS = {
'str': check_type_str,
'list': check_type_list,
'dict': check_type_dict,
'bool': check_type_bool,
'int': check_type_int,
'float': check_type_float,
'path': check_type_path,
'raw': check_type_raw,
'jsonarg': check_type_jsonarg,
'json': check_type_jsonarg,
'bytes': check_type_bytes,
'bits': check_type_bits,
}
def _get_type_validator(wanted):
"""Returns the callable used to validate a wanted type and the type name.
:arg wanted: String or callable. If a string, get the corresponding
validation function from DEFAULT_TYPE_VALIDATORS. If callable,
get the name of the custom callable and return that for the type_checker.
:returns: Tuple of callable function or None, and a string that is the name
of the wanted type.
"""
# Use one of our builtin validators.
if not callable(wanted):
if wanted is None:
# Default type for parameters
wanted = 'str'
type_checker = DEFAULT_TYPE_VALIDATORS.get(wanted)
# Use the custom callable for validation.
else:
type_checker = wanted
wanted = getattr(wanted, '__name__', to_native(type(wanted)))
return type_checker, wanted
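# Illustrative behaviour (not part of the original module; my_checker is a
# hypothetical custom callable):
#
#     _get_type_validator('int')       # -> (check_type_int, 'int')
#     _get_type_validator(None)        # -> (check_type_str, 'str'), the default type
#     _get_type_validator(my_checker)  # -> (my_checker, 'my_checker')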
def _get_legal_inputs(argument_spec, parameters, aliases=None):
if aliases is None:
aliases = _handle_aliases(argument_spec, parameters)
return list(aliases.keys()) + list(argument_spec.keys())
def _get_unsupported_parameters(argument_spec, parameters, legal_inputs=None, options_context=None, store_supported=None):
"""Check keys in parameters against those provided in legal_inputs
to ensure they contain legal values. If legal_inputs are not supplied,
they will be generated using the argument_spec.
:arg argument_spec: Dictionary of parameters, their type, and valid values.
:arg parameters: Dictionary of parameters.
:arg legal_inputs: List of valid key (property) names. Overrides values
in argument_spec.
:arg options_context: List of parent keys for tracking the context of where
a parameter is defined.
:returns: Set of unsupported parameters. Empty set if no unsupported parameters
are found.
"""
if legal_inputs is None:
legal_inputs = _get_legal_inputs(argument_spec, parameters)
unsupported_parameters = set()
for k in parameters.keys():
if k not in legal_inputs:
context = k
if options_context:
context = tuple(options_context + [k])
unsupported_parameters.add(context)
if store_supported is not None:
supported_aliases = _handle_aliases(argument_spec, parameters)
supported_params = []
for option in legal_inputs:
if option in supported_aliases:
continue
supported_params.append(option)
store_supported.update({context: (supported_params, supported_aliases)})
return unsupported_parameters
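# Illustrative behaviour (not part of the original module; the spec and parameters
# are hypothetical):
#
#     spec = {'name': {'type': 'str'}, 'state': {'type': 'str'}}
#     _get_unsupported_parameters(spec, {'name': 'web01', 'bogus': 1})  # -> {'bogus'}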
def _handle_aliases(argument_spec, parameters, alias_warnings=None, alias_deprecations=None):
"""Process aliases from an argument_spec including warnings and deprecations.
Modify ``parameters`` by adding a new key for each alias with the supplied
value from ``parameters``.
If a list is provided to the alias_warnings parameter, it will be filled with tuples
(option, alias) in every case where both an option and its alias are specified.
If a list is provided to alias_deprecations, it will be populated with dictionaries,
each containing deprecation information for each alias found in argument_spec.
:param argument_spec: Dictionary of parameters, their type, and valid values.
:type argument_spec: dict
:param parameters: Dictionary of parameters.
:type parameters: dict
:param alias_warnings:
:type alias_warnings: list
:param alias_deprecations:
:type alias_deprecations: list
"""
aliases_results = {} # alias:canon
for (k, v) in argument_spec.items():
aliases = v.get('aliases', None)
default = v.get('default', None)
required = v.get('required', False)
if alias_deprecations is not None:
for alias in argument_spec[k].get('deprecated_aliases', []):
if alias.get('name') in parameters:
alias_deprecations.append(alias)
if default is not None and required:
# not alias specific but this is a good place to check this
raise ValueError("internal error: required and default are mutually exclusive for %s" % k)
if aliases is None:
continue
if not is_iterable(aliases) or isinstance(aliases, (binary_type, text_type)):
raise TypeError('internal error: aliases must be a list or tuple')
for alias in aliases:
aliases_results[alias] = k
if alias in parameters:
if k in parameters and alias_warnings is not None:
alias_warnings.append((k, alias))
parameters[k] = parameters[alias]
return aliases_results
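# Illustrative behaviour (not part of the original module; the spec and parameters
# are hypothetical):
#
#     spec = {'path': {'type': 'path', 'aliases': ['dest', 'name']}}
#     params = {'dest': '/tmp/x'}
#     _handle_aliases(spec, params)  # -> {'dest': 'path', 'name': 'path'}
#     # params is also updated in place: {'dest': '/tmp/x', 'path': '/tmp/x'}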
def _list_deprecations(argument_spec, parameters, prefix=''):
"""Return a list of deprecations
:arg argument_spec: An argument spec dictionary
:arg parameters: Dictionary of parameters
:returns: List of dictionaries containing a message and version in which
the deprecated parameter will be removed, or an empty list.
:Example return:
.. code-block:: python
[
{
'msg': "Param 'deptest' is deprecated. See the module docs for more information",
'version': '2.9'
}
]
"""
deprecations = []
for arg_name, arg_opts in argument_spec.items():
if arg_name in parameters:
if prefix:
sub_prefix = '%s["%s"]' % (prefix, arg_name)
else:
sub_prefix = arg_name
if arg_opts.get('removed_at_date') is not None:
deprecations.append({
'msg': "Param '%s' is deprecated. See the module docs for more information" % sub_prefix,
'date': arg_opts.get('removed_at_date'),
'collection_name': arg_opts.get('removed_from_collection'),
})
elif arg_opts.get('removed_in_version') is not None:
deprecations.append({
'msg': "Param '%s' is deprecated. See the module docs for more information" % sub_prefix,
'version': arg_opts.get('removed_in_version'),
'collection_name': arg_opts.get('removed_from_collection'),
})
# Check sub-argument spec
sub_argument_spec = arg_opts.get('options')
if sub_argument_spec is not None:
sub_arguments = parameters[arg_name]
if isinstance(sub_arguments, Mapping):
sub_arguments = [sub_arguments]
if isinstance(sub_arguments, list):
for sub_params in sub_arguments:
if isinstance(sub_params, Mapping):
deprecations.extend(_list_deprecations(sub_argument_spec, sub_params, prefix=sub_prefix))
return deprecations
def _list_no_log_values(argument_spec, params):
"""Return set of no log values
:arg argument_spec: An argument spec dictionary
:arg params: Dictionary of all parameters
:returns: :class:`set` of strings that should be hidden from output
"""
no_log_values = set()
for arg_name, arg_opts in argument_spec.items():
if arg_opts.get('no_log', False):
# Find the value for the no_log'd param
no_log_object = params.get(arg_name, None)
if no_log_object:
try:
no_log_values.update(_return_datastructure_name(no_log_object))
except TypeError as e:
raise TypeError('Failed to convert "%s": %s' % (arg_name, to_native(e)))
# Get no_log values from suboptions
sub_argument_spec = arg_opts.get('options')
if sub_argument_spec is not None:
wanted_type = arg_opts.get('type')
sub_parameters = params.get(arg_name)
if sub_parameters is not None:
if wanted_type == 'dict' or (wanted_type == 'list' and arg_opts.get('elements', '') == 'dict'):
# Sub parameters can be a dict or list of dicts. Ensure parameters are always a list.
if not isinstance(sub_parameters, list):
sub_parameters = [sub_parameters]
for sub_param in sub_parameters:
# Validate dict fields in case they came in as strings
if isinstance(sub_param, string_types):
sub_param = check_type_dict(sub_param)
if not isinstance(sub_param, Mapping):
raise TypeError("Value '{1}' in the sub parameter field '{0}' must by a {2}, "
"not '{1.__class__.__name__}'".format(arg_name, sub_param, wanted_type))
no_log_values.update(_list_no_log_values(sub_argument_spec, sub_param))
return no_log_values
def _return_datastructure_name(obj):
""" Return native stringified values from datastructures.
For use with removing sensitive values pre-jsonification."""
if isinstance(obj, (text_type, binary_type)):
if obj:
yield to_native(obj, errors='surrogate_or_strict')
return
elif isinstance(obj, Mapping):
for element in obj.items():
for subelement in _return_datastructure_name(element[1]):
yield subelement
elif is_iterable(obj):
for element in obj:
for subelement in _return_datastructure_name(element):
yield subelement
elif obj is None or isinstance(obj, bool):
# This must come before int because bools are also ints
return
elif isinstance(obj, tuple(list(integer_types) + [float])):
yield to_native(obj, nonstring='simplerepr')
else:
raise TypeError('Unknown parameter type: %s' % (type(obj)))
def _remove_values_conditions(value, no_log_strings, deferred_removals):
"""
Helper function for :meth:`remove_values`.
:arg value: The value to check for strings that need to be stripped
:arg no_log_strings: set of strings which must be stripped out of any values
:arg deferred_removals: List which holds information about nested
containers that have to be iterated for removals. It is passed into
this function so that more entries can be added to it if value is
a container type. The format of each entry is a 2-tuple where the first
element is the ``value`` parameter and the second value is a new
container to copy the elements of ``value`` into once iterated.
:returns: if ``value`` is a scalar, returns ``value`` with two exceptions:
1. :class:`~datetime.datetime` objects which are changed into a string representation.
2. objects which are in ``no_log_strings`` are replaced with a placeholder
so that no sensitive data is leaked.
If ``value`` is a container type, returns a new empty container.
``deferred_removals`` is added to as a side-effect of this function.
.. warning:: It is up to the caller to make sure the order in which value
is passed in is correct. For instance, higher level containers need
to be passed in before lower level containers. For example, given
``{'level1': {'level2': {'level3': [True]}}}`` first pass in the
dictionary for ``level1``, then the dict for ``level2``, and finally
the list for ``level3``.
"""
if isinstance(value, (text_type, binary_type)):
# Need native str type
native_str_value = value
if isinstance(value, text_type):
value_is_text = True
if PY2:
native_str_value = to_bytes(value, errors='surrogate_or_strict')
elif isinstance(value, binary_type):
value_is_text = False
if PY3:
native_str_value = to_text(value, errors='surrogate_or_strict')
if native_str_value in no_log_strings:
return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
for omit_me in no_log_strings:
native_str_value = native_str_value.replace(omit_me, '*' * 8)
if value_is_text and isinstance(native_str_value, binary_type):
value = to_text(native_str_value, encoding='utf-8', errors='surrogate_then_replace')
elif not value_is_text and isinstance(native_str_value, text_type):
value = to_bytes(native_str_value, encoding='utf-8', errors='surrogate_then_replace')
else:
value = native_str_value
elif isinstance(value, Sequence):
if isinstance(value, MutableSequence):
new_value = type(value)()
else:
new_value = [] # Need a mutable value
deferred_removals.append((value, new_value))
value = new_value
elif isinstance(value, Set):
if isinstance(value, MutableSet):
new_value = type(value)()
else:
new_value = set() # Need a mutable value
deferred_removals.append((value, new_value))
value = new_value
elif isinstance(value, Mapping):
if isinstance(value, MutableMapping):
new_value = type(value)()
else:
new_value = {} # Need a mutable value
deferred_removals.append((value, new_value))
value = new_value
elif isinstance(value, tuple(chain(integer_types, (float, bool, NoneType)))):
stringy_value = to_native(value, encoding='utf-8', errors='surrogate_or_strict')
if stringy_value in no_log_strings:
return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
for omit_me in no_log_strings:
if omit_me in stringy_value:
return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
elif isinstance(value, (datetime.datetime, datetime.date)):
value = value.isoformat()
else:
raise TypeError('Value of unknown type: %s, %s' % (type(value), value))
return value
def _set_defaults(argument_spec, parameters, set_default=True):
"""Set default values for parameters when no value is supplied.
Modifies parameters directly.
:arg argument_spec: Argument spec
:type argument_spec: dict
:arg parameters: Parameters to evaluate
:type parameters: dict
:kwarg set_default: Whether or not to set the default values
:type set_default: bool
:returns: Set of strings that should not be logged.
:rtype: set
"""
no_log_values = set()
for param, value in argument_spec.items():
# TODO: Change the default value from None to Sentinel to differentiate between
# user supplied None and a default value set by this function.
default = value.get('default', None)
# This prevents setting defaults on required items on the 1st run,
# otherwise will set things without a default to None on the 2nd.
if param not in parameters and (default is not None or set_default):
# Make sure any default value for no_log fields are masked.
if value.get('no_log', False) and default:
no_log_values.add(default)
parameters[param] = default
return no_log_values
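# Illustrative behaviour (not part of the original module; the spec is hypothetical):
#
#     spec = {'state': {'type': 'str', 'default': 'present'}, 'force': {'type': 'bool'}}
#     params = {}
#     _set_defaults(spec, params)                 # params -> {'state': 'present', 'force': None}
#     _set_defaults(spec, {}, set_default=False)  # would only apply defaults that are not None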
def _sanitize_keys_conditions(value, no_log_strings, ignore_keys, deferred_removals):
""" Helper method to :func:`sanitize_keys` to build ``deferred_removals`` and avoid deep recursion. """
if isinstance(value, (text_type, binary_type)):
return value
if isinstance(value, Sequence):
if isinstance(value, MutableSequence):
new_value = type(value)()
else:
new_value = [] # Need a mutable value
deferred_removals.append((value, new_value))
return new_value
if isinstance(value, Set):
if isinstance(value, MutableSet):
new_value = type(value)()
else:
new_value = set() # Need a mutable value
deferred_removals.append((value, new_value))
return new_value
if isinstance(value, Mapping):
if isinstance(value, MutableMapping):
new_value = type(value)()
else:
new_value = {} # Need a mutable value
deferred_removals.append((value, new_value))
return new_value
if isinstance(value, tuple(chain(integer_types, (float, bool, NoneType)))):
return value
if isinstance(value, (datetime.datetime, datetime.date)):
return value
raise TypeError('Value of unknown type: %s, %s' % (type(value), value))
def _validate_elements(wanted_type, parameter, values, options_context=None, errors=None):
if errors is None:
errors = AnsibleValidationErrorMultiple()
type_checker, wanted_element_type = _get_type_validator(wanted_type)
validated_parameters = []
# Get param name for strings so we can later display this value in a useful error message if needed
# Only pass 'kwargs' to our checkers and ignore custom callable checkers
kwargs = {}
if wanted_element_type == 'str' and isinstance(wanted_type, string_types):
if isinstance(parameter, string_types):
kwargs['param'] = parameter
elif isinstance(parameter, dict):
kwargs['param'] = list(parameter.keys())[0]
for value in values:
try:
validated_parameters.append(type_checker(value, **kwargs))
except (TypeError, ValueError) as e:
msg = "Elements value for option '%s'" % parameter
if options_context:
msg += " found in '%s'" % " -> ".join(options_context)
msg += " is of type %s and we were unable to convert to %s: %s" % (type(value), wanted_element_type, to_native(e))
errors.append(ElementError(msg))
return validated_parameters
def _validate_argument_types(argument_spec, parameters, prefix='', options_context=None, errors=None):
"""Validate that parameter types match the type in the argument spec.
Determine the appropriate type checker function and run each
parameter value through that function. All error messages from type checker
functions are returned. If any parameter fails to validate, it will not
be in the returned parameters.
:arg argument_spec: Argument spec
:type argument_spec: dict
:arg parameters: Parameters
:type parameters: dict
:kwarg prefix: Name of the parent key that contains the spec. Used in the error message
:type prefix: str
:kwarg options_context: List of parent keys for tracking the context of where
a parameter is defined.
:type options_context: list
:returns: None. ``parameters`` is validated and coerced in place, and any
errors encountered are appended to ``errors``.
"""
if errors is None:
errors = AnsibleValidationErrorMultiple()
for param, spec in argument_spec.items():
if param not in parameters:
continue
value = parameters[param]
if value is None and not spec.get('required') and spec.get('default') is None:
continue
wanted_type = spec.get('type')
type_checker, wanted_name = _get_type_validator(wanted_type)
# Get param name for strings so we can later display this value in a useful error message if needed
# Only pass 'kwargs' to our checkers and ignore custom callable checkers
kwargs = {}
if wanted_name == 'str' and isinstance(wanted_type, string_types):
kwargs['param'] = list(parameters.keys())[0]
# Get the name of the parent key if this is a nested option
if prefix:
kwargs['prefix'] = prefix
try:
parameters[param] = type_checker(value, **kwargs)
elements_wanted_type = spec.get('elements', None)
if elements_wanted_type:
elements = parameters[param]
if wanted_type != 'list' or not isinstance(elements, list):
msg = "Invalid type %s for option '%s'" % (wanted_name, elements)
if options_context:
msg += " found in '%s'." % " -> ".join(options_context)
msg += ", elements value check is supported only with 'list' type"
errors.append(ArgumentTypeError(msg))
parameters[param] = _validate_elements(elements_wanted_type, param, elements, options_context, errors)
except (TypeError, ValueError) as e:
msg = "argument '%s' is of type %s" % (param, type(value))
if options_context:
msg += " found in '%s'." % " -> ".join(options_context)
msg += " and we were unable to convert to %s: %s" % (wanted_name, to_native(e))
errors.append(ArgumentTypeError(msg))
def _validate_argument_values(argument_spec, parameters, options_context=None, errors=None):
"""Ensure all arguments have the requested values, and there are no stray arguments"""
if errors is None:
errors = AnsibleValidationErrorMultiple()
for param, spec in argument_spec.items():
choices = spec.get('choices')
if choices is None:
continue
if isinstance(choices, (frozenset, KeysView, Sequence)) and not isinstance(choices, (binary_type, text_type)):
if param in parameters:
# Allow one or more when type='list' param with choices
if isinstance(parameters[param], list):
diff_list = [item for item in parameters[param] if item not in choices]
if diff_list:
choices_str = ", ".join([to_native(c) for c in choices])
diff_str = ", ".join(diff_list)
msg = "value of %s must be one or more of: %s. Got no match for: %s" % (param, choices_str, diff_str)
if options_context:
msg = "{0} found in {1}".format(msg, " -> ".join(options_context))
errors.append(ArgumentValueError(msg))
elif parameters[param] not in choices:
# PyYaml converts certain strings to bools. If we can unambiguously convert back, do so before checking
# the value. If we can't figure this out, module author is responsible.
if parameters[param] == 'False':
overlap = BOOLEANS_FALSE.intersection(choices)
if len(overlap) == 1:
# Extract from a set
(parameters[param],) = overlap
if parameters[param] == 'True':
overlap = BOOLEANS_TRUE.intersection(choices)
if len(overlap) == 1:
(parameters[param],) = overlap
if parameters[param] not in choices:
choices_str = ", ".join([to_native(c) for c in choices])
msg = "value of %s must be one of: %s, got: %s" % (param, choices_str, parameters[param])
if options_context:
msg = "{0} found in {1}".format(msg, " -> ".join(options_context))
errors.append(ArgumentValueError(msg))
else:
msg = "internal error: choices for argument %s are not iterable: %s" % (param, choices)
if options_context:
msg = "{0} found in {1}".format(msg, " -> ".join(options_context))
errors.append(ArgumentTypeError(msg))
def _validate_sub_spec(
argument_spec,
parameters,
prefix="",
options_context=None,
errors=None,
no_log_values=None,
unsupported_parameters=None,
supported_parameters=None,
alias_deprecations=None,
):
"""Validate sub argument spec.
This function is recursive.
"""
if options_context is None:
options_context = []
if errors is None:
errors = AnsibleValidationErrorMultiple()
if no_log_values is None:
no_log_values = set()
if unsupported_parameters is None:
unsupported_parameters = set()
if supported_parameters is None:
supported_parameters = dict()
for param, value in argument_spec.items():
wanted = value.get('type')
if wanted == 'dict' or (wanted == 'list' and value.get('elements', '') == 'dict'):
sub_spec = value.get('options')
if value.get('apply_defaults', False):
if sub_spec is not None:
if parameters.get(param) is None:
parameters[param] = {}
else:
continue
elif sub_spec is None or param not in parameters or parameters[param] is None:
continue
# Keep track of context for warning messages
options_context.append(param)
# Make sure we can iterate over the elements
if not isinstance(parameters[param], Sequence) or isinstance(parameters[param], string_types):
elements = [parameters[param]]
else:
elements = parameters[param]
for idx, sub_parameters in enumerate(elements):
no_log_values.update(set_fallbacks(sub_spec, sub_parameters))
if not isinstance(sub_parameters, dict):
errors.append(SubParameterTypeError("value of '%s' must be of type dict or list of dicts" % param))
continue
# Set prefix for warning messages
new_prefix = prefix + param
if wanted == 'list':
new_prefix += '[%d]' % idx
new_prefix += '.'
alias_warnings = []
alias_deprecations_sub = []
try:
options_aliases = _handle_aliases(sub_spec, sub_parameters, alias_warnings, alias_deprecations_sub)
except (TypeError, ValueError) as e:
options_aliases = {}
errors.append(AliasError(to_native(e)))
for option, alias in alias_warnings:
warn('Both option %s%s and its alias %s%s are set.' % (new_prefix, option, new_prefix, alias))
if alias_deprecations is not None:
for deprecation in alias_deprecations_sub:
alias_deprecations.append({
'name': '%s%s' % (new_prefix, deprecation['name']),
'version': deprecation.get('version'),
'date': deprecation.get('date'),
'collection_name': deprecation.get('collection_name'),
})
try:
no_log_values.update(_list_no_log_values(sub_spec, sub_parameters))
except TypeError as te:
errors.append(NoLogError(to_native(te)))
legal_inputs = _get_legal_inputs(sub_spec, sub_parameters, options_aliases)
unsupported_parameters.update(
_get_unsupported_parameters(
sub_spec,
sub_parameters,
legal_inputs,
options_context,
store_supported=supported_parameters,
)
)
try:
check_mutually_exclusive(value.get('mutually_exclusive'), sub_parameters, options_context)
except TypeError as e:
errors.append(MutuallyExclusiveError(to_native(e)))
no_log_values.update(_set_defaults(sub_spec, sub_parameters, False))
try:
check_required_arguments(sub_spec, sub_parameters, options_context)
except TypeError as e:
errors.append(RequiredError(to_native(e)))
_validate_argument_types(sub_spec, sub_parameters, new_prefix, options_context, errors=errors)
_validate_argument_values(sub_spec, sub_parameters, options_context, errors=errors)
for check in _ADDITIONAL_CHECKS:
try:
check['func'](value.get(check['attr']), sub_parameters, options_context)
except TypeError as e:
errors.append(check['err'](to_native(e)))
no_log_values.update(_set_defaults(sub_spec, sub_parameters))
# Handle nested specs
_validate_sub_spec(
sub_spec, sub_parameters, new_prefix, options_context, errors, no_log_values,
unsupported_parameters, supported_parameters, alias_deprecations)
options_context.pop()
def env_fallback(*args, **kwargs):
"""Load value from environment variable"""
for arg in args:
if arg in os.environ:
return os.environ[arg]
raise AnsibleFallbackNotFound
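# Illustrative usage (not part of the original module; the option and environment
# variable names are hypothetical): a fallback in an argument_spec lets a parameter
# be taken from the environment when it is not supplied:
#
#     argument_spec = dict(
#         api_token=dict(type='str', no_log=True,
#                        fallback=(env_fallback, ['MYSERVICE_TOKEN', 'MYSERVICE_API_TOKEN'])),
#     )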
def set_fallbacks(argument_spec, parameters):
no_log_values = set()
for param, value in argument_spec.items():
fallback = value.get('fallback', (None,))
fallback_strategy = fallback[0]
fallback_args = []
fallback_kwargs = {}
if param not in parameters and fallback_strategy is not None:
for item in fallback[1:]:
if isinstance(item, dict):
fallback_kwargs = item
else:
fallback_args = item
try:
fallback_value = fallback_strategy(*fallback_args, **fallback_kwargs)
except AnsibleFallbackNotFound:
continue
else:
if value.get('no_log', False) and fallback_value:
no_log_values.add(fallback_value)
parameters[param] = fallback_value
return no_log_values
def sanitize_keys(obj, no_log_strings, ignore_keys=frozenset()):
"""Sanitize the keys in a container object by removing ``no_log`` values from key names.
This is a companion function to the :func:`remove_values` function. Similar to that function,
we make use of ``deferred_removals`` to avoid hitting maximum recursion depth in cases of
large data structures.
:arg obj: The container object to sanitize. Non-container objects are returned unmodified.
:arg no_log_strings: A set of string values we do not want logged.
:kwarg ignore_keys: A set of string values of keys to not sanitize.
:returns: An object with sanitized keys.
"""
deferred_removals = deque()
no_log_strings = [to_native(s, errors='surrogate_or_strict') for s in no_log_strings]
new_value = _sanitize_keys_conditions(obj, no_log_strings, ignore_keys, deferred_removals)
while deferred_removals:
old_data, new_data = deferred_removals.popleft()
if isinstance(new_data, Mapping):
for old_key, old_elem in old_data.items():
if old_key in ignore_keys or old_key.startswith('_ansible'):
new_data[old_key] = _sanitize_keys_conditions(old_elem, no_log_strings, ignore_keys, deferred_removals)
else:
# Sanitize the old key. We take advantage of the sanitizing code in
# _remove_values_conditions() rather than recreating it here.
new_key = _remove_values_conditions(old_key, no_log_strings, None)
new_data[new_key] = _sanitize_keys_conditions(old_elem, no_log_strings, ignore_keys, deferred_removals)
else:
for elem in old_data:
new_elem = _sanitize_keys_conditions(elem, no_log_strings, ignore_keys, deferred_removals)
if isinstance(new_data, MutableSequence):
new_data.append(new_elem)
elif isinstance(new_data, MutableSet):
new_data.add(new_elem)
else:
raise TypeError('Unknown container type encountered when removing private values from keys')
return new_value
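# Illustrative behaviour (not part of the original module; the data is hypothetical):
# only key names are sanitized, values are left alone:
#
#     data = {'secret-key': 'value', 'nested': {'secret-key': 1}}
#     sanitize_keys(data, {'secret'})
#     # -> {'********-key': 'value', 'nested': {'********-key': 1}}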
def remove_values(value, no_log_strings):
"""Remove strings in ``no_log_strings`` from value.
If value is a container type, then remove a lot more.
Use of ``deferred_removals`` exists, rather than a pure recursive solution,
because of the potential to hit the maximum recursion depth when dealing with
large amounts of data (see `issue #24560 <https://github.com/ansible/ansible/issues/24560>`_).
"""
deferred_removals = deque()
no_log_strings = [to_native(s, errors='surrogate_or_strict') for s in no_log_strings]
new_value = _remove_values_conditions(value, no_log_strings, deferred_removals)
while deferred_removals:
old_data, new_data = deferred_removals.popleft()
if isinstance(new_data, Mapping):
for old_key, old_elem in old_data.items():
new_elem = _remove_values_conditions(old_elem, no_log_strings, deferred_removals)
new_data[old_key] = new_elem
else:
for elem in old_data:
new_elem = _remove_values_conditions(elem, no_log_strings, deferred_removals)
if isinstance(new_data, MutableSequence):
new_data.append(new_elem)
elif isinstance(new_data, MutableSet):
new_data.add(new_elem)
else:
raise TypeError('Unknown container type encountered when removing private values from output')
return new_value
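# Illustrative behaviour (not part of the original module; the data is hypothetical):
#
#     remove_values({'msg': 'token=abc123', 'count': 3}, {'abc123'})
#     # -> {'msg': 'token=********', 'count': 3}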
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,710 |
Configurable sampling/transfer of control-side task context metadata to targets
|
### Summary
We're often asked how to include arbitrary control-side contextual metadata with task invocations, and to include that metadata in target-side task log messages. e.g.: sending an AWX/Controller Job ID to the target hosts on each module invocation that occurred from that job, and logging it in the module-generated syslog/Windows Application Event Log entries for future correlation with the owning job.
I've not seen any consensus on precisely *which* data to include; one person's "critical forensic correlation data" is another's "unacceptable disclosure of sensitive execution detail". Seems like we'd need a generic facility to specify environment vars and/or hostvars to sample on the control host to be included with task invocations (under a reserved dictionary arg), and adjust the module logging APIs to include them.
My initial thought is to define a new core config element (defaulting to none) that allows the user to define a templated expression that would be rendered as part of each task's templating under a host context. The rendered result would be sent to modules as a new reserved internal module var. The module logging APIs would then include this value verbatim, when present. Other module code would also have access to the value, which could be used for anything. The new config would be settable either via ansible.cfg or an envvar, making it easier for AWX/Controller to later provide a mechanism to configure it for jobs using core versions that support it, while older versions would just silently ignore it.
Maybe something like:
```
ANSIBLE_ADDITIONAL_TASK_CONTEXT='{{awx_job_id}}'
```
When this config is non-empty, the defined template would be rendered for each task/host invocation, and its result included in a new `_ansible_additional_task_context` reserved module var. The resulting value, as with any Ansible template expression, could be of arbitrary complexity (eg, returning a data structure instead of just a scalar). The module logging APIs would include the serialized value verbatim in log messages when it is present, eg "ansible_additional_task_context=(whatever the value was)".
### Issue Type
Feature Idea
### Component Name
module invocation and logging
### Additional Information
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81710
|
https://github.com/ansible/ansible/pull/81711
|
4208bdbbcd994251579409ad533b40c9b0543550
|
1dd0d6fad70d7d4f423dac41822da65ff9ec95ef
| 2023-09-18T16:35:01Z |
python
| 2023-11-30T18:12:55Z |
lib/ansible/module_utils/csharp/Ansible.Basic.cs
|
using Microsoft.Win32.SafeHandles;
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Management.Automation;
using System.Management.Automation.Runspaces;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Security.AccessControl;
using System.Security.Principal;
#if CORECLR
using Newtonsoft.Json;
#else
using System.Web.Script.Serialization;
#endif
// Newtonsoft.Json may reference a different System.Runtime version (6.x) than loaded by PowerShell 7.3 (7.x).
// Ignore CS1701 so the code can be compiled when warnings are reported as errors.
//NoWarn -Name CS1701 -CLR Core
// System.Diagnostics.EventLog.dll reference different versioned dlls that are
// loaded in PSCore, ignore CS1702 so the code will ignore this warning
//NoWarn -Name CS1702 -CLR Core
//AssemblyReference -Type Newtonsoft.Json.JsonConvert -CLR Core
//AssemblyReference -Type System.Diagnostics.EventLog -CLR Core
//AssemblyReference -Type System.Security.AccessControl.NativeObjectSecurity -CLR Core
//AssemblyReference -Type System.Security.AccessControl.DirectorySecurity -CLR Core
//AssemblyReference -Type System.Security.Principal.IdentityReference -CLR Core
//AssemblyReference -Name System.Web.Extensions.dll -CLR Framework
namespace Ansible.Basic
{
public class AnsibleModule
{
public delegate void ExitHandler(int rc);
public static ExitHandler Exit = new ExitHandler(ExitModule);
public delegate void WriteLineHandler(string line);
public static WriteLineHandler WriteLine = new WriteLineHandler(WriteLineModule);
public static bool _DebugArgSpec = false;
private static List<string> BOOLEANS_TRUE = new List<string>() { "y", "yes", "on", "1", "true", "t", "1.0" };
private static List<string> BOOLEANS_FALSE = new List<string>() { "n", "no", "off", "0", "false", "f", "0.0" };
private bool ignoreUnknownOpts = false;
private string remoteTmp = Path.GetTempPath();
private string tmpdir = null;
private HashSet<string> noLogValues = new HashSet<string>();
private List<string> optionsContext = new List<string>();
private List<string> warnings = new List<string>();
private List<Dictionary<string, string>> deprecations = new List<Dictionary<string, string>>();
private List<string> cleanupFiles = new List<string>();
private Dictionary<string, string> passVars = new Dictionary<string, string>()
{
// null values mean no mapping, not used in Ansible.Basic.AnsibleModule
{ "check_mode", "CheckMode" },
{ "debug", "DebugMode" },
{ "diff", "DiffMode" },
{ "keep_remote_files", "KeepRemoteFiles" },
{ "ignore_unknown_opts", "ignoreUnknownOpts" },
{ "module_name", "ModuleName" },
{ "no_log", "NoLog" },
{ "remote_tmp", "remoteTmp" },
{ "selinux_special_fs", null },
{ "shell_executable", null },
{ "socket", null },
{ "string_conversion_action", null },
{ "syslog_facility", null },
{ "tmpdir", "tmpdir" },
{ "verbosity", "Verbosity" },
{ "version", "AnsibleVersion" },
};
private List<string> passBools = new List<string>() { "check_mode", "debug", "diff", "keep_remote_files", "ignore_unknown_opts", "no_log" };
private List<string> passInts = new List<string>() { "verbosity" };
private Dictionary<string, List<object>> specDefaults = new Dictionary<string, List<object>>()
{
// key - (default, type) - null is freeform
{ "apply_defaults", new List<object>() { false, typeof(bool) } },
{ "aliases", new List<object>() { typeof(List<string>), typeof(List<string>) } },
{ "choices", new List<object>() { typeof(List<object>), typeof(List<object>) } },
{ "default", new List<object>() { null, null } },
{ "deprecated_aliases", new List<object>() { typeof(List<Hashtable>), typeof(List<Hashtable>) } },
{ "elements", new List<object>() { null, null } },
{ "mutually_exclusive", new List<object>() { typeof(List<List<string>>), typeof(List<object>) } },
{ "no_log", new List<object>() { false, typeof(bool) } },
{ "options", new List<object>() { typeof(Hashtable), typeof(Hashtable) } },
{ "removed_in_version", new List<object>() { null, typeof(string) } },
{ "removed_at_date", new List<object>() { null, typeof(DateTime) } },
{ "removed_from_collection", new List<object>() { null, typeof(string) } },
{ "required", new List<object>() { false, typeof(bool) } },
{ "required_by", new List<object>() { typeof(Hashtable), typeof(Hashtable) } },
{ "required_if", new List<object>() { typeof(List<List<object>>), typeof(List<object>) } },
{ "required_one_of", new List<object>() { typeof(List<List<string>>), typeof(List<object>) } },
{ "required_together", new List<object>() { typeof(List<List<string>>), typeof(List<object>) } },
{ "supports_check_mode", new List<object>() { false, typeof(bool) } },
{ "type", new List<object>() { "str", null } },
};
private Dictionary<string, Delegate> optionTypes = new Dictionary<string, Delegate>()
{
{ "bool", new Func<object, bool>(ParseBool) },
{ "dict", new Func<object, Dictionary<string, object>>(ParseDict) },
{ "float", new Func<object, float>(ParseFloat) },
{ "int", new Func<object, int>(ParseInt) },
{ "json", new Func<object, string>(ParseJson) },
{ "list", new Func<object, List<object>>(ParseList) },
{ "path", new Func<object, string>(ParsePath) },
{ "raw", new Func<object, object>(ParseRaw) },
{ "sid", new Func<object, SecurityIdentifier>(ParseSid) },
{ "str", new Func<object, string>(ParseStr) },
};
public Dictionary<string, object> Diff = new Dictionary<string, object>();
public IDictionary Params = null;
public Dictionary<string, object> Result = new Dictionary<string, object>() { { "changed", false } };
public bool CheckMode { get; private set; }
public bool DebugMode { get; private set; }
public bool DiffMode { get; private set; }
public bool KeepRemoteFiles { get; private set; }
public string ModuleName { get; private set; }
public bool NoLog { get; private set; }
public int Verbosity { get; private set; }
public string AnsibleVersion { get; private set; }
public string Tmpdir
{
get
{
if (tmpdir == null)
{
#if WINDOWS
SecurityIdentifier user = WindowsIdentity.GetCurrent().User;
DirectorySecurity dirSecurity = new DirectorySecurity();
dirSecurity.SetOwner(user);
dirSecurity.SetAccessRuleProtection(true, false); // disable inheritance rules
FileSystemAccessRule ace = new FileSystemAccessRule(user, FileSystemRights.FullControl,
InheritanceFlags.ContainerInherit | InheritanceFlags.ObjectInherit,
PropagationFlags.None, AccessControlType.Allow);
dirSecurity.AddAccessRule(ace);
string baseDir = Path.GetFullPath(Environment.ExpandEnvironmentVariables(remoteTmp));
if (!Directory.Exists(baseDir))
{
string failedMsg = null;
try
{
#if CORECLR
DirectoryInfo createdDir = Directory.CreateDirectory(baseDir);
FileSystemAclExtensions.SetAccessControl(createdDir, dirSecurity);
#else
Directory.CreateDirectory(baseDir, dirSecurity);
#endif
}
catch (Exception e)
{
failedMsg = String.Format("Failed to create base tmpdir '{0}': {1}", baseDir, e.Message);
}
if (failedMsg != null)
{
string envTmp = Path.GetTempPath();
Warn(String.Format("Unable to use '{0}' as temporary directory, falling back to system tmp '{1}': {2}", baseDir, envTmp, failedMsg));
baseDir = envTmp;
}
else
{
NTAccount currentUser = (NTAccount)user.Translate(typeof(NTAccount));
string warnMsg = String.Format("Module remote_tmp {0} did not exist and was created with FullControl to {1}, ", baseDir, currentUser.ToString());
warnMsg += "this may cause issues when running as another user. To avoid this, create the remote_tmp dir with the correct permissions manually";
Warn(warnMsg);
}
}
string dateTime = DateTime.Now.ToFileTime().ToString();
string dirName = String.Format("ansible-moduletmp-{0}-{1}-{2}", dateTime, System.Diagnostics.Process.GetCurrentProcess().Id,
new Random().Next(0, int.MaxValue));
string newTmpdir = Path.Combine(baseDir, dirName);
#if CORECLR
DirectoryInfo tmpdirInfo = Directory.CreateDirectory(newTmpdir);
FileSystemAclExtensions.SetAccessControl(tmpdirInfo, dirSecurity);
#else
Directory.CreateDirectory(newTmpdir, dirSecurity);
#endif
tmpdir = newTmpdir;
if (!KeepRemoteFiles)
cleanupFiles.Add(tmpdir);
#else
throw new NotImplementedException("Tmpdir is only supported on Windows");
#endif
}
return tmpdir;
}
}
public AnsibleModule(string[] args, IDictionary argumentSpec, IDictionary[] fragments = null)
{
// NoLog is not set yet, we cannot rely on FailJson to sanitize the output
// Do the minimum amount to get this running before we actually parse the params
Dictionary<string, string> aliases = new Dictionary<string, string>();
try
{
ValidateArgumentSpec(argumentSpec);
// Merge the fragments if present into the main arg spec.
if (fragments != null)
{
foreach (IDictionary fragment in fragments)
{
ValidateArgumentSpec(fragment);
MergeFragmentSpec(argumentSpec, fragment);
}
}
// Used by ansible-test to retrieve the module argument spec, not designed for public use.
if (_DebugArgSpec)
{
// Cannot call exit here because it will be caught with the catch (Exception e) below. Instead
// just throw a new exception with a specific message and the exception block will handle it.
ScriptBlock.Create("Set-Variable -Name ansibleTestArgSpec -Value $args[0] -Scope Global"
).Invoke(argumentSpec);
throw new Exception("ansible-test validate-modules check");
}
// Now make sure all the metadata keys are set to their defaults, this must be done after we've
// potentially output the arg spec for ansible-test.
SetArgumentSpecDefaults(argumentSpec);
Params = GetParams(args);
aliases = GetAliases(argumentSpec, Params);
SetNoLogValues(argumentSpec, Params);
}
catch (Exception e)
{
if (e.Message == "ansible-test validate-modules check")
Exit(0);
Dictionary<string, object> result = new Dictionary<string, object>
{
{ "failed", true },
{ "msg", String.Format("internal error: {0}", e.Message) },
{ "exception", e.ToString() }
};
WriteLine(ToJson(result));
Exit(1);
}
// Initialise public properties to the defaults before we parse the actual inputs
CheckMode = false;
DebugMode = false;
DiffMode = false;
KeepRemoteFiles = false;
ModuleName = "undefined win module";
NoLog = (bool)argumentSpec["no_log"];
Verbosity = 0;
AppDomain.CurrentDomain.ProcessExit += CleanupFiles;
List<string> legalInputs = passVars.Keys.Select(v => "_ansible_" + v).ToList();
legalInputs.AddRange(((IDictionary)argumentSpec["options"]).Keys.Cast<string>().ToList());
legalInputs.AddRange(aliases.Keys.Cast<string>().ToList());
CheckArguments(argumentSpec, Params, legalInputs);
// Set an Ansible friendly invocation value in the result object
Dictionary<string, object> invocation = new Dictionary<string, object>() { { "module_args", Params } };
Result["invocation"] = RemoveNoLogValues(invocation, noLogValues);
if (!NoLog)
LogEvent(String.Format("Invoked with:\r\n {0}", FormatLogData(Params, 2)), sanitise: false);
}
public static AnsibleModule Create(string[] args, IDictionary argumentSpec, IDictionary[] fragments = null)
{
return new AnsibleModule(args, argumentSpec, fragments);
}
public void Debug(string message)
{
if (DebugMode)
LogEvent(String.Format("[DEBUG] {0}", message));
}
public void Deprecate(string message, string version)
{
Deprecate(message, version, null);
}
public void Deprecate(string message, string version, string collectionName)
{
deprecations.Add(new Dictionary<string, string>() {
{ "msg", message }, { "version", version }, { "collection_name", collectionName } });
LogEvent(String.Format("[DEPRECATION WARNING] {0} {1}", message, version));
}
public void Deprecate(string message, DateTime date)
{
Deprecate(message, date, null);
}
public void Deprecate(string message, DateTime date, string collectionName)
{
string isoDate = date.ToString("yyyy-MM-dd");
deprecations.Add(new Dictionary<string, string>() {
{ "msg", message }, { "date", isoDate }, { "collection_name", collectionName } });
LogEvent(String.Format("[DEPRECATION WARNING] {0} {1}", message, isoDate));
}
public void ExitJson()
{
CleanupFiles(null, null);
WriteLine(GetFormattedResults(Result));
Exit(0);
}
public void FailJson(string message) { FailJson(message, null, null); }
public void FailJson(string message, ErrorRecord psErrorRecord) { FailJson(message, psErrorRecord, null); }
public void FailJson(string message, Exception exception) { FailJson(message, null, exception); }
private void FailJson(string message, ErrorRecord psErrorRecord, Exception exception)
{
Result["failed"] = true;
Result["msg"] = RemoveNoLogValues(message, noLogValues);
if (!Result.ContainsKey("exception") && (Verbosity > 2 || DebugMode))
{
if (psErrorRecord != null)
{
string traceback = String.Format("{0}\r\n{1}", psErrorRecord.ToString(), psErrorRecord.InvocationInfo.PositionMessage);
traceback += String.Format("\r\n + CategoryInfo : {0}", psErrorRecord.CategoryInfo.ToString());
traceback += String.Format("\r\n + FullyQualifiedErrorId : {0}", psErrorRecord.FullyQualifiedErrorId.ToString());
traceback += String.Format("\r\n\r\nScriptStackTrace:\r\n{0}", psErrorRecord.ScriptStackTrace);
Result["exception"] = traceback;
}
else if (exception != null)
Result["exception"] = exception.ToString();
}
CleanupFiles(null, null);
WriteLine(GetFormattedResults(Result));
Exit(1);
}
public void LogEvent(string message, EventLogEntryType logEntryType = EventLogEntryType.Information, bool sanitise = true)
{
if (NoLog)
return;
#if WINDOWS
string logSource = "Ansible";
bool logSourceExists = false;
try
{
logSourceExists = EventLog.SourceExists(logSource);
}
catch (System.Security.SecurityException) { } // non admin users may not have permission
if (!logSourceExists)
{
try
{
EventLog.CreateEventSource(logSource, "Application");
}
catch (System.Security.SecurityException)
{
// Cannot call Warn as that calls LogEvent and we get stuck in a loop
warnings.Add(String.Format("Access error when creating EventLog source {0}, logging to the Application source instead", logSource));
logSource = "Application";
}
}
if (sanitise)
message = (string)RemoveNoLogValues(message, noLogValues);
message = String.Format("{0} - {1}", ModuleName, message);
using (EventLog eventLog = new EventLog("Application"))
{
eventLog.Source = logSource;
try
{
eventLog.WriteEntry(message, logEntryType, 0);
}
catch (System.InvalidOperationException) { } // Ignore permission errors on the Application event log
catch (System.Exception e)
{
// Cannot call Warn as that calls LogEvent and we get stuck in a loop
warnings.Add(String.Format("Unknown error when creating event log entry: {0}", e.Message));
}
}
#else
// Windows Event Log is only available on Windows
return;
#endif
}
public void Warn(string message)
{
warnings.Add(message);
LogEvent(String.Format("[WARNING] {0}", message), EventLogEntryType.Warning);
}
public static object FromJson(string json) { return FromJson<object>(json); }
public static T FromJson<T>(string json)
{
#if CORECLR
return JsonConvert.DeserializeObject<T>(json);
#else
JavaScriptSerializer jss = new JavaScriptSerializer();
jss.MaxJsonLength = int.MaxValue;
jss.RecursionLimit = int.MaxValue;
return jss.Deserialize<T>(json);
#endif
}
public static string ToJson(object obj)
{
// Using PowerShell to serialize the JSON is preferable over the native .NET libraries as it handles
// PS Objects a lot better than the alternatives. In case we are debugging in Visual Studio we have a
// fallback to the other libraries as we won't be dealing with PowerShell objects there.
if (Runspace.DefaultRunspace != null)
{
PSObject rawOut = ScriptBlock.Create("ConvertTo-Json -InputObject $args[0] -Depth 99 -Compress").Invoke(obj)[0];
return rawOut.BaseObject as string;
}
else
{
#if CORECLR
return JsonConvert.SerializeObject(obj);
#else
JavaScriptSerializer jss = new JavaScriptSerializer();
jss.MaxJsonLength = int.MaxValue;
jss.RecursionLimit = int.MaxValue;
return jss.Serialize(obj);
#endif
}
}
public static IDictionary GetParams(string[] args)
{
if (args.Length > 0)
{
string inputJson = File.ReadAllText(args[0]);
Dictionary<string, object> rawParams = FromJson<Dictionary<string, object>>(inputJson);
if (!rawParams.ContainsKey("ANSIBLE_MODULE_ARGS"))
throw new ArgumentException("Module was unable to get ANSIBLE_MODULE_ARGS value from the argument path json");
return (IDictionary)rawParams["ANSIBLE_MODULE_ARGS"];
}
else
{
// $complex_args is already a Hashtable, no need to waste time converting to a dictionary
PSObject rawArgs = ScriptBlock.Create("$complex_args").Invoke()[0];
return rawArgs.BaseObject as Hashtable;
}
}
public static bool ParseBool(object value)
{
if (value.GetType() == typeof(bool))
return (bool)value;
List<string> booleans = new List<string>();
booleans.AddRange(BOOLEANS_TRUE);
booleans.AddRange(BOOLEANS_FALSE);
string stringValue = ParseStr(value).ToLowerInvariant().Trim();
if (BOOLEANS_TRUE.Contains(stringValue))
return true;
else if (BOOLEANS_FALSE.Contains(stringValue))
return false;
string msg = String.Format("The value '{0}' is not a valid boolean. Valid booleans include: {1}",
stringValue, String.Join(", ", booleans));
throw new ArgumentException(msg);
}
public static Dictionary<string, object> ParseDict(object value)
{
Type valueType = value.GetType();
if (valueType == typeof(Dictionary<string, object>))
return (Dictionary<string, object>)value;
else if (value is IDictionary)
return ((IDictionary)value).Cast<DictionaryEntry>().ToDictionary(kvp => (string)kvp.Key, kvp => kvp.Value);
else if (valueType == typeof(string))
{
string stringValue = (string)value;
if (stringValue.StartsWith("{") && stringValue.EndsWith("}"))
return FromJson<Dictionary<string, object>>((string)value);
else if (stringValue.IndexOfAny(new char[1] { '=' }) != -1)
{
List<string> fields = new List<string>();
List<char> fieldBuffer = new List<char>();
char? inQuote = null;
bool inEscape = false;
string field;
foreach (char c in stringValue.ToCharArray())
{
if (inEscape)
{
fieldBuffer.Add(c);
inEscape = false;
}
else if (c == '\\')
inEscape = true;
else if (inQuote == null && (c == '\'' || c == '"'))
inQuote = c;
else if (inQuote != null && c == inQuote)
inQuote = null;
else if (inQuote == null && (c == ',' || c == ' '))
{
field = String.Join("", fieldBuffer);
if (field != "")
fields.Add(field);
fieldBuffer = new List<char>();
}
else
fieldBuffer.Add(c);
}
field = String.Join("", fieldBuffer);
if (field != "")
fields.Add(field);
return fields.Distinct().Select(i => i.Split(new[] { '=' }, 2)).ToDictionary(i => i[0], i => i.Length > 1 ? (object)i[1] : null);
}
else
throw new ArgumentException("string cannot be converted to a dict, must either be a JSON string or in the key=value form");
}
throw new ArgumentException(String.Format("{0} cannot be converted to a dict", valueType.FullName));
}
public static float ParseFloat(object value)
{
if (value.GetType() == typeof(float))
return (float)value;
string valueStr = ParseStr(value);
return float.Parse(valueStr);
}
public static int ParseInt(object value)
{
Type valueType = value.GetType();
if (valueType == typeof(int))
return (int)value;
else
return Int32.Parse(ParseStr(value));
}
public static string ParseJson(object value)
{
// mostly used to ensure a dict is a json string as it may
// have been converted on the controller side
Type valueType = value.GetType();
if (value is IDictionary)
return ToJson(value);
else if (valueType == typeof(string))
return (string)value;
else
throw new ArgumentException(String.Format("{0} cannot be converted to json", valueType.FullName));
}
public static List<object> ParseList(object value)
{
if (value == null)
return null;
Type valueType = value.GetType();
if (valueType.IsGenericType && valueType.GetGenericTypeDefinition() == typeof(List<>))
return (List<object>)value;
else if (valueType == typeof(ArrayList))
return ((ArrayList)value).Cast<object>().ToList();
else if (valueType.IsArray)
return ((object[])value).ToList();
else if (valueType == typeof(string))
return ((string)value).Split(',').Select(s => s.Trim()).ToList<object>();
else if (valueType == typeof(int))
return new List<object>() { value };
else
throw new ArgumentException(String.Format("{0} cannot be converted to a list", valueType.FullName));
}
public static string ParsePath(object value)
{
string stringValue = ParseStr(value);
// do not validate or expand the env vars if it starts with \\?\ as
// it is a special path designed for the NT kernel to interpret
if (stringValue.StartsWith(@"\\?\"))
return stringValue;
stringValue = Environment.ExpandEnvironmentVariables(stringValue);
if (stringValue.IndexOfAny(Path.GetInvalidPathChars()) != -1)
throw new ArgumentException("string value contains invalid path characters, cannot convert to path");
// will fire an exception if it contains any invalid chars
Path.GetFullPath(stringValue);
return stringValue;
}
public static object ParseRaw(object value) { return value; }
public static SecurityIdentifier ParseSid(object value)
{
string stringValue = ParseStr(value);
try
{
return new SecurityIdentifier(stringValue);
}
catch (ArgumentException) { } // ignore failures string may not have been a SID
NTAccount account = new NTAccount(stringValue);
return (SecurityIdentifier)account.Translate(typeof(SecurityIdentifier));
}
public static string ParseStr(object value) { return value.ToString(); }
private void ValidateArgumentSpec(IDictionary argumentSpec)
{
Dictionary<string, object> changedValues = new Dictionary<string, object>();
foreach (DictionaryEntry entry in argumentSpec)
{
string key = (string)entry.Key;
// validate the key is a valid argument spec key
if (!specDefaults.ContainsKey(key))
{
string msg = String.Format("argument spec entry contains an invalid key '{0}', valid keys: {1}",
key, String.Join(", ", specDefaults.Keys));
throw new ArgumentException(FormatOptionsContext(msg, " - "));
}
// ensure the value is casted to the type we expect
Type optionType = null;
if (entry.Value != null)
optionType = (Type)specDefaults[key][1];
if (optionType != null)
{
Type actualType = entry.Value.GetType();
bool invalid = false;
if (optionType.IsGenericType && optionType.GetGenericTypeDefinition() == typeof(List<>))
{
// verify the actual type is not just a single value of the list type
Type entryType = optionType.GetGenericArguments()[0];
object[] arrayElementTypes = new object[]
{
null, // ArrayList does not have an ElementType
entryType,
typeof(object), // Hope the object is actually entryType or it can at least be casted.
};
bool isArray = entry.Value is IList && arrayElementTypes.Contains(actualType.GetElementType());
if (actualType == entryType || isArray)
{
object rawArray;
if (isArray)
rawArray = entry.Value;
else
rawArray = new object[1] { entry.Value };
MethodInfo castMethod = typeof(Enumerable).GetMethod("Cast").MakeGenericMethod(entryType);
MethodInfo toListMethod = typeof(Enumerable).GetMethod("ToList").MakeGenericMethod(entryType);
var enumerable = castMethod.Invoke(null, new object[1] { rawArray });
var newList = toListMethod.Invoke(null, new object[1] { enumerable });
changedValues.Add(key, newList);
}
else if (actualType != optionType && !(actualType == typeof(List<object>)))
invalid = true;
}
else
invalid = actualType != optionType;
if (invalid)
{
string msg = String.Format("argument spec for '{0}' did not match expected type {1}: actual type {2}",
key, optionType.FullName, actualType.FullName);
throw new ArgumentException(FormatOptionsContext(msg, " - "));
}
}
// recursively validate the spec
if (key == "options" && entry.Value != null)
{
IDictionary optionsSpec = (IDictionary)entry.Value;
foreach (DictionaryEntry optionEntry in optionsSpec)
{
optionsContext.Add((string)optionEntry.Key);
IDictionary optionMeta = (IDictionary)optionEntry.Value;
ValidateArgumentSpec(optionMeta);
optionsContext.RemoveAt(optionsContext.Count - 1);
}
}
// validate the type and elements key type values are known types
if ((key == "type" || key == "elements") && entry.Value != null)
{
Type valueType = entry.Value.GetType();
if (valueType == typeof(string))
{
string typeValue = (string)entry.Value;
if (!optionTypes.ContainsKey(typeValue))
{
string msg = String.Format("{0} '{1}' is unsupported", key, typeValue);
msg = String.Format("{0}. Valid types are: {1}", FormatOptionsContext(msg, " - "), String.Join(", ", optionTypes.Keys));
throw new ArgumentException(msg);
}
}
else if (!(entry.Value is Delegate))
{
string msg = String.Format("{0} must either be a string or delegate, was: {1}", key, valueType.FullName);
throw new ArgumentException(FormatOptionsContext(msg, " - "));
}
}
}
// Outside of the spec iterator, change the values that were casted above
foreach (KeyValuePair<string, object> changedValue in changedValues)
argumentSpec[changedValue.Key] = changedValue.Value;
}
private void MergeFragmentSpec(IDictionary argumentSpec, IDictionary fragment)
{
foreach (DictionaryEntry fragmentEntry in fragment)
{
string fragmentKey = fragmentEntry.Key.ToString();
if (argumentSpec.Contains(fragmentKey))
{
// We only want to add new list entries and merge dictionary new keys and values. Leave the other
// values as is in the argument spec as that takes priority over the fragment.
if (fragmentEntry.Value is IDictionary)
{
MergeFragmentSpec((IDictionary)argumentSpec[fragmentKey], (IDictionary)fragmentEntry.Value);
}
else if (fragmentEntry.Value is IList)
{
IList specValue = (IList)argumentSpec[fragmentKey];
foreach (object fragmentValue in (IList)fragmentEntry.Value)
specValue.Add(fragmentValue);
}
}
else
argumentSpec[fragmentKey] = fragmentEntry.Value;
}
}
private void SetArgumentSpecDefaults(IDictionary argumentSpec)
{
foreach (KeyValuePair<string, List<object>> metadataEntry in specDefaults)
{
List<object> defaults = metadataEntry.Value;
object defaultValue = defaults[0];
if (defaultValue != null && defaultValue.GetType() == typeof(Type).GetType())
defaultValue = Activator.CreateInstance((Type)defaultValue);
if (!argumentSpec.Contains(metadataEntry.Key))
argumentSpec[metadataEntry.Key] = defaultValue;
}
// Recursively set the defaults for any inner options.
foreach (DictionaryEntry entry in argumentSpec)
{
if (entry.Value == null || entry.Key.ToString() != "options")
continue;
IDictionary optionsSpec = (IDictionary)entry.Value;
foreach (DictionaryEntry optionEntry in optionsSpec)
{
optionsContext.Add((string)optionEntry.Key);
IDictionary optionMeta = (IDictionary)optionEntry.Value;
SetArgumentSpecDefaults(optionMeta);
optionsContext.RemoveAt(optionsContext.Count - 1);
}
}
}
private Dictionary<string, string> GetAliases(IDictionary argumentSpec, IDictionary parameters)
{
Dictionary<string, string> aliasResults = new Dictionary<string, string>();
foreach (DictionaryEntry entry in (IDictionary)argumentSpec["options"])
{
string k = (string)entry.Key;
Hashtable v = (Hashtable)entry.Value;
List<string> aliases = (List<string>)v["aliases"];
object defaultValue = v["default"];
bool required = (bool)v["required"];
if (defaultValue != null && required)
throw new ArgumentException(String.Format("required and default are mutually exclusive for {0}", k));
foreach (string alias in aliases)
{
aliasResults.Add(alias, k);
if (parameters.Contains(alias))
parameters[k] = parameters[alias];
}
List<Hashtable> deprecatedAliases = (List<Hashtable>)v["deprecated_aliases"];
foreach (Hashtable depInfo in deprecatedAliases)
{
foreach (string keyName in new List<string> { "name" })
{
if (!depInfo.ContainsKey(keyName))
{
string msg = String.Format("{0} is required in a deprecated_aliases entry", keyName);
throw new ArgumentException(FormatOptionsContext(msg, " - "));
}
}
if (!depInfo.ContainsKey("version") && !depInfo.ContainsKey("date"))
{
string msg = "One of version or date is required in a deprecated_aliases entry";
throw new ArgumentException(FormatOptionsContext(msg, " - "));
}
if (depInfo.ContainsKey("version") && depInfo.ContainsKey("date"))
{
string msg = "Only one of version or date is allowed in a deprecated_aliases entry";
throw new ArgumentException(FormatOptionsContext(msg, " - "));
}
if (depInfo.ContainsKey("date") && depInfo["date"].GetType() != typeof(DateTime))
{
string msg = "A deprecated_aliases date must be a DateTime object";
throw new ArgumentException(FormatOptionsContext(msg, " - "));
}
string collectionName = null;
if (depInfo.ContainsKey("collection_name"))
{
collectionName = (string)depInfo["collection_name"];
}
string aliasName = (string)depInfo["name"];
if (parameters.Contains(aliasName))
{
string msg = String.Format("Alias '{0}' is deprecated. See the module docs for more information", aliasName);
if (depInfo.ContainsKey("version"))
{
string depVersion = (string)depInfo["version"];
Deprecate(FormatOptionsContext(msg, " - "), depVersion, collectionName);
}
if (depInfo.ContainsKey("date"))
{
DateTime depDate = (DateTime)depInfo["date"];
Deprecate(FormatOptionsContext(msg, " - "), depDate, collectionName);
}
}
}
}
return aliasResults;
}
private void SetNoLogValues(IDictionary argumentSpec, IDictionary parameters)
{
foreach (DictionaryEntry entry in (IDictionary)argumentSpec["options"])
{
string k = (string)entry.Key;
Hashtable v = (Hashtable)entry.Value;
if ((bool)v["no_log"])
{
object noLogObject = parameters.Contains(k) ? parameters[k] : null;
string noLogString = noLogObject == null ? "" : noLogObject.ToString();
if (!String.IsNullOrEmpty(noLogString))
noLogValues.Add(noLogString);
}
string collectionName = null;
if (v.ContainsKey("removed_from_collection"))
{
collectionName = (string)v["removed_from_collection"];
}
object removedInVersion = v["removed_in_version"];
if (removedInVersion != null && parameters.Contains(k))
Deprecate(String.Format("Param '{0}' is deprecated. See the module docs for more information", k),
removedInVersion.ToString(), collectionName);
object removedAtDate = v["removed_at_date"];
if (removedAtDate != null && parameters.Contains(k))
Deprecate(String.Format("Param '{0}' is deprecated. See the module docs for more information", k),
(DateTime)removedAtDate, collectionName);
}
}
private void CheckArguments(IDictionary spec, IDictionary param, List<string> legalInputs)
{
// initially parse the params and check for unsupported ones and set internal vars
CheckUnsupportedArguments(param, legalInputs);
// Only run this check if we are at the root argument (optionsContext.Count == 0)
if (CheckMode && !(bool)spec["supports_check_mode"] && optionsContext.Count == 0)
{
Result["skipped"] = true;
Result["msg"] = String.Format("remote module ({0}) does not support check mode", ModuleName);
ExitJson();
}
IDictionary optionSpec = (IDictionary)spec["options"];
CheckMutuallyExclusive(param, (IList)spec["mutually_exclusive"]);
CheckRequiredArguments(optionSpec, param);
// set the parameter types based on the type spec value
foreach (DictionaryEntry entry in optionSpec)
{
string k = (string)entry.Key;
Hashtable v = (Hashtable)entry.Value;
object value = param.Contains(k) ? param[k] : null;
if (value != null)
{
// convert the current value to the wanted type
Delegate typeConverter;
string type;
if (v["type"].GetType() == typeof(string))
{
type = (string)v["type"];
typeConverter = optionTypes[type];
}
else
{
type = "delegate";
typeConverter = (Delegate)v["type"];
}
try
{
value = typeConverter.DynamicInvoke(value);
param[k] = value;
}
catch (Exception e)
{
string msg = String.Format("argument for {0} is of type {1} and we were unable to convert to {2}: {3}",
k, value.GetType(), type, e.InnerException.Message);
FailJson(FormatOptionsContext(msg));
}
// ensure it matches the choices if there are choices set
List<string> choices = ((List<object>)v["choices"]).Select(x => x.ToString()).Cast<string>().ToList();
if (choices.Count > 0)
{
List<string> values;
string choiceMsg;
if (type == "list")
{
values = ((List<object>)value).Select(x => x.ToString()).Cast<string>().ToList();
choiceMsg = "one or more of";
}
else
{
values = new List<string>() { value.ToString() };
choiceMsg = "one of";
}
List<string> diffList = values.Except(choices, StringComparer.OrdinalIgnoreCase).ToList();
List<string> caseDiffList = values.Except(choices).ToList();
if (diffList.Count > 0)
{
string msg = String.Format("value of {0} must be {1}: {2}. Got no match for: {3}",
k, choiceMsg, String.Join(", ", choices), String.Join(", ", diffList));
FailJson(FormatOptionsContext(msg));
}
/*
For now we will just silently accept case insensitive choices, uncomment this if we want to add it back in
else if (caseDiffList.Count > 0)
{
// For backwards compatibility with Legacy.psm1 we need to be matching choices that are not case sensitive.
// We will warn the user it was case insensitive and tell them this will become case sensitive in the future.
string msg = String.Format(
"value of {0} was a case insensitive match of {1}: {2}. Checking of choices will be case sensitive in a future Ansible release. Case insensitive matches were: {3}",
k, choiceMsg, String.Join(", ", choices), String.Join(", ", caseDiffList.Select(x => RemoveNoLogValues(x, noLogValues)))
);
Warn(FormatOptionsContext(msg));
}*/
}
}
}
CheckRequiredTogether(param, (IList)spec["required_together"]);
CheckRequiredOneOf(param, (IList)spec["required_one_of"]);
CheckRequiredIf(param, (IList)spec["required_if"]);
CheckRequiredBy(param, (IDictionary)spec["required_by"]);
// finally ensure all missing parameters are set to null and handle sub options
foreach (DictionaryEntry entry in optionSpec)
{
string k = (string)entry.Key;
IDictionary v = (IDictionary)entry.Value;
if (!param.Contains(k))
param[k] = null;
CheckSubOption(param, k, v);
}
}
private void CheckUnsupportedArguments(IDictionary param, List<string> legalInputs)
{
HashSet<string> unsupportedParameters = new HashSet<string>();
HashSet<string> caseUnsupportedParameters = new HashSet<string>();
List<string> removedParameters = new List<string>();
foreach (DictionaryEntry entry in param)
{
string paramKey = (string)entry.Key;
if (!legalInputs.Contains(paramKey, StringComparer.OrdinalIgnoreCase))
unsupportedParameters.Add(paramKey);
else if (!legalInputs.Contains(paramKey))
// For backwards compatibility we do not care about the case but we need to warn the users as this will
// change in a future Ansible release.
caseUnsupportedParameters.Add(paramKey);
else if (paramKey.StartsWith("_ansible_"))
{
removedParameters.Add(paramKey);
string key = paramKey.Replace("_ansible_", "");
// skip setting NoLog if NoLog is already set to true (set by the module)
// or there's no mapping for this key
if ((key == "no_log" && NoLog == true) || (passVars[key] == null))
continue;
object value = entry.Value;
if (passBools.Contains(key))
value = ParseBool(value);
else if (passInts.Contains(key))
value = ParseInt(value);
string propertyName = passVars[key];
PropertyInfo property = typeof(AnsibleModule).GetProperty(propertyName);
FieldInfo field = typeof(AnsibleModule).GetField(propertyName, BindingFlags.NonPublic | BindingFlags.Instance);
if (property != null)
property.SetValue(this, value, null);
else if (field != null)
field.SetValue(this, value);
else
FailJson(String.Format("implementation error: unknown AnsibleModule property {0}", propertyName));
}
}
foreach (string parameter in removedParameters)
param.Remove(parameter);
if (unsupportedParameters.Count > 0 && !ignoreUnknownOpts)
{
legalInputs.RemoveAll(x => passVars.Keys.Contains(x.Replace("_ansible_", "")));
string msg = String.Format("Unsupported parameters for ({0}) module: {1}", ModuleName, String.Join(", ", unsupportedParameters));
msg = String.Format("{0}. Supported parameters include: {1}", FormatOptionsContext(msg), String.Join(", ", legalInputs));
FailJson(msg);
}
/*
// Uncomment when we want to start warning users around options that are not a case sensitive match to the spec
if (caseUnsupportedParameters.Count > 0)
{
legalInputs.RemoveAll(x => passVars.Keys.Contains(x.Replace("_ansible_", "")));
string msg = String.Format("Parameters for ({0}) was a case insensitive match: {1}", ModuleName, String.Join(", ", caseUnsupportedParameters));
msg = String.Format("{0}. Module options will become case sensitive in a future Ansible release. Supported parameters include: {1}",
FormatOptionsContext(msg), String.Join(", ", legalInputs));
Warn(msg);
}*/
// Make sure we convert all the incorrect case params to the ones set by the module spec
foreach (string key in caseUnsupportedParameters)
{
string correctKey = legalInputs[legalInputs.FindIndex(s => s.Equals(key, StringComparison.OrdinalIgnoreCase))];
object value = param[key];
param.Remove(key);
param.Add(correctKey, value);
}
}
private void CheckMutuallyExclusive(IDictionary param, IList mutuallyExclusive)
{
if (mutuallyExclusive == null)
return;
foreach (object check in mutuallyExclusive)
{
List<string> mutualCheck = ((IList)check).Cast<string>().ToList();
int count = 0;
foreach (string entry in mutualCheck)
if (param.Contains(entry))
count++;
if (count > 1)
{
string msg = String.Format("parameters are mutually exclusive: {0}", String.Join(", ", mutualCheck));
FailJson(FormatOptionsContext(msg));
}
}
}
private void CheckRequiredArguments(IDictionary spec, IDictionary param)
{
List<string> missing = new List<string>();
foreach (DictionaryEntry entry in spec)
{
string k = (string)entry.Key;
Hashtable v = (Hashtable)entry.Value;
// set defaults for values not already set
object defaultValue = v["default"];
if (defaultValue != null && !param.Contains(k))
param[k] = defaultValue;
// check required arguments
bool required = (bool)v["required"];
if (required && !param.Contains(k))
missing.Add(k);
}
if (missing.Count > 0)
{
string msg = String.Format("missing required arguments: {0}", String.Join(", ", missing));
FailJson(FormatOptionsContext(msg));
}
}
private void CheckRequiredTogether(IDictionary param, IList requiredTogether)
{
if (requiredTogether == null)
return;
foreach (object check in requiredTogether)
{
List<string> requiredCheck = ((IList)check).Cast<string>().ToList();
List<bool> found = new List<bool>();
foreach (string field in requiredCheck)
if (param.Contains(field))
found.Add(true);
else
found.Add(false);
if (found.Contains(true) && found.Contains(false))
{
string msg = String.Format("parameters are required together: {0}", String.Join(", ", requiredCheck));
FailJson(FormatOptionsContext(msg));
}
}
}
private void CheckRequiredOneOf(IDictionary param, IList requiredOneOf)
{
if (requiredOneOf == null)
return;
foreach (object check in requiredOneOf)
{
List<string> requiredCheck = ((IList)check).Cast<string>().ToList();
int count = 0;
foreach (string field in requiredCheck)
if (param.Contains(field))
count++;
if (count == 0)
{
string msg = String.Format("one of the following is required: {0}", String.Join(", ", requiredCheck));
FailJson(FormatOptionsContext(msg));
}
}
}
private void CheckRequiredIf(IDictionary param, IList requiredIf)
{
if (requiredIf == null)
return;
foreach (object check in requiredIf)
{
IList requiredCheck = (IList)check;
List<string> missing = new List<string>();
List<string> missingFields = new List<string>();
int maxMissingCount = 1;
bool oneRequired = false;
if (requiredCheck.Count < 3 || requiredCheck.Count > 4)
FailJson(String.Format("internal error: invalid required_if value count of {0}, expecting 3 or 4 entries", requiredCheck.Count));
else if (requiredCheck.Count == 4)
oneRequired = (bool)requiredCheck[3];
string key = (string)requiredCheck[0];
object val = requiredCheck[1];
IList requirements = (IList)requiredCheck[2];
if (ParseStr(param[key]) != ParseStr(val))
continue;
string term = "all";
if (oneRequired)
{
maxMissingCount = requirements.Count;
term = "any";
}
foreach (string required in requirements.Cast<string>())
if (!param.Contains(required))
missing.Add(required);
if (missing.Count >= maxMissingCount)
{
string msg = String.Format("{0} is {1} but {2} of the following are missing: {3}",
key, val.ToString(), term, String.Join(", ", missing));
FailJson(FormatOptionsContext(msg));
}
}
}
private void CheckRequiredBy(IDictionary param, IDictionary requiredBy)
{
foreach (DictionaryEntry entry in requiredBy)
{
string key = (string)entry.Key;
if (!param.Contains(key))
continue;
List<string> missing = new List<string>();
List<string> requires = ParseList(entry.Value).Cast<string>().ToList();
foreach (string required in requires)
if (!param.Contains(required))
missing.Add(required);
if (missing.Count > 0)
{
string msg = String.Format("missing parameter(s) required by '{0}': {1}", key, String.Join(", ", missing));
FailJson(FormatOptionsContext(msg));
}
}
}
private void CheckSubOption(IDictionary param, string key, IDictionary spec)
{
object value = param[key];
string type;
if (spec["type"].GetType() == typeof(string))
type = (string)spec["type"];
else
type = "delegate";
string elements = null;
Delegate typeConverter = null;
if (spec["elements"] != null && spec["elements"].GetType() == typeof(string))
{
elements = (string)spec["elements"];
typeConverter = optionTypes[elements];
}
else if (spec["elements"] != null)
{
elements = "delegate";
typeConverter = (Delegate)spec["elements"];
}
if (!(type == "dict" || (type == "list" && elements != null)))
// neither a dict nor a list with elements set, so nothing further to check
return;
else if (type == "list")
{
// cast each list element to the type specified
if (value == null)
return;
List<object> newValue = new List<object>();
foreach (object element in (List<object>)value)
{
if (elements == "dict")
newValue.Add(ParseSubSpec(spec, element, key));
else
{
try
{
object newElement = typeConverter.DynamicInvoke(element);
newValue.Add(newElement);
}
catch (Exception e)
{
string msg = String.Format("argument for list entry {0} is of type {1} and we were unable to convert to {2}: {3}",
key, element.GetType(), elements, e.Message);
FailJson(FormatOptionsContext(msg));
}
}
}
param[key] = newValue;
}
else
param[key] = ParseSubSpec(spec, value, key);
}
private object ParseSubSpec(IDictionary spec, object value, string context)
{
bool applyDefaults = (bool)spec["apply_defaults"];
// set entry to an empty dict if apply_defaults is set
IDictionary optionsSpec = (IDictionary)spec["options"];
if (applyDefaults && optionsSpec.Keys.Count > 0 && value == null)
value = new Dictionary<string, object>();
else if (optionsSpec.Keys.Count == 0 || value == null)
return value;
optionsContext.Add(context);
Dictionary<string, object> newValue = (Dictionary<string, object>)ParseDict(value);
Dictionary<string, string> aliases = GetAliases(spec, newValue);
SetNoLogValues(spec, newValue);
List<string> subLegalInputs = optionsSpec.Keys.Cast<string>().ToList();
subLegalInputs.AddRange(aliases.Keys.Cast<string>().ToList());
CheckArguments(spec, newValue, subLegalInputs);
optionsContext.RemoveAt(optionsContext.Count - 1);
return newValue;
}
private string GetFormattedResults(Dictionary<string, object> result)
{
if (!result.ContainsKey("invocation"))
result["invocation"] = new Dictionary<string, object>() { { "module_args", RemoveNoLogValues(Params, noLogValues) } };
if (warnings.Count > 0)
result["warnings"] = warnings;
if (deprecations.Count > 0)
result["deprecations"] = deprecations;
if (Diff.Count > 0 && DiffMode)
result["diff"] = Diff;
return ToJson(result);
}
private string FormatLogData(object data, int indentLevel)
{
if (data == null)
return "$null";
string msg = "";
if (data is IList)
{
string newMsg = "";
foreach (object value in (IList)data)
{
string entryValue = FormatLogData(value, indentLevel + 2);
newMsg += String.Format("\r\n{0}- {1}", new String(' ', indentLevel), entryValue);
}
msg += newMsg;
}
else if (data is IDictionary)
{
bool start = true;
foreach (DictionaryEntry entry in (IDictionary)data)
{
string newMsg = FormatLogData(entry.Value, indentLevel + 2);
if (!start)
msg += String.Format("\r\n{0}", new String(' ', indentLevel));
msg += String.Format("{0}: {1}", (string)entry.Key, newMsg);
start = false;
}
}
else
msg = (string)RemoveNoLogValues(ParseStr(data), noLogValues);
return msg;
}
private object RemoveNoLogValues(object value, HashSet<string> noLogStrings)
{
Queue<Tuple<object, object>> deferredRemovals = new Queue<Tuple<object, object>>();
object newValue = RemoveValueConditions(value, noLogStrings, deferredRemovals);
while (deferredRemovals.Count > 0)
{
Tuple<object, object> data = deferredRemovals.Dequeue();
object oldData = data.Item1;
object newData = data.Item2;
if (oldData is IDictionary)
{
foreach (DictionaryEntry entry in (IDictionary)oldData)
{
object newElement = RemoveValueConditions(entry.Value, noLogStrings, deferredRemovals);
((IDictionary)newData).Add((string)entry.Key, newElement);
}
}
else
{
foreach (object element in (IList)oldData)
{
object newElement = RemoveValueConditions(element, noLogStrings, deferredRemovals);
((IList)newData).Add(newElement);
}
}
}
return newValue;
}
private object RemoveValueConditions(object value, HashSet<string> noLogStrings, Queue<Tuple<object, object>> deferredRemovals)
{
if (value == null)
return value;
Type valueType = value.GetType();
HashSet<Type> numericTypes = new HashSet<Type>
{
typeof(byte), typeof(sbyte), typeof(short), typeof(ushort), typeof(int), typeof(uint),
typeof(long), typeof(ulong), typeof(decimal), typeof(double), typeof(float)
};
if (numericTypes.Contains(valueType) || valueType == typeof(bool))
{
string valueString = ParseStr(value);
if (noLogStrings.Contains(valueString))
return "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER";
foreach (string omitMe in noLogStrings)
if (valueString.Contains(omitMe))
return "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER";
}
else if (valueType == typeof(DateTime))
value = ((DateTime)value).ToString("o");
else if (value is IList)
{
List<object> newValue = new List<object>();
deferredRemovals.Enqueue(new Tuple<object, object>((IList)value, newValue));
value = newValue;
}
else if (value is IDictionary)
{
Hashtable newValue = new Hashtable();
deferredRemovals.Enqueue(new Tuple<object, object>((IDictionary)value, newValue));
value = newValue;
}
else
{
string stringValue = value.ToString();
if (noLogStrings.Contains(stringValue))
return "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER";
foreach (string omitMe in noLogStrings)
if (stringValue.Contains(omitMe))
return (stringValue).Replace(omitMe, "********");
value = stringValue;
}
return value;
}
private void CleanupFiles(object s, EventArgs ev)
{
foreach (string path in cleanupFiles)
{
try
{
#if WINDOWS
FileCleaner.Delete(path);
#else
if (File.Exists(path))
File.Delete(path);
else if (Directory.Exists(path))
Directory.Delete(path, true);
#endif
}
catch (Exception e)
{
Warn(string.Format("Failure cleaning temp path '{0}': {1} {2}",
path, e.GetType().Name, e.Message));
}
}
cleanupFiles = new List<string>();
}
private string FormatOptionsContext(string msg, string prefix = " ")
{
if (optionsContext.Count > 0)
msg += String.Format("{0}found in {1}", prefix, String.Join(" -> ", optionsContext));
return msg;
}
[DllImport("kernel32.dll")]
private static extern IntPtr GetConsoleWindow();
private static void ExitModule(int rc)
{
// When running in a Runspace Environment.Exit will kill the entire
// process which is not what we want, detect if we are in a
// Runspace and call a ScriptBlock with exit instead.
if (Runspace.DefaultRunspace != null)
ScriptBlock.Create("Set-Variable -Name LASTEXITCODE -Value $args[0] -Scope Global; exit $args[0]").Invoke(rc);
else
{
// Used for local debugging in Visual Studio
if (System.Diagnostics.Debugger.IsAttached)
{
Console.WriteLine("Press enter to continue...");
Console.ReadLine();
}
Environment.Exit(rc);
}
}
private static void WriteLineModule(string line)
{
Console.WriteLine(line);
}
}
#if WINDOWS
// Windows is tricky as AVs and other software might still
// have an open handle to files causing a failure. Use a
// custom deletion mechanism to remove the files/dirs.
// https://github.com/ansible/ansible/pull/80247
internal static class FileCleaner
{
private const int FileDispositionInformation = 13;
private const int FileDispositionInformationEx = 64;
private const int ERROR_INVALID_PARAMETER = 0x00000057;
private const int ERROR_DIR_NOT_EMPTY = 0x00000091;
private static bool? _supportsPosixDelete = null;
[Flags()]
public enum DispositionFlags : uint
{
FILE_DISPOSITION_DO_NOT_DELETE = 0x00000000,
FILE_DISPOSITION_DELETE = 0x00000001,
FILE_DISPOSITION_POSIX_SEMANTICS = 0x00000002,
FILE_DISPOSITION_FORCE_IMAGE_SECTION_CHECK = 0x00000004,
FILE_DISPOSITION_ON_CLOSE = 0x00000008,
FILE_DISPOSITION_IGNORE_READONLY_ATTRIBUTE = 0x00000010,
}
[Flags()]
public enum FileFlags : uint
{
FILE_FLAG_OPEN_NO_RECALL = 0x00100000,
FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000,
FILE_FLAG_SESSION_AWARE = 0x00800000,
FILE_FLAG_POSIX_SEMANTICS = 0x01000000,
FILE_FLAG_BACKUP_SEMANTICS = 0x02000000,
FILE_FLAG_DELETE_ON_CLOSE = 0x04000000,
FILE_FLAG_SEQUENTIAL_SCAN = 0x08000000,
FILE_FLAG_RANDOM_ACCESS = 0x10000000,
FILE_FLAG_NO_BUFFERING = 0x20000000,
FILE_FLAG_OVERLAPPED = 0x40000000,
FILE_FLAG_WRITE_THROUGH = 0x80000000,
}
[DllImport("Kernel32.dll", CharSet = CharSet.Unicode, SetLastError = true)]
private static extern SafeFileHandle CreateFileW(
[MarshalAs(UnmanagedType.LPWStr)] string lpFileName,
FileSystemRights dwDesiredAccess,
FileShare dwShareMode,
IntPtr lpSecurityAttributes,
FileMode dwCreationDisposition,
uint dwFlagsAndAttributes,
IntPtr hTemplateFile);
private static SafeFileHandle CreateFile(string path, FileSystemRights access, FileShare share, FileMode mode,
FileAttributes attributes, FileFlags flags)
{
uint flagsAndAttributes = (uint)attributes | (uint)flags;
SafeFileHandle handle = CreateFileW(path, access, share, IntPtr.Zero, mode, flagsAndAttributes,
IntPtr.Zero);
if (handle.IsInvalid)
{
int errCode = Marshal.GetLastWin32Error();
string msg = string.Format("CreateFileW({0}) failed 0x{1:X8}: {2}",
path, errCode, new Win32Exception(errCode).Message);
throw new Win32Exception(errCode, msg);
}
return handle;
}
[DllImport("Ntdll.dll")]
private static extern int NtSetInformationFile(
SafeFileHandle FileHandle,
out IntPtr IoStatusBlock,
ref int FileInformation,
int Length,
int FileInformationClass);
[DllImport("Ntdll.dll")]
private static extern int RtlNtStatusToDosError(
int Status);
public static void Delete(string path)
{
if (File.Exists(path))
{
DeleteEntry(path, FileAttributes.ReadOnly);
}
else if (Directory.Exists(path))
{
Queue<DirectoryInfo> dirQueue = new Queue<DirectoryInfo>();
dirQueue.Enqueue(new DirectoryInfo(path));
bool nonEmptyDirs = false;
HashSet<string> processedDirs = new HashSet<string>();
while (dirQueue.Count > 0)
{
DirectoryInfo currentDir = dirQueue.Dequeue();
bool deleteDir = true;
if (processedDirs.Add(currentDir.FullName))
{
foreach (FileSystemInfo entry in currentDir.EnumerateFileSystemInfos())
{
// Tries to delete each entry. Failures are ignored
// as they will be picked up when the dir is
// deleted and not empty.
if (entry is DirectoryInfo)
{
if ((entry.Attributes & FileAttributes.ReparsePoint) != 0)
{
// If it's a reparse point, just delete it directly.
DeleteEntry(entry.FullName, entry.Attributes, ignoreFailure: true);
}
else
{
// Add the dir to the queue to delete and it will be processed next round.
dirQueue.Enqueue((DirectoryInfo)entry);
deleteDir = false;
}
}
else
{
DeleteEntry(entry.FullName, entry.Attributes, ignoreFailure: true);
}
}
}
if (deleteDir)
{
try
{
DeleteEntry(currentDir.FullName, FileAttributes.Directory);
}
catch (Win32Exception e)
{
if (e.NativeErrorCode == ERROR_DIR_NOT_EMPTY)
{
nonEmptyDirs = true;
}
else
{
throw;
}
}
}
else
{
dirQueue.Enqueue(currentDir);
}
}
if (nonEmptyDirs)
{
throw new IOException("Directory contains files still open by other processes");
}
}
}
private static void DeleteEntry(string path, FileAttributes attr, bool ignoreFailure = false)
{
try
{
if ((attr & FileAttributes.ReadOnly) != 0)
{
// Windows does not allow files set with ReadOnly to be
// deleted. Pre-emptively unset the attribute.
// FILE_DISPOSITION_IGNORE_READONLY_ATTRIBUTE is quite new,
// look at using that flag with POSIX delete once Server 2019
// is the baseline.
File.SetAttributes(path, FileAttributes.Normal);
}
// REPARSE - Only touch the symlink itself and not the target
// BACKUP - Needed for dir handles, bypasses access checks for admins
// DELETE_ON_CLOSE is not used as it interferes with the POSIX delete
FileFlags flags = FileFlags.FILE_FLAG_OPEN_REPARSE_POINT |
FileFlags.FILE_FLAG_BACKUP_SEMANTICS;
using (SafeFileHandle fileHandle = CreateFile(path, FileSystemRights.Delete,
FileShare.ReadWrite | FileShare.Delete, FileMode.Open, FileAttributes.Normal, flags))
{
if (_supportsPosixDelete == null || _supportsPosixDelete == true)
{
// A POSIX delete will delete the filesystem entry even if
// it's still opened by another process so favour that if
// available.
DispositionFlags deleteFlags = DispositionFlags.FILE_DISPOSITION_DELETE |
DispositionFlags.FILE_DISPOSITION_POSIX_SEMANTICS;
SetInformationFile(fileHandle, FileDispositionInformationEx, (int)deleteFlags);
if (_supportsPosixDelete == true)
{
return;
}
}
// FileDispositionInformation takes in a struct with only a BOOLEAN value.
// Using an int will also do the same thing to set that flag to true.
SetInformationFile(fileHandle, FileDispositionInformation, Int32.MaxValue);
}
}
catch
{
if (!ignoreFailure)
{
throw;
}
}
}
private static void SetInformationFile(SafeFileHandle handle, int infoClass, int value)
{
IntPtr ioStatusBlock = IntPtr.Zero;
int ntStatus = NtSetInformationFile(handle, out ioStatusBlock, ref value,
Marshal.SizeOf(typeof(int)), infoClass);
if (ntStatus != 0)
{
int errCode = RtlNtStatusToDosError(ntStatus);
// The POSIX delete was added in Server 2016 (Win 10 14393/Redstone 1)
// Mark this flag so we don't try again.
if (infoClass == FileDispositionInformationEx && _supportsPosixDelete == null &&
errCode == ERROR_INVALID_PARAMETER)
{
_supportsPosixDelete = false;
return;
}
string msg = string.Format("NtSetInformationFile() failed 0x{0:X8}: {1}",
errCode, new Win32Exception(errCode).Message);
throw new Win32Exception(errCode, msg);
}
if (infoClass == FileDispositionInformationEx)
{
_supportsPosixDelete = true;
}
}
}
#endif
}
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,710 |
Configurable sampling/transfer of control-side task context metadata to targets
|
### Summary
We're often asked how to include arbitrary control-side contextual metadata with task invocations, and to include that metadata in target-side task log messages. For example: sending an AWX/Controller Job ID to the target hosts on each module invocation issued by that job, and logging it in the module-generated syslog/Windows Application Event Log entries for future correlation with the owning job.
I've not seen any consensus on precisely *which* data to include; one person's "critical forensic correlation data" is another's "unacceptable disclosure of sensitive execution detail". It seems we'd need a generic facility to specify environment vars and/or hostvars to sample on the control host and include with task invocations (under a reserved dictionary arg), plus an adjustment to the module logging APIs to include them.
My initial thought is to add a new core config element (defaulting to none) that lets the user supply a templated expression to be rendered as part of each task's templating under a host context. The rendered result would be sent to modules as a new reserved internal module var. The module logging APIs would then include this value verbatim, when present. Other module code would also have access to the value, which could be used for anything. The new config would be settable either via ansible.cfg or an envvar, making it easier for AWX/Controller to later provide a mechanism to configure it for jobs using core versions that support it, while older versions would just silently ignore it.
Maybe something like:
```
ANSIBLE_ADDITIONAL_TASK_CONTEXT='{{awx_job_id}}'
```
When this config is non-empty, the defined template would be rendered for each task/host invocation, and its result included in a new `_ansible_additional_task_context` reserved module var. The resulting value, as with any Ansible template expression, could be of arbitrary complexity (e.g., returning a data structure instead of just a scalar). The module logging APIs would include the serialized value verbatim in log messages when it is present, e.g. "ansible_additional_task_context=(whatever the value was)".
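For illustration, a rough Python sketch of the control-side half of this idea; plain Jinja2 stands in for ansible-core's templating layer, and the config/argument names are hypothetical:
```python
import os

import jinja2


def inject_task_context(module_args, host_vars):
    """Render the configured expression and attach it as a reserved module arg."""
    expression = os.environ.get("ANSIBLE_ADDITIONAL_TASK_CONTEXT", "")
    if not expression:
        return module_args  # feature stays inert unless explicitly configured
    rendered = jinja2.Template(expression).render(**host_vars)
    # Modules would receive this untouched and could log or otherwise use it.
    return dict(module_args, _ansible_additional_task_context=rendered)


# e.g. AWX exports the job id and points the config at it
os.environ["ANSIBLE_ADDITIONAL_TASK_CONTEXT"] = "{{ awx_job_id }}"
print(inject_task_context({"state": "present"}, {"awx_job_id": 4242}))
```
Because rendering happens per task/host, the expression could reference any hostvar, not just controller-wide values.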
### Issue Type
Feature Idea
### Component Name
module invocation and logging
### Additional Information
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81710
|
https://github.com/ansible/ansible/pull/81711
|
4208bdbbcd994251579409ad533b40c9b0543550
|
1dd0d6fad70d7d4f423dac41822da65ff9ec95ef
| 2023-09-18T16:35:01Z |
python
| 2023-11-30T18:12:55Z |
lib/ansible/plugins/action/__init__.py
|
# coding: utf-8
# Copyright: (c) 2012-2014, Michael DeHaan <[email protected]>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import base64
import json
import os
import random
import re
import shlex
import stat
import tempfile
from abc import ABC, abstractmethod
from collections.abc import Sequence
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleActionSkip, AnsibleActionFail, AnsibleAuthenticationFailure
from ansible.executor.module_common import modify_module
from ansible.executor.interpreter_discovery import discover_interpreter, InterpreterDiscoveryRequiredError
from ansible.module_utils.common.arg_spec import ArgumentSpecValidator
from ansible.module_utils.errors import UnsupportedError
from ansible.module_utils.json_utils import _filter_non_json_lines
from ansible.module_utils.six import binary_type, string_types, text_type
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.parsing.utils.jsonify import jsonify
from ansible.release import __version__
from ansible.utils.collection_loader import resource_from_fqcr
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import wrap_var, AnsibleUnsafeText
from ansible.vars.clean import remove_internal_keys
from ansible.utils.plugin_docs import get_versioned_doclink
display = Display()
def _validate_utf8_json(d):
if isinstance(d, text_type):
# Purposefully not using to_bytes here for performance reasons
d.encode(encoding='utf-8', errors='strict')
elif isinstance(d, dict):
for o in d.items():
_validate_utf8_json(o)
elif isinstance(d, (list, tuple)):
for o in d:
_validate_utf8_json(o)
class ActionBase(ABC):
'''
This class is the base class for all action plugins, and defines
code common to all actions. The base class handles the connection
by putting/getting files and executing commands based on the current
action in use.
'''
# A set of valid arguments
_VALID_ARGS = frozenset([]) # type: frozenset[str]
# behavioral attributes
BYPASS_HOST_LOOP = False
TRANSFERS_FILES = False
_requires_connection = True
_supports_check_mode = True
_supports_async = False
def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj):
self._task = task
self._connection = connection
self._play_context = play_context
self._loader = loader
self._templar = templar
self._shared_loader_obj = shared_loader_obj
self._cleanup_remote_tmp = False
# interpreter discovery state
self._discovered_interpreter_key = None
self._discovered_interpreter = False
self._discovery_deprecation_warnings = []
self._discovery_warnings = []
self._used_interpreter = None
# Backwards compat: self._display isn't really needed, just import the global display and use that.
self._display = display
@abstractmethod
def run(self, tmp=None, task_vars=None):
""" Action Plugins should implement this method to perform their
tasks. Everything else in this base class is a helper method for the
action plugin to do that.
:kwarg tmp: Deprecated parameter. This is no longer used. An action plugin that calls
another one and wants to use the same remote tmp for both should set
self._connection._shell.tmpdir rather than this parameter.
:kwarg task_vars: The variables (host vars, group vars, config vars,
etc) associated with this task.
:returns: dictionary of results from the module
Implementers of action modules may find the following variables especially useful:
* Module parameters. These are stored in self._task.args
"""
# does not default to {'changed': False, 'failed': False}, as it breaks async
result = {}
if tmp is not None:
result['warning'] = ['ActionModule.run() no longer honors the tmp parameter. Action'
' plugins should set self._connection._shell.tmpdir to share'
' the tmpdir']
del tmp
if self._task.async_val and not self._supports_async:
raise AnsibleActionFail('async is not supported for this task.')
elif self._task.check_mode and not self._supports_check_mode:
raise AnsibleActionSkip('check mode is not supported for this task.')
elif self._task.async_val and self._task.check_mode:
raise AnsibleActionFail('check mode and async cannot be used on same task.')
# Error if invalid argument is passed
if self._VALID_ARGS:
task_opts = frozenset(self._task.args.keys())
bad_opts = task_opts.difference(self._VALID_ARGS)
if bad_opts:
raise AnsibleActionFail('Invalid options for %s: %s' % (self._task.action, ','.join(list(bad_opts))))
if self._connection._shell.tmpdir is None and self._early_needs_tmp_path():
self._make_tmp_path()
return result
def validate_argument_spec(self, argument_spec=None,
mutually_exclusive=None,
required_together=None,
required_one_of=None,
required_if=None,
required_by=None,
):
"""Validate an argument spec against the task args
This will return a tuple of (ValidationResult, dict) where the dict
is the validated, coerced, and normalized task args.
Be cautious when passing ``new_module_args`` directly to a
module invocation, as it will contain the defaults, and not only
the args supplied from the task. If you do this, the module
should not define ``mutually_exclusive`` or similar.
This code is roughly copied from the ``validate_argument_spec``
action plugin for use by other action plugins.
"""
new_module_args = self._task.args.copy()
validator = ArgumentSpecValidator(
argument_spec,
mutually_exclusive=mutually_exclusive,
required_together=required_together,
required_one_of=required_one_of,
required_if=required_if,
required_by=required_by,
)
validation_result = validator.validate(new_module_args)
new_module_args.update(validation_result.validated_parameters)
try:
error = validation_result.errors[0]
except IndexError:
error = None
# Fail for validation errors, even in check mode
if error:
msg = validation_result.errors.msg
if isinstance(error, UnsupportedError):
msg = f"Unsupported parameters for ({self._load_name}) module: {msg}"
raise AnsibleActionFail(msg)
return validation_result, new_module_args
def cleanup(self, force=False):
"""Method to perform a clean up at the end of an action plugin execution
By default this is designed to clean up the shell tmpdir, and is toggled based on whether
async is in use
Action plugins may override this if they deem necessary, but should still call this method
via super
"""
if force or not self._task.async_val:
self._remove_tmp_path(self._connection._shell.tmpdir)
def get_plugin_option(self, plugin, option, default=None):
"""Helper to get an option from a plugin without having to use
the try/except dance everywhere to set a default
"""
try:
return plugin.get_option(option)
except (AttributeError, KeyError):
return default
def get_become_option(self, option, default=None):
return self.get_plugin_option(self._connection.become, option, default=default)
def get_connection_option(self, option, default=None):
return self.get_plugin_option(self._connection, option, default=default)
def get_shell_option(self, option, default=None):
return self.get_plugin_option(self._connection._shell, option, default=default)
def _remote_file_exists(self, path):
cmd = self._connection._shell.exists(path)
result = self._low_level_execute_command(cmd=cmd, sudoable=True)
if result['rc'] == 0:
return True
return False
def _configure_module(self, module_name, module_args, task_vars):
'''
Handles the loading and templating of the module code through the
modify_module() function.
'''
if self._task.delegate_to:
use_vars = task_vars.get('ansible_delegated_vars')[self._task.delegate_to]
else:
use_vars = task_vars
split_module_name = module_name.split('.')
collection_name = '.'.join(split_module_name[0:2]) if len(split_module_name) > 2 else ''
leaf_module_name = resource_from_fqcr(module_name)
# Search module path(s) for named module.
for mod_type in self._connection.module_implementation_preferences:
# Check to determine if PowerShell modules are supported, and apply
# some fixes (hacks) to module name + args.
if mod_type == '.ps1':
# FIXME: This should be temporary and moved to an exec subsystem plugin where we can define the mapping
# for each subsystem.
win_collection = 'ansible.windows'
rewrite_collection_names = ['ansible.builtin', 'ansible.legacy', '']
# async_status, win_stat, win_file, win_copy, and win_ping are not just like their
# python counterparts but they are compatible enough for our
# internal usage
# NB: we only rewrite the module if it's not being called by the user (eg, an action calling something else)
# and if it's unqualified or FQ to a builtin
if leaf_module_name in ('stat', 'file', 'copy', 'ping') and \
collection_name in rewrite_collection_names and self._task.action != module_name:
module_name = '%s.win_%s' % (win_collection, leaf_module_name)
elif leaf_module_name == 'async_status' and collection_name in rewrite_collection_names:
module_name = '%s.%s' % (win_collection, leaf_module_name)
# TODO: move this tweak down to the modules, not extensible here
# Remove extra quotes surrounding path parameters before sending to module.
if leaf_module_name in ['win_stat', 'win_file', 'win_copy', 'slurp'] and module_args and \
hasattr(self._connection._shell, '_unquote'):
for key in ('src', 'dest', 'path'):
if key in module_args:
module_args[key] = self._connection._shell._unquote(module_args[key])
result = self._shared_loader_obj.module_loader.find_plugin_with_context(module_name, mod_type, collection_list=self._task.collections)
if not result.resolved:
if result.redirect_list and len(result.redirect_list) > 1:
# take the last one in the redirect list, we may have successfully jumped through N other redirects
target_module_name = result.redirect_list[-1]
raise AnsibleError("The module {0} was redirected to {1}, which could not be loaded.".format(module_name, target_module_name))
module_path = result.plugin_resolved_path
if module_path:
break
else: # This is a for-else: http://bit.ly/1ElPkyg
raise AnsibleError("The module %s was not found in configured module paths" % (module_name))
# insert shared code and arguments into the module
final_environment = dict()
self._compute_environment_string(final_environment)
become_kwargs = {}
if self._connection.become:
become_kwargs['become'] = True
become_kwargs['become_method'] = self._connection.become.name
become_kwargs['become_user'] = self._connection.become.get_option('become_user',
playcontext=self._play_context)
become_kwargs['become_password'] = self._connection.become.get_option('become_pass',
playcontext=self._play_context)
become_kwargs['become_flags'] = self._connection.become.get_option('become_flags',
playcontext=self._play_context)
# modify_module will exit early if interpreter discovery is required; re-run after if necessary
for dummy in (1, 2):
try:
(module_data, module_style, module_shebang) = modify_module(module_name, module_path, module_args, self._templar,
task_vars=use_vars,
module_compression=C.config.get_config_value('DEFAULT_MODULE_COMPRESSION',
variables=task_vars),
async_timeout=self._task.async_val,
environment=final_environment,
remote_is_local=bool(getattr(self._connection, '_remote_is_local', False)),
**become_kwargs)
break
except InterpreterDiscoveryRequiredError as idre:
self._discovered_interpreter = AnsibleUnsafeText(discover_interpreter(
action=self,
interpreter_name=idre.interpreter_name,
discovery_mode=idre.discovery_mode,
task_vars=use_vars))
# update the local task_vars with the discovered interpreter (which might be None);
# we'll propagate back to the controller in the task result
discovered_key = 'discovered_interpreter_%s' % idre.interpreter_name
# update the local vars copy for the retry
use_vars['ansible_facts'][discovered_key] = self._discovered_interpreter
# TODO: this condition prevents 'wrong host' from being updated
# but in future we would want to be able to update 'delegated host facts'
# irrespective of task settings
if not self._task.delegate_to or self._task.delegate_facts:
# store in local task_vars facts collection for the retry and any other usages in this worker
task_vars['ansible_facts'][discovered_key] = self._discovered_interpreter
# preserve this so _execute_module can propagate back to controller as a fact
self._discovered_interpreter_key = discovered_key
else:
task_vars['ansible_delegated_vars'][self._task.delegate_to]['ansible_facts'][discovered_key] = self._discovered_interpreter
return (module_style, module_shebang, module_data, module_path)
def _compute_environment_string(self, raw_environment_out=None):
'''
Builds the environment string to be used when executing the remote task.
'''
final_environment = dict()
if self._task.environment is not None:
environments = self._task.environment
if not isinstance(environments, list):
environments = [environments]
# The order of environments matters to make sure we merge
# in the parent's values first so those in the block then
# task 'win' in precedence
for environment in environments:
if environment is None or len(environment) == 0:
continue
temp_environment = self._templar.template(environment)
if not isinstance(temp_environment, dict):
raise AnsibleError("environment must be a dictionary, received %s (%s)" % (temp_environment, type(temp_environment)))
# very deliberately using update here instead of combine_vars, as
# these environment settings should not need to merge sub-dicts
final_environment.update(temp_environment)
if len(final_environment) > 0:
final_environment = self._templar.template(final_environment)
if isinstance(raw_environment_out, dict):
raw_environment_out.clear()
raw_environment_out.update(final_environment)
return self._connection._shell.env_prefix(**final_environment)
def _early_needs_tmp_path(self):
'''
Determines if a tmp path should be created before the action is executed.
'''
return getattr(self, 'TRANSFERS_FILES', False)
def _is_pipelining_enabled(self, module_style, wrap_async=False):
'''
Determines if we are required and can do pipelining
'''
try:
is_enabled = self._connection.get_option('pipelining')
except (KeyError, AttributeError, ValueError):
is_enabled = self._play_context.pipelining
# winrm supports async pipeline
# TODO: make other class property 'has_async_pipelining' to separate cases
always_pipeline = self._connection.always_pipeline_modules
# su does not work with pipelining
# TODO: add has_pipelining class prop to become plugins
become_exception = (self._connection.become.name if self._connection.become else '') != 'su'
# all of the following conditions must be true for pipelining to be used
conditions = [
self._connection.has_pipelining, # connection class supports it
is_enabled or always_pipeline, # enabled via config or forced via connection (eg winrm)
module_style == "new", # old style modules do not support pipelining
not C.DEFAULT_KEEP_REMOTE_FILES, # user does not want to keep remote files
not wrap_async or always_pipeline, # async does not normally support pipelining unless it does (eg winrm)
become_exception,
]
return all(conditions)
def _get_admin_users(self):
'''
Returns a list of admin users that are configured for the current shell
plugin
'''
return self.get_shell_option('admin_users', ['root'])
def _get_remote_addr(self, tvars):
''' consistently get the 'remote_address' for the action plugin '''
remote_addr = tvars.get('delegated_vars', {}).get('ansible_host', tvars.get('ansible_host', tvars.get('inventory_hostname', None)))
for variation in ('remote_addr', 'host'):
try:
remote_addr = self._connection.get_option(variation)
except KeyError:
continue
break
else:
# plugin does not have the option, fall back to play_context
remote_addr = self._play_context.remote_addr
return remote_addr
def _get_remote_user(self):
''' consistently get the 'remote_user' for the action plugin '''
# TODO: use 'current user running ansible' as fallback when moving away from play_context
# pwd.getpwuid(os.getuid()).pw_name
remote_user = None
try:
remote_user = self._connection.get_option('remote_user')
except KeyError:
# plugin does not have remote_user option, fall back to default and/or play_context
remote_user = getattr(self._connection, 'default_user', None) or self._play_context.remote_user
except AttributeError:
# plugin does not use config system, fallback to old play_context
remote_user = self._play_context.remote_user
return remote_user
def _is_become_unprivileged(self):
'''
The user is not the same as the connection user and is not part of the
shell configured admin users
'''
# if we don't use become then we know we aren't switching to a
# different unprivileged user
if not self._connection.become:
return False
# if we use become and the user is not an admin (or same user) then
# we need to return become_unprivileged as True
admin_users = self._get_admin_users()
remote_user = self._get_remote_user()
become_user = self.get_become_option('become_user')
return bool(become_user and become_user not in admin_users + [remote_user])
def _make_tmp_path(self, remote_user=None):
'''
Create and return a temporary path on a remote box.
'''
# Network connection plugins (network_cli, netconf, etc.) execute on the controller, rather than the remote host.
# As such, we want to avoid using remote_user for paths as remote_user may not line up with the local user
# This is a hack and should be solved by more intelligent handling of remote_tmp in 2.7
if getattr(self._connection, '_remote_is_local', False):
tmpdir = C.DEFAULT_LOCAL_TMP
else:
# NOTE: shell plugins should populate this setting anyway, but they don't do remote expansion, which
# we need for 'non posix' systems like cloud-init and solaris
tmpdir = self._remote_expand_user(self.get_shell_option('remote_tmp', default='~/.ansible/tmp'), sudoable=False)
become_unprivileged = self._is_become_unprivileged()
basefile = self._connection._shell._generate_temp_dir_name()
cmd = self._connection._shell.mkdtemp(basefile=basefile, system=become_unprivileged, tmpdir=tmpdir)
result = self._low_level_execute_command(cmd, sudoable=False)
# error handling on this seems a little aggressive?
if result['rc'] != 0:
if result['rc'] == 5:
output = 'Authentication failure.'
elif result['rc'] == 255 and self._connection.transport in ('ssh',):
if display.verbosity > 3:
output = u'SSH encountered an unknown error. The output was:\n%s%s' % (result['stdout'], result['stderr'])
else:
output = (u'SSH encountered an unknown error during the connection. '
'We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue')
elif u'No space left on device' in result['stderr']:
output = result['stderr']
else:
output = ('Failed to create temporary directory. '
'In some cases, you may have been able to authenticate and did not have permissions on the target directory. '
'Consider changing the remote tmp path in ansible.cfg to a path rooted in "/tmp", for more error information use -vvv. '
'Failed command was: %s, exited with result %d' % (cmd, result['rc']))
if 'stdout' in result and result['stdout'] != u'':
output = output + u", stdout output: %s" % result['stdout']
if display.verbosity > 3 and 'stderr' in result and result['stderr'] != u'':
output += u", stderr output: %s" % result['stderr']
raise AnsibleConnectionFailure(output)
else:
self._cleanup_remote_tmp = True
try:
stdout_parts = result['stdout'].strip().split('%s=' % basefile, 1)
rc = self._connection._shell.join_path(stdout_parts[-1], u'').splitlines()[-1]
except IndexError:
# stdout was empty or just space, set to / to trigger error in next if
rc = '/'
# Catch failure conditions, files should never be
# written to locations in /.
if rc == '/':
raise AnsibleError('failed to resolve remote temporary directory from %s: `%s` returned empty string' % (basefile, cmd))
self._connection._shell.tmpdir = rc
return rc
def _should_remove_tmp_path(self, tmp_path):
'''Determine if temporary path should be deleted or kept by user request/config'''
return tmp_path and self._cleanup_remote_tmp and not C.DEFAULT_KEEP_REMOTE_FILES and "-tmp-" in tmp_path
def _remove_tmp_path(self, tmp_path, force=False):
'''Remove a temporary path we created. '''
if tmp_path is None and self._connection._shell.tmpdir:
tmp_path = self._connection._shell.tmpdir
if force or self._should_remove_tmp_path(tmp_path):
cmd = self._connection._shell.remove(tmp_path, recurse=True)
# If we have gotten here we have a working connection configuration.
# If the connection breaks we could leave tmp directories out on the remote system.
tmp_rm_res = self._low_level_execute_command(cmd, sudoable=False)
if tmp_rm_res.get('rc', 0) != 0:
display.warning('Error deleting remote temporary files (rc: %s, stderr: %s)'
% (tmp_rm_res.get('rc'), tmp_rm_res.get('stderr', 'No error string available.')))
else:
self._connection._shell.tmpdir = None
def _transfer_file(self, local_path, remote_path):
"""
Copy a file from the controller to a remote path
:arg local_path: Path on controller to transfer
:arg remote_path: Path on the remote system to transfer into
.. warning::
* When you use this function you likely want to use fixup_perms2() on the
remote_path to make sure that the remote file is readable when the user becomes
a non-privileged user.
* If you use fixup_perms2() on the file and copy or move the file into place, you will
need to then remove filesystem acls on the file once it has been copied into place by
the module. See how the copy module implements this for help.
"""
self._connection.put_file(local_path, remote_path)
return remote_path
def _transfer_data(self, remote_path, data):
'''
Copies the module data out to the temporary module path.
'''
if isinstance(data, dict):
data = jsonify(data)
afd, afile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
afo = os.fdopen(afd, 'wb')
try:
data = to_bytes(data, errors='surrogate_or_strict')
afo.write(data)
except Exception as e:
raise AnsibleError("failure writing module data to temporary file for transfer: %s" % to_native(e))
afo.flush()
afo.close()
try:
self._transfer_file(afile, remote_path)
finally:
os.unlink(afile)
return remote_path
def _fixup_perms2(self, remote_paths, remote_user=None, execute=True):
"""
We need the files we upload to be readable (and sometimes executable)
by the user being sudo'd to but we want to limit other people's access
(because the files could contain passwords or other private
information). We achieve this in one of these ways:
* If no sudo is performed or the remote_user is sudo'ing to
themselves, we don't have to change permissions.
* If the remote_user sudo's to a privileged user (for instance, root),
we don't have to change permissions
* If the remote_user sudo's to an unprivileged user then we attempt to
grant the unprivileged user access via file system acls.
* If granting file system acls fails we try to change the owner of the
file with chown which only works in case the remote_user is
privileged or the remote systems allows chown calls by unprivileged
users (e.g. HP-UX)
* If the above fails, we next try 'chmod +a' which is a macOS way of
setting ACLs on files.
* If the above fails, we check if ansible_common_remote_group is set.
If it is, we attempt to chgrp the file to its value. This is useful
if the remote_user has a group in common with the become_user. As the
remote_user, we can chgrp the file to that group and allow the
become_user to read it.
* If (the chown fails AND ansible_common_remote_group is not set) OR
(ansible_common_remote_group is set AND the chgrp (or following chmod)
returned non-zero), we can set the file to be world readable so that
the second unprivileged user can read the file.
Since this could allow other users to get access to private
information we only do this if ansible is configured with
"allow_world_readable_tmpfiles" in the ansible.cfg. Also note that
when ansible_common_remote_group is set this final fallback is very
unlikely to ever be triggered, so long as chgrp was successful. But
just because the chgrp was successful, does not mean Ansible can
necessarily access the files (if, for example, the variable was set
to a group that remote_user is in, and can chgrp to, but does not have
in common with become_user).
"""
if remote_user is None:
remote_user = self._get_remote_user()
# Step 1: Are we on windows?
if getattr(self._connection._shell, "_IS_WINDOWS", False):
# This won't work on Powershell as-is, so we'll just completely
# skip until we have a need for it, at which point we'll have to do
# something different.
return remote_paths
# Step 2: If we're not becoming an unprivileged user, we are roughly
# done. Make the files +x if we're asked to, and return.
if not self._is_become_unprivileged():
if execute:
# Can't depend on the file being transferred with execute permissions.
# Only need user perms because no become was used here
res = self._remote_chmod(remote_paths, 'u+x')
if res['rc'] != 0:
raise AnsibleError(
'Failed to set execute bit on remote files '
'(rc: {0}, err: {1})'.format(
res['rc'],
to_native(res['stderr'])))
return remote_paths
# If we're still here, we have an unprivileged user that's different
# than the ssh user.
become_user = self.get_become_option('become_user')
# Try to use file system acls to make the files readable for sudo'd
# user
if execute:
chmod_mode = 'rx'
setfacl_mode = 'r-x'
# Apple patches their "file_cmds" chmod with ACL support
chmod_acl_mode = '{0} allow read,execute'.format(become_user)
# POSIX-draft ACL specification. Solaris, maybe others.
# See chmod(1) on something Solaris-based for syntax details.
posix_acl_mode = 'A+user:{0}:rx:allow'.format(become_user)
else:
chmod_mode = 'rX'
# TODO: this form fails silently on freebsd. We currently
# never call _fixup_perms2() with execute=False but if we
# start to we'll have to fix this.
setfacl_mode = 'r-X'
# Apple
chmod_acl_mode = '{0} allow read'.format(become_user)
# POSIX-draft
posix_acl_mode = 'A+user:{0}:r:allow'.format(become_user)
# Step 3a: Are we able to use setfacl to add user ACLs to the file?
res = self._remote_set_user_facl(
remote_paths,
become_user,
setfacl_mode)
if res['rc'] == 0:
return remote_paths
# Step 3b: Set execute if we need to. We do this before anything else
# because some of the methods below might work but not let us set +x
# as part of them.
if execute:
res = self._remote_chmod(remote_paths, 'u+x')
if res['rc'] != 0:
raise AnsibleError(
'Failed to set file mode or acl on remote temporary files '
'(rc: {0}, err: {1})'.format(
res['rc'],
to_native(res['stderr'])))
# Step 3c: File system ACLs failed above; try falling back to chown.
res = self._remote_chown(remote_paths, become_user)
if res['rc'] == 0:
return remote_paths
# Check if we are an admin/root user. If we are and got here, it means
# we failed to chown as root and something weird has happened.
if remote_user in self._get_admin_users():
raise AnsibleError(
'Failed to change ownership of the temporary files Ansible '
'(via chmod nor setfacl) needs to create despite connecting as a '
'privileged user. Unprivileged become user would be unable to read'
' the file.')
# Step 3d: Try macOS's special chmod + ACL
# macOS chmod's +a flag takes its own argument. As a slight hack, we
# pass that argument as the first element of remote_paths. So we end
# up running `chmod +a [that argument] [file 1] [file 2] ...`
try:
res = self._remote_chmod([chmod_acl_mode] + list(remote_paths), '+a')
except AnsibleAuthenticationFailure as e:
# Solaris-based chmod will return 5 when it sees an invalid mode,
# and +a is invalid there. Because it returns 5, which is the same
# thing sshpass returns on auth failure, our sshpass code will
# assume that auth failed. If we don't handle that case here, none
# of the other logic below will get run. This is fairly hacky and a
# corner case, but probably one that shows up pretty often in
# Solaris-based environments (and possibly others).
pass
else:
if res['rc'] == 0:
return remote_paths
# Step 3e: Try Solaris/OpenSolaris/OpenIndiana-sans-setfacl chmod
# Similar to macOS above, Solaris 11.4 drops setfacl and takes file ACLs
# via chmod instead. OpenSolaris and illumos-based distros allow for
# using either setfacl or chmod, and compatibility depends on filesystem.
# It should be possible to debug this branch by installing OpenIndiana
# (use ZFS) and going unpriv -> unpriv.
res = self._remote_chmod(remote_paths, posix_acl_mode)
if res['rc'] == 0:
return remote_paths
# we'll need this down here
become_link = get_versioned_doclink('playbook_guide/playbooks_privilege_escalation.html')
# Step 3f: Common group
# Otherwise, we're a normal user. We failed to chown the paths to the
# unprivileged user, but if we have a common group with them, we should
# be able to chown it to that.
#
# Note that we have no way of knowing if this will actually work... just
# because chgrp exits successfully does not mean that Ansible will work.
# We could check if the become user is in the group, but this would
# create an extra round trip.
#
# Also note that due to the above, this can prevent the
# world_readable_temp logic below from ever getting called. We
# leave this up to the user to rectify if they have both of these
# features enabled.
group = self.get_shell_option('common_remote_group')
if group is not None:
res = self._remote_chgrp(remote_paths, group)
if res['rc'] == 0:
# warn user that something might go weirdly here.
if self.get_shell_option('world_readable_temp'):
display.warning(
'Both common_remote_group and '
'allow_world_readable_tmpfiles are set. chgrp was '
'successful, but there is no guarantee that Ansible '
'will be able to read the files after this operation, '
'particularly if common_remote_group was set to a '
'group of which the unprivileged become user is not a '
'member. In this situation, '
'allow_world_readable_tmpfiles is a no-op. See this '
'URL for more details: %s'
'#risks-of-becoming-an-unprivileged-user' % become_link)
if execute:
group_mode = 'g+rwx'
else:
group_mode = 'g+rw'
res = self._remote_chmod(remote_paths, group_mode)
if res['rc'] == 0:
return remote_paths
# Step 4: World-readable temp directory
if self.get_shell_option('world_readable_temp'):
# chown and fs acls failed -- do things this insecure way only if
# the user opted in in the config file
display.warning(
'Using world-readable permissions for temporary files Ansible '
'needs to create when becoming an unprivileged user. This may '
'be insecure. For information on securing this, see %s'
'#risks-of-becoming-an-unprivileged-user' % become_link)
res = self._remote_chmod(remote_paths, 'a+%s' % chmod_mode)
if res['rc'] == 0:
return remote_paths
raise AnsibleError(
'Failed to set file mode on remote files '
'(rc: {0}, err: {1})'.format(
res['rc'],
to_native(res['stderr'])))
raise AnsibleError(
'Failed to set permissions on the temporary files Ansible needs '
'to create when becoming an unprivileged user '
'(rc: %s, err: %s). For information on working around this, see %s'
'#risks-of-becoming-an-unprivileged-user' % (
res['rc'],
to_native(res['stderr']), become_link))
def _remote_chmod(self, paths, mode, sudoable=False):
'''
Issue a remote chmod command
'''
cmd = self._connection._shell.chmod(paths, mode)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _remote_chown(self, paths, user, sudoable=False):
'''
Issue a remote chown command
'''
cmd = self._connection._shell.chown(paths, user)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _remote_chgrp(self, paths, group, sudoable=False):
'''
Issue a remote chgrp command
'''
cmd = self._connection._shell.chgrp(paths, group)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _remote_set_user_facl(self, paths, user, mode, sudoable=False):
'''
Issue a remote call to setfacl
'''
cmd = self._connection._shell.set_user_facl(paths, user, mode)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _execute_remote_stat(self, path, all_vars, follow, tmp=None, checksum=True):
'''
Get information from remote file.
'''
if tmp is not None:
display.warning('_execute_remote_stat no longer honors the tmp parameter. Action'
' plugins should set self._connection._shell.tmpdir to share'
' the tmpdir')
del tmp # No longer used
module_args = dict(
path=path,
follow=follow,
get_checksum=checksum,
get_size=False, # ansible.windows.win_stat added this in 1.11.0
checksum_algorithm='sha1',
)
# Unknown opts are ignored as module_args could be specific for the
# module that is being executed.
mystat = self._execute_module(module_name='ansible.legacy.stat', module_args=module_args, task_vars=all_vars,
wrap_async=False, ignore_unknown_opts=True)
if mystat.get('failed'):
msg = mystat.get('module_stderr')
if not msg:
msg = mystat.get('module_stdout')
if not msg:
msg = mystat.get('msg')
raise AnsibleError('Failed to get information on remote file (%s): %s' % (path, msg))
if not mystat['stat']['exists']:
# empty might be matched, 1 should never match, also backwards compatible
mystat['stat']['checksum'] = '1'
# happens sometimes when it is a dir and not on bsd
if 'checksum' not in mystat['stat']:
mystat['stat']['checksum'] = ''
elif not isinstance(mystat['stat']['checksum'], string_types):
raise AnsibleError("Invalid checksum returned by stat: expected a string type but got %s" % type(mystat['stat']['checksum']))
return mystat['stat']
def _remote_expand_user(self, path, sudoable=True, pathsep=None):
''' takes a remote path and performs tilde/$HOME expansion on the remote host '''
# We only expand ~/path and ~username/path
if not path.startswith('~'):
return path
# Per Jborean, we don't have to worry about Windows as we don't have a notion of user's home
# dir there.
split_path = path.split(os.path.sep, 1)
expand_path = split_path[0]
if expand_path == '~':
# Network connection plugins (network_cli, netconf, etc.) execute on the controller, rather than the remote host.
# As such, we want to avoid using remote_user for paths as remote_user may not line up with the local user
# This is a hack and should be solved by more intelligent handling of remote_tmp in 2.7
become_user = self.get_become_option('become_user')
if getattr(self._connection, '_remote_is_local', False):
pass
elif sudoable and self._connection.become and become_user:
expand_path = '~%s' % become_user
else:
# use remote user instead, if none set default to current user
expand_path = '~%s' % (self._get_remote_user() or '')
# use shell to construct appropriate command and execute
cmd = self._connection._shell.expand_user(expand_path)
data = self._low_level_execute_command(cmd, sudoable=False)
try:
initial_fragment = data['stdout'].strip().splitlines()[-1]
except IndexError:
initial_fragment = None
if not initial_fragment:
# Something went wrong trying to expand the path remotely. Try using pwd, if not, return
# the original string
cmd = self._connection._shell.pwd()
pwd = self._low_level_execute_command(cmd, sudoable=False).get('stdout', '').strip()
if pwd:
expanded = pwd
else:
expanded = path
elif len(split_path) > 1:
expanded = self._connection._shell.join_path(initial_fragment, *split_path[1:])
else:
expanded = initial_fragment
if '..' in os.path.dirname(expanded).split('/'):
raise AnsibleError("'%s' returned an invalid relative home directory path containing '..'" % self._get_remote_addr({}))
return expanded
def _strip_success_message(self, data):
'''
Removes the BECOME-SUCCESS message from the data.
'''
if data.strip().startswith('BECOME-SUCCESS-'):
data = re.sub(r'^((\r)?\n)?BECOME-SUCCESS.*(\r)?\n', '', data)
return data
def _update_module_args(self, module_name, module_args, task_vars, ignore_unknown_opts: bool = False):
# set check mode in the module arguments, if required
if self._task.check_mode:
if not self._supports_check_mode:
raise AnsibleError("check mode is not supported for this operation")
module_args['_ansible_check_mode'] = True
else:
module_args['_ansible_check_mode'] = False
# set no log in the module arguments, if required
no_target_syslog = C.config.get_config_value('DEFAULT_NO_TARGET_SYSLOG', variables=task_vars)
module_args['_ansible_no_log'] = self._task.no_log or no_target_syslog
# set debug in the module arguments, if required
module_args['_ansible_debug'] = C.DEFAULT_DEBUG
# let module know we are in diff mode
module_args['_ansible_diff'] = self._task.diff
# let module know our verbosity
module_args['_ansible_verbosity'] = display.verbosity
# give the module information about the ansible version
module_args['_ansible_version'] = __version__
# give the module information about its name
module_args['_ansible_module_name'] = module_name
# set the syslog facility to be used in the module
module_args['_ansible_syslog_facility'] = task_vars.get('ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY)
# let module know about filesystems that selinux treats specially
module_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS
# what to do when parameter values are converted to strings
module_args['_ansible_string_conversion_action'] = C.STRING_CONVERSION_ACTION
# give the module the socket for persistent connections
module_args['_ansible_socket'] = getattr(self._connection, 'socket_path')
if not module_args['_ansible_socket']:
module_args['_ansible_socket'] = task_vars.get('ansible_socket')
# make sure all commands use the designated shell executable
module_args['_ansible_shell_executable'] = self._play_context.executable
# make sure modules are aware if they need to keep the remote files
module_args['_ansible_keep_remote_files'] = C.DEFAULT_KEEP_REMOTE_FILES
# make sure all commands use the designated temporary directory if created
if self._is_become_unprivileged(): # force fallback on remote_tmp as user cannot normally write to dir
module_args['_ansible_tmpdir'] = None
else:
module_args['_ansible_tmpdir'] = self._connection._shell.tmpdir
# make sure the remote_tmp value is sent through in case modules need to create their own
module_args['_ansible_remote_tmp'] = self.get_shell_option('remote_tmp', default='~/.ansible/tmp')
# tells the module to ignore options that are not in its argspec.
module_args['_ansible_ignore_unknown_opts'] = ignore_unknown_opts
def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=None, wrap_async=False,
ignore_unknown_opts: bool = False):
'''
Transfer and run a module along with its arguments.
'''
if tmp is not None:
display.warning('_execute_module no longer honors the tmp parameter. Action plugins'
' should set self._connection._shell.tmpdir to share the tmpdir')
del tmp # No longer used
if delete_remote_tmp is not None:
display.warning('_execute_module no longer honors the delete_remote_tmp parameter.'
' Action plugins should check self._connection._shell.tmpdir to'
' see if a tmpdir existed before they were called to determine'
' if they are responsible for removing it.')
del delete_remote_tmp # No longer used
tmpdir = self._connection._shell.tmpdir
# We set the module_style to new here so the remote_tmp is created
# before the module args are built if remote_tmp is needed (async).
# If the module_style turns out to not be new and we didn't create the
# remote tmp here, it will still be created. This must be done before
# calling self._update_module_args() so the module wrapper has the
# correct remote_tmp value set
if not self._is_pipelining_enabled("new", wrap_async) and tmpdir is None:
self._make_tmp_path()
tmpdir = self._connection._shell.tmpdir
if task_vars is None:
task_vars = dict()
# if a module name was not specified for this execution, use the action from the task
if module_name is None:
module_name = self._task.action
if module_args is None:
module_args = self._task.args
self._update_module_args(module_name, module_args, task_vars, ignore_unknown_opts=ignore_unknown_opts)
remove_async_dir = None
if wrap_async or self._task.async_val:
async_dir = self.get_shell_option('async_dir', default="~/.ansible_async")
remove_async_dir = len(self._task.environment)
self._task.environment.append({"ANSIBLE_ASYNC_DIR": async_dir})
# FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality
(module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
display.vvv("Using module file %s" % module_path)
if not shebang and module_style != 'binary':
raise AnsibleError("module (%s) is missing interpreter line" % module_name)
self._used_interpreter = shebang
remote_module_path = None
if not self._is_pipelining_enabled(module_style, wrap_async):
# we might need remote tmp dir
if tmpdir is None:
self._make_tmp_path()
tmpdir = self._connection._shell.tmpdir
remote_module_filename = self._connection._shell.get_remote_filename(module_path)
remote_module_path = self._connection._shell.join_path(tmpdir, 'AnsiballZ_%s' % remote_module_filename)
args_file_path = None
if module_style in ('old', 'non_native_want_json', 'binary'):
# we'll also need a tmp file to hold our module arguments
args_file_path = self._connection._shell.join_path(tmpdir, 'args')
if remote_module_path or module_style != 'new':
display.debug("transferring module to remote %s" % remote_module_path)
if module_style == 'binary':
self._transfer_file(module_path, remote_module_path)
else:
self._transfer_data(remote_module_path, module_data)
if module_style == 'old':
# we need to dump the module args to a k=v string in a file on
# the remote system, which can be read and parsed by the module
args_data = ""
for k, v in module_args.items():
args_data += '%s=%s ' % (k, shlex.quote(text_type(v)))
self._transfer_data(args_file_path, args_data)
elif module_style in ('non_native_want_json', 'binary'):
self._transfer_data(args_file_path, json.dumps(module_args))
display.debug("done transferring module to remote")
environment_string = self._compute_environment_string()
# remove the ANSIBLE_ASYNC_DIR env entry if we added a temporary one for
# the async_wrapper task.
if remove_async_dir is not None:
del self._task.environment[remove_async_dir]
remote_files = []
if tmpdir and remote_module_path:
remote_files = [tmpdir, remote_module_path]
if args_file_path:
remote_files.append(args_file_path)
sudoable = True
in_data = None
cmd = ""
if wrap_async and not self._connection.always_pipeline_modules:
# configure, upload, and chmod the async_wrapper module
(async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(
module_name='ansible.legacy.async_wrapper', module_args=dict(), task_vars=task_vars)
async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
remote_async_module_path = self._connection._shell.join_path(tmpdir, async_module_remote_filename)
self._transfer_data(remote_async_module_path, async_module_data)
remote_files.append(remote_async_module_path)
async_limit = self._task.async_val
async_jid = f'j{random.randint(0, 999999999999)}'
# call the interpreter for async_wrapper directly
# this permits use of a script for an interpreter on non-Linux platforms
interpreter = shebang.replace('#!', '').strip()
async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path]
if environment_string:
async_cmd.insert(0, environment_string)
if args_file_path:
async_cmd.append(args_file_path)
else:
# maintain a fixed number of positional parameters for async_wrapper
async_cmd.append('_')
if not self._should_remove_tmp_path(tmpdir):
async_cmd.append("-preserve_tmp")
cmd = " ".join(to_text(x) for x in async_cmd)
else:
if self._is_pipelining_enabled(module_style):
in_data = module_data
display.vvv("Pipelining is enabled.")
else:
cmd = remote_module_path
cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path).strip()
# Fix permissions of the tmpdir path and tmpdir files. This should be called after all
# files have been transferred.
if remote_files:
# remove none/empty
remote_files = [x for x in remote_files if x]
self._fixup_perms2(remote_files, self._get_remote_user())
# actually execute
res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data)
# parse the main result
data = self._parse_returned_data(res)
# NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
# get internal info before cleaning
if data.pop("_ansible_suppress_tmpdir_delete", False):
self._cleanup_remote_tmp = False
# NOTE: yum returns results .. but that made it 'compatible' with squashing, so we allow mappings, for now
if 'results' in data and (not isinstance(data['results'], Sequence) or isinstance(data['results'], string_types)):
data['ansible_module_results'] = data['results']
del data['results']
display.warning("Found internal 'results' key in module return, renamed to 'ansible_module_results'.")
# remove internal keys
remove_internal_keys(data)
if wrap_async:
# async_wrapper will clean up its tmpdir on its own so we want the controller side to
# forget about it now
self._connection._shell.tmpdir = None
# FIXME: for backwards compat, figure out if still makes sense
data['changed'] = True
# pre-split stdout/stderr into lines if needed
if 'stdout' in data and 'stdout_lines' not in data:
# if the value is 'False', a default won't catch it.
txt = data.get('stdout', None) or u''
data['stdout_lines'] = txt.splitlines()
if 'stderr' in data and 'stderr_lines' not in data:
# if the value is 'False', a default won't catch it.
txt = data.get('stderr', None) or u''
data['stderr_lines'] = txt.splitlines()
# propagate interpreter discovery results back to the controller
if self._discovered_interpreter_key:
if data.get('ansible_facts') is None:
data['ansible_facts'] = {}
data['ansible_facts'][self._discovered_interpreter_key] = self._discovered_interpreter
if self._discovery_warnings:
if data.get('warnings') is None:
data['warnings'] = []
data['warnings'].extend(self._discovery_warnings)
if self._discovery_deprecation_warnings:
if data.get('deprecations') is None:
data['deprecations'] = []
data['deprecations'].extend(self._discovery_deprecation_warnings)
# mark the entire module results untrusted as a template right here, since the current action could
# possibly template one of these values.
data = wrap_var(data)
display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
return data
def _parse_returned_data(self, res):
try:
filtered_output, warnings = _filter_non_json_lines(res.get('stdout', u''), objects_only=True)
for w in warnings:
display.warning(w)
data = json.loads(filtered_output)
if C.MODULE_STRICT_UTF8_RESPONSE and not data.pop('_ansible_trusted_utf8', None):
try:
_validate_utf8_json(data)
except UnicodeEncodeError:
# When removing this, also remove the loop and latin-1 from ansible.module_utils.common.text.converters.jsonify
display.deprecated(
f'Module "{self._task.resolved_action or self._task.action}" returned non UTF-8 data in '
'the JSON response. This will become an error in the future',
version='2.18',
)
data['_ansible_parsed'] = True
except ValueError:
# not valid json, lets try to capture error
data = dict(failed=True, _ansible_parsed=False)
data['module_stdout'] = res.get('stdout', u'')
if 'stderr' in res:
data['module_stderr'] = res['stderr']
if res['stderr'].startswith(u'Traceback'):
data['exception'] = res['stderr']
# in some cases a traceback will arrive on stdout instead of stderr, such as when using ssh with -tt
if 'exception' not in data and data['module_stdout'].startswith(u'Traceback'):
data['exception'] = data['module_stdout']
# The default
data['msg'] = "MODULE FAILURE"
# try to figure out if we are missing interpreter
if self._used_interpreter is not None:
interpreter = re.escape(self._used_interpreter.lstrip('!#'))
match = re.compile('%s: (?:No such file or directory|not found)' % interpreter)
if match.search(data['module_stderr']) or match.search(data['module_stdout']):
data['msg'] = "The module failed to execute correctly, you probably need to set the interpreter."
# always append hint
data['msg'] += '\nSee stdout/stderr for the exact error'
if 'rc' in res:
data['rc'] = res['rc']
return data
# FIXME: move to connection base
def _low_level_execute_command(self, cmd, sudoable=True, in_data=None, executable=None, encoding_errors='surrogate_then_replace', chdir=None):
'''
This is the function which executes the low level shell command, which
may be commands to create/remove directories for temporary files, or to
run the module code or python directly when pipelining.
:kwarg encoding_errors: If the value returned by the command isn't
utf-8 then we have to figure out how to transform it to unicode.
If the value is just going to be displayed to the user (or
discarded) then the default of 'replace' is fine. If the data is
used as a key or is going to be written back out to a file
verbatim, then this won't work. May have to use some sort of
replacement strategy (python3 could use surrogateescape)
:kwarg chdir: cd into this directory before executing the command.
'''
display.debug("_low_level_execute_command(): starting")
# if not cmd:
# # this can happen with powershell modules when there is no analog to a Windows command (like chmod)
# display.debug("_low_level_execute_command(): no command, exiting")
# return dict(stdout='', stderr='', rc=254)
if chdir:
display.debug("_low_level_execute_command(): changing cwd to %s for this command" % chdir)
cmd = self._connection._shell.append_command('cd %s' % chdir, cmd)
# https://github.com/ansible/ansible/issues/68054
if executable:
self._connection._shell.executable = executable
ruser = self._get_remote_user()
buser = self.get_become_option('become_user')
if (sudoable and self._connection.become and # if sudoable and have become
resource_from_fqcr(self._connection.transport) != 'network_cli' and # if not using network_cli
(C.BECOME_ALLOW_SAME_USER or (buser != ruser or not any((ruser, buser))))): # if we allow same user PE or users are different and either is set
display.debug("_low_level_execute_command(): using become for this command")
cmd = self._connection.become.build_become_command(cmd, self._connection._shell)
if self._connection.allow_executable:
if executable is None:
executable = self._play_context.executable
# mitigation for SSH race which can drop stdout (https://github.com/ansible/ansible/issues/13876)
# only applied for the default executable to avoid interfering with the raw action
cmd = self._connection._shell.append_command(cmd, 'sleep 0')
if executable:
cmd = executable + ' -c ' + shlex.quote(cmd)
display.debug("_low_level_execute_command(): executing: %s" % (cmd,))
# Change directory to basedir of task for command execution when connection is local
if self._connection.transport == 'local':
self._connection.cwd = to_bytes(self._loader.get_basedir(), errors='surrogate_or_strict')
rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable)
# stdout and stderr may be either a file-like or a bytes object.
# Convert either one to a text type
if isinstance(stdout, binary_type):
out = to_text(stdout, errors=encoding_errors)
elif not isinstance(stdout, text_type):
out = to_text(b''.join(stdout.readlines()), errors=encoding_errors)
else:
out = stdout
if isinstance(stderr, binary_type):
err = to_text(stderr, errors=encoding_errors)
elif not isinstance(stderr, text_type):
err = to_text(b''.join(stderr.readlines()), errors=encoding_errors)
else:
err = stderr
if rc is None:
rc = 0
# be sure to remove the BECOME-SUCCESS message now
out = self._strip_success_message(out)
display.debug(u"_low_level_execute_command() done: rc=%d, stdout=%s, stderr=%s" % (rc, out, err))
return dict(rc=rc, stdout=out, stdout_lines=out.splitlines(), stderr=err, stderr_lines=err.splitlines())
def _get_diff_data(self, destination, source, task_vars, content, source_file=True):
# Note: Since we do not diff the source and destination before we transform from bytes into
# text the diff between source and destination may not be accurate. To fix this, we'd need
# to move the diffing from the callback plugins into here.
#
# Example of data which would cause trouble is src_content == b'\xff' and dest_content ==
# b'\xfe'. Neither of those are valid utf-8 so both get turned into the replacement
# character: diff['before'] = u'�' ; diff['after'] = u'�' When the callback plugin later
# diffs before and after it shows an empty diff.
diff = {}
display.debug("Going to peek to see if file has changed permissions")
peek_result = self._execute_module(
module_name='ansible.legacy.file', module_args=dict(path=destination, _diff_peek=True),
task_vars=task_vars, persist_files=True)
if peek_result.get('failed', False):
display.warning(u"Failed to get diff between '%s' and '%s': %s" % (os.path.basename(source), destination, to_text(peek_result.get(u'msg', u''))))
return diff
if peek_result.get('rc', 0) == 0:
if peek_result.get('state') in (None, 'absent'):
diff['before'] = u''
elif peek_result.get('appears_binary'):
diff['dst_binary'] = 1
elif peek_result.get('size') and C.MAX_FILE_SIZE_FOR_DIFF > 0 and peek_result['size'] > C.MAX_FILE_SIZE_FOR_DIFF:
diff['dst_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
else:
display.debug(u"Slurping the file %s" % source)
dest_result = self._execute_module(
module_name='ansible.legacy.slurp', module_args=dict(path=destination),
task_vars=task_vars, persist_files=True)
if 'content' in dest_result:
dest_contents = dest_result['content']
if dest_result['encoding'] == u'base64':
dest_contents = base64.b64decode(dest_contents)
else:
raise AnsibleError("unknown encoding in content option, failed: %s" % to_native(dest_result))
diff['before_header'] = destination
diff['before'] = to_text(dest_contents)
if source_file:
st = os.stat(source)
if C.MAX_FILE_SIZE_FOR_DIFF > 0 and st[stat.ST_SIZE] > C.MAX_FILE_SIZE_FOR_DIFF:
diff['src_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
else:
display.debug("Reading local copy of the file %s" % source)
try:
with open(source, 'rb') as src:
src_contents = src.read()
except Exception as e:
raise AnsibleError("Unexpected error while reading source (%s) for diff: %s " % (source, to_native(e)))
if b"\x00" in src_contents:
diff['src_binary'] = 1
else:
if content:
diff['after_header'] = destination
else:
diff['after_header'] = source
diff['after'] = to_text(src_contents)
else:
display.debug(u"source of file passed in")
diff['after_header'] = u'dynamically generated'
diff['after'] = source
if self._task.no_log:
if 'before' in diff:
diff["before"] = u""
if 'after' in diff:
diff["after"] = u" [[ Diff output has been hidden because 'no_log: true' was specified for this result ]]\n"
return diff
def _find_needle(self, dirname, needle):
'''
find a needle in haystack of paths, optionally using 'dirname' as a subdir.
This will build the ordered list of paths to search and pass them to dwim
to get back the first existing file found.
'''
# dwim already deals with playbook basedirs
path_stack = self._task.get_search_path()
# if missing it will return a file not found exception
return self._loader.path_dwim_relative_stack(path_stack, dirname, needle)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,710 |
Configurable sampling/transfer of control-side task context metadata to targets
|
### Summary
We're often asked how to include arbitrary control-side contextual metadata with task invocations, and to include that metadata in target-side task log messages. For example: sending an AWX/Controller Job ID to the target hosts with each module invocation made by that job, and logging it in the module-generated syslog/Windows Application Event Log entries for future correlation with the owning job.
I've not seen any consensus on precisely *which* data to include; one person's "critical forensic correlation data" is another's "unacceptable disclosure of sensitive execution detail". Seems like we'd need a generic facility to specify environment vars and/or hostvars to sample on the control host to be included with task invocations (under a reserved dictionary arg), and adjust the module logging APIs to include them.
My initial thought is to define a new core config element (defaulting to none) that allows the user to define a templated expression that would be rendered as part of each task's templating under a host context. The rendered result would be sent to modules as a new reserved internal module var. The module logging APIs would then include this value verbatim, when present. Other module code would also have access to the value, which could be used for anything. The new config would be settable either via ansible.cfg or an envvar, making it easier for AWX/Controller to later provide a mechanism to configure it for jobs using core versions that support it, while older versions would just silently ignore it.
Maybe something like:
```
ANSIBLE_ADDITIONAL_TASK_CONTEXT='{{awx_job_id}}'
```
When this config is non-empty, the defined template would be rendered for each task/host invocation, and its result included in a new `_ansible_additional_task_context` reserved module var. The resulting value, as with any Ansible template expression, could be of arbitrary complexity (e.g., returning a data structure instead of just a scalar). The module logging APIs would include the serialized value verbatim in log messages when it is present, e.g. "ansible_additional_task_context=(whatever the value was)".
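As a complementary, equally hypothetical sketch of the target side: a module's logging helper could serialize the reserved var verbatim into each log line when it is present. The var name and the `ansible_additional_task_context=` log format are assumptions taken from the proposal text, not an existing Ansible API.
```python
# Hypothetical sketch only -- the var name and log format are assumptions from the proposal.
import json
import syslog

def log_with_task_context(msg: str, module_params: dict) -> None:
    """Append the serialized task context (if any) and emit a syslog entry."""
    context = module_params.get('_ansible_additional_task_context')
    if context is not None:
        msg = '%s ansible_additional_task_context=%s' % (msg, json.dumps(context))
    syslog.openlog('ansible-example', 0, syslog.LOG_USER)
    syslog.syslog(syslog.LOG_INFO, msg)

log_with_task_context('Invoked with path=/tmp/example',
                      {'_ansible_additional_task_context': {'awx_job_id': 12345}})
```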
### Issue Type
Feature Idea
### Component Name
module invocation and logging
### Additional Information
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81710
|
https://github.com/ansible/ansible/pull/81711
|
4208bdbbcd994251579409ad533b40c9b0543550
|
1dd0d6fad70d7d4f423dac41822da65ff9ec95ef
| 2023-09-18T16:35:01Z |
python
| 2023-11-30T18:12:55Z |
test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1
|
#!powershell
#AnsibleRequires -CSharpUtil Ansible.Basic
$module = [Ansible.Basic.AnsibleModule]::Create($args, @{})
Function Assert-Equal {
param(
[Parameter(Mandatory = $true, ValueFromPipeline = $true)][AllowNull()]$Actual,
[Parameter(Mandatory = $true, Position = 0)][AllowNull()]$Expected
)
process {
$matched = $false
if ($Actual -is [System.Collections.ArrayList] -or $Actual -is [Array]) {
$Actual.Count | Assert-Equal -Expected $Expected.Count
for ($i = 0; $i -lt $Actual.Count; $i++) {
$actual_value = $Actual[$i]
$expected_value = $Expected[$i]
Assert-Equal -Actual $actual_value -Expected $expected_value
}
$matched = $true
}
else {
$matched = $Actual -ceq $Expected
}
if (-not $matched) {
if ($Actual -is [PSObject]) {
$Actual = $Actual.ToString()
}
$call_stack = (Get-PSCallStack)[1]
$module.Result.failed = $true
$module.Result.test = $test
$module.Result.actual = $Actual
$module.Result.expected = $Expected
$module.Result.line = $call_stack.ScriptLineNumber
$module.Result.method = $call_stack.Position.Text
$module.Result.msg = "AssertionError: actual != expected"
Exit-Module
}
}
}
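# Assertion helper: recursively compares two dictionaries, checking that both
# contain the same keys and that nested dictionaries and lists match as well.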
Function Assert-DictionaryEqual {
param(
[Parameter(Mandatory = $true, ValueFromPipeline = $true)][AllowNull()]$Actual,
[Parameter(Mandatory = $true, Position = 0)][AllowNull()]$Expected
)
process {
$actual_keys = $Actual.Keys
$expected_keys = $Expected.Keys
$actual_keys.Count | Assert-Equal -Expected $expected_keys.Count
foreach ($actual_entry in $Actual.GetEnumerator()) {
$actual_key = $actual_entry.Key
($actual_key -cin $expected_keys) | Assert-Equal -Expected $true
$actual_value = $actual_entry.Value
$expected_value = $Expected.$actual_key
if ($actual_value -is [System.Collections.IDictionary]) {
$actual_value | Assert-DictionaryEqual -Expected $expected_value
}
elseif ($actual_value -is [System.Collections.ArrayList] -or $actual_value -is [Array]) {
for ($i = 0; $i -lt $actual_value.Count; $i++) {
$actual_entry = $actual_value[$i]
$expected_entry = $expected_value[$i]
if ($actual_entry -is [System.Collections.IDictionary]) {
$actual_entry | Assert-DictionaryEqual -Expected $expected_entry
}
else {
Assert-Equal -Actual $actual_entry -Expected $expected_entry
}
}
}
else {
Assert-Equal -Actual $actual_value -Expected $expected_value
}
}
foreach ($expected_key in $expected_keys) {
($expected_key -cin $actual_keys) | Assert-Equal -Expected $true
}
}
}
Function Exit-Module {
# Make sure Exit actually calls exit and not our overridden test behaviour
[Ansible.Basic.AnsibleModule]::Exit = { param([Int32]$rc) exit $rc }
Write-Output -InputObject (ConvertTo-Json -InputObject $module.Result -Compress -Depth 99)
$module.ExitJson()
}
$tmpdir = $module.Tmpdir
# Override the Exit and WriteLine behaviour to throw an exception instead of exiting the module
[Ansible.Basic.AnsibleModule]::Exit = {
param([Int32]$rc)
$exp = New-Object -TypeName System.Exception -ArgumentList "exit: $rc"
$exp | Add-Member -Type NoteProperty -Name Output -Value $_test_out
throw $exp
}
[Ansible.Basic.AnsibleModule]::WriteLine = {
param([String]$line)
Set-Variable -Name _test_out -Scope Global -Value $line
}
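# Each entry below is a named scriptblock exercising one Ansible.Basic.AnsibleModule behaviour.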
$tests = @{
"Empty spec and no options - args file" = {
$args_file = Join-Path -Path $tmpdir -ChildPath "args-$(Get-Random).json"
[System.IO.File]::WriteAllText($args_file, '{ "ANSIBLE_MODULE_ARGS": {} }')
$m = [Ansible.Basic.AnsibleModule]::Create(@($args_file), @{})
$m.CheckMode | Assert-Equal -Expected $false
$m.DebugMode | Assert-Equal -Expected $false
$m.DiffMode | Assert-Equal -Expected $false
$m.KeepRemoteFiles | Assert-Equal -Expected $false
$m.ModuleName | Assert-Equal -Expected "undefined win module"
$m.NoLog | Assert-Equal -Expected $false
$m.Verbosity | Assert-Equal -Expected 0
$m.AnsibleVersion | Assert-Equal -Expected $null
}
"Empty spec and no options - complex_args" = {
Set-Variable -Name complex_args -Scope Global -Value @{}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$m.CheckMode | Assert-Equal -Expected $false
$m.DebugMode | Assert-Equal -Expected $false
$m.DiffMode | Assert-Equal -Expected $false
$m.KeepRemoteFiles | Assert-Equal -Expected $false
$m.ModuleName | Assert-Equal -Expected "undefined win module"
$m.NoLog | Assert-Equal -Expected $false
$m.Verbosity | Assert-Equal -Expected 0
$m.AnsibleVersion | Assert-Equal -Expected $null
}
"Internal param changes - args file" = {
$m_tmpdir = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)"
New-Item -Path $m_tmpdir -ItemType Directory > $null
$args_file = Join-Path -Path $tmpdir -ChildPath "args-$(Get-Random).json"
[System.IO.File]::WriteAllText($args_file, @"
{
"ANSIBLE_MODULE_ARGS": {
"_ansible_check_mode": true,
"_ansible_debug": true,
"_ansible_diff": true,
"_ansible_keep_remote_files": true,
"_ansible_module_name": "ansible_basic_tests",
"_ansible_no_log": true,
"_ansible_remote_tmp": "%TEMP%",
"_ansible_selinux_special_fs": "ignored",
"_ansible_shell_executable": "ignored",
"_ansible_socket": "ignored",
"_ansible_syslog_facility": "ignored",
"_ansible_tmpdir": "$($m_tmpdir -replace "\\", "\\")",
"_ansible_verbosity": 3,
"_ansible_version": "2.8.0"
}
}
"@)
$m = [Ansible.Basic.AnsibleModule]::Create(@($args_file), @{supports_check_mode = $true })
$m.CheckMode | Assert-Equal -Expected $true
$m.DebugMode | Assert-Equal -Expected $true
$m.DiffMode | Assert-Equal -Expected $true
$m.KeepRemoteFiles | Assert-Equal -Expected $true
$m.ModuleName | Assert-Equal -Expected "ansible_basic_tests"
$m.NoLog | Assert-Equal -Expected $true
$m.Verbosity | Assert-Equal -Expected 3
$m.AnsibleVersion | Assert-Equal -Expected "2.8.0"
$m.Tmpdir | Assert-Equal -Expected $m_tmpdir
}
"Internal param changes - complex_args" = {
$m_tmpdir = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)"
New-Item -Path $m_tmpdir -ItemType Directory > $null
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_check_mode = $true
_ansible_debug = $true
_ansible_diff = $true
_ansible_keep_remote_files = $true
_ansible_module_name = "ansible_basic_tests"
_ansible_no_log = $true
_ansible_remote_tmp = "%TEMP%"
_ansible_selinux_special_fs = "ignored"
_ansible_shell_executable = "ignored"
_ansible_socket = "ignored"
_ansible_syslog_facility = "ignored"
_ansible_tmpdir = $m_tmpdir.ToString()
_ansible_verbosity = 3
_ansible_version = "2.8.0"
}
$spec = @{
supports_check_mode = $true
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$m.CheckMode | Assert-Equal -Expected $true
$m.DebugMode | Assert-Equal -Expected $true
$m.DiffMode | Assert-Equal -Expected $true
$m.KeepRemoteFiles | Assert-Equal -Expected $true
$m.ModuleName | Assert-Equal -Expected "ansible_basic_tests"
$m.NoLog | Assert-Equal -Expected $true
$m.Verbosity | Assert-Equal -Expected 3
$m.AnsibleVersion | Assert-Equal -Expected "2.8.0"
$m.Tmpdir | Assert-Equal -Expected $m_tmpdir
}
"Parse complex module options" = {
$spec = @{
options = @{
option_default = @{}
missing_option_default = @{}
string_option = @{type = "str" }
required_option = @{required = $true }
missing_choices = @{choices = "a", "b" }
choices = @{choices = "a", "b" }
one_choice = @{choices = , "b" }
choice_with_default = @{choices = "a", "b"; default = "b" }
alias_direct = @{aliases = , "alias_direct1" }
alias_as_alias = @{aliases = "alias_as_alias1", "alias_as_alias2" }
bool_type = @{type = "bool" }
bool_from_str = @{type = "bool" }
dict_type = @{
type = "dict"
options = @{
int_type = @{type = "int" }
str_type = @{type = "str"; default = "str_sub_type" }
}
}
dict_type_missing = @{
type = "dict"
options = @{
int_type = @{type = "int" }
str_type = @{type = "str"; default = "str_sub_type" }
}
}
dict_type_defaults = @{
type = "dict"
apply_defaults = $true
options = @{
int_type = @{type = "int" }
str_type = @{type = "str"; default = "str_sub_type" }
}
}
dict_type_json = @{type = "dict" }
dict_type_str = @{type = "dict" }
float_type = @{type = "float" }
int_type = @{type = "int" }
json_type = @{type = "json" }
json_type_dict = @{type = "json" }
list_type = @{type = "list" }
list_type_str = @{type = "list" }
list_with_int = @{type = "list"; elements = "int" }
list_type_single = @{type = "list" }
list_with_dict = @{
type = "list"
elements = "dict"
options = @{
int_type = @{type = "int" }
str_type = @{type = "str"; default = "str_sub_type" }
}
}
path_type = @{type = "path" }
path_type_nt = @{type = "path" }
path_type_missing = @{type = "path" }
raw_type_str = @{type = "raw" }
raw_type_int = @{type = "raw" }
sid_type = @{type = "sid" }
sid_from_name = @{type = "sid" }
str_type = @{type = "str" }
delegate_type = @{type = [Func[[Object], [UInt64]]] { [System.UInt64]::Parse($args[0]) } }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_default = 1
string_option = 1
required_option = "required"
choices = "a"
one_choice = "b"
alias_direct = "a"
alias_as_alias2 = "a"
bool_type = $true
bool_from_str = "false"
dict_type = @{
int_type = "10"
}
dict_type_json = '{"a":"a","b":1,"c":["a","b"]}'
dict_type_str = 'a=a b="b 2" c=c'
float_type = "3.14159"
int_type = 0
json_type = '{"a":"a","b":1,"c":["a","b"]}'
json_type_dict = @{
a = "a"
b = 1
c = @("a", "b")
}
list_type = @("a", "b", 1, 2)
list_type_str = "a, b,1,2 "
list_with_int = @("1", 2)
list_type_single = "single"
list_with_dict = @(
@{
int_type = 2
str_type = "dict entry"
},
@{ int_type = 1 },
@{}
)
path_type = "%SystemRoot%\System32"
path_type_nt = "\\?\%SystemRoot%\System32"
path_type_missing = "T:\missing\path"
raw_type_str = "str"
raw_type_int = 1
sid_type = "S-1-5-18"
sid_from_name = "SYSTEM"
str_type = "str"
delegate_type = "1234"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$m.Params.option_default | Assert-Equal -Expected "1"
$m.Params.option_default.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.missing_option_default | Assert-Equal -Expected $null
$m.Params.string_option | Assert-Equal -Expected "1"
$m.Params.string_option.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.required_option | Assert-Equal -Expected "required"
$m.Params.required_option.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.missing_choices | Assert-Equal -Expected $null
$m.Params.choices | Assert-Equal -Expected "a"
$m.Params.choices.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.one_choice | Assert-Equal -Expected "b"
$m.Params.one_choice.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.choice_with_default | Assert-Equal -Expected "b"
$m.Params.choice_with_default.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.alias_direct | Assert-Equal -Expected "a"
$m.Params.alias_direct.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.alias_as_alias | Assert-Equal -Expected "a"
$m.Params.alias_as_alias.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.bool_type | Assert-Equal -Expected $true
$m.Params.bool_type.GetType().ToString() | Assert-Equal -Expected "System.Boolean"
$m.Params.bool_from_str | Assert-Equal -Expected $false
$m.Params.bool_from_str.GetType().ToString() | Assert-Equal -Expected "System.Boolean"
$m.Params.dict_type | Assert-DictionaryEqual -Expected @{int_type = 10; str_type = "str_sub_type" }
$m.Params.dict_type.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.Dictionary``2[System.String,System.Object]"
$m.Params.dict_type.int_type.GetType().ToString() | Assert-Equal -Expected "System.Int32"
$m.Params.dict_type.str_type.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.dict_type_missing | Assert-Equal -Expected $null
$m.Params.dict_type_defaults | Assert-DictionaryEqual -Expected @{int_type = $null; str_type = "str_sub_type" }
$m.Params.dict_type_defaults.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.Dictionary``2[System.String,System.Object]"
$m.Params.dict_type_defaults.str_type.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.dict_type_json | Assert-DictionaryEqual -Expected @{
a = "a"
b = 1
c = @("a", "b")
}
$m.Params.dict_type_json.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.Dictionary``2[System.String,System.Object]"
$m.Params.dict_type_json.a.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.dict_type_json.b.GetType().ToString() | Assert-Equal -Expected "System.Int32"
$m.Params.dict_type_json.c.GetType().ToString() | Assert-Equal -Expected "System.Collections.ArrayList"
$m.Params.dict_type_str | Assert-DictionaryEqual -Expected @{a = "a"; b = "b 2"; c = "c" }
$m.Params.dict_type_str.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.Dictionary``2[System.String,System.Object]"
$m.Params.dict_type_str.a.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.dict_type_str.b.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.dict_type_str.c.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.float_type | Assert-Equal -Expected ([System.Single]3.14159)
$m.Params.float_type.GetType().ToString() | Assert-Equal -Expected "System.Single"
$m.Params.int_type | Assert-Equal -Expected 0
$m.Params.int_type.GetType().ToString() | Assert-Equal -Expected "System.Int32"
$m.Params.json_type | Assert-Equal -Expected '{"a":"a","b":1,"c":["a","b"]}'
$m.Params.json_type.GetType().ToString() | Assert-Equal -Expected "System.String"
$jsonValue = ([Ansible.Basic.AnsibleModule]::FromJson('{"a":"a","b":1,"c":["a","b"]}'))
[Ansible.Basic.AnsibleModule]::FromJson($m.Params.json_type_dict) | Assert-DictionaryEqual -Expected $jsonValue
$m.Params.json_type_dict.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.list_type.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.List``1[System.Object]"
$m.Params.list_type.Count | Assert-Equal -Expected 4
$m.Params.list_type[0] | Assert-Equal -Expected "a"
$m.Params.list_type[0].GetType().FullName | Assert-Equal -Expected "System.String"
$m.Params.list_type[1] | Assert-Equal -Expected "b"
$m.Params.list_type[1].GetType().FullName | Assert-Equal -Expected "System.String"
$m.Params.list_type[2] | Assert-Equal -Expected 1
$m.Params.list_type[2].GetType().FullName | Assert-Equal -Expected "System.Int32"
$m.Params.list_type[3] | Assert-Equal -Expected 2
$m.Params.list_type[3].GetType().FullName | Assert-Equal -Expected "System.Int32"
$m.Params.list_type_str.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.List``1[System.Object]"
$m.Params.list_type_str.Count | Assert-Equal -Expected 4
$m.Params.list_type_str[0] | Assert-Equal -Expected "a"
$m.Params.list_type_str[0].GetType().FullName | Assert-Equal -Expected "System.String"
$m.Params.list_type_str[1] | Assert-Equal -Expected "b"
$m.Params.list_type_str[1].GetType().FullName | Assert-Equal -Expected "System.String"
$m.Params.list_type_str[2] | Assert-Equal -Expected "1"
$m.Params.list_type_str[2].GetType().FullName | Assert-Equal -Expected "System.String"
$m.Params.list_type_str[3] | Assert-Equal -Expected "2"
$m.Params.list_type_str[3].GetType().FullName | Assert-Equal -Expected "System.String"
$m.Params.list_with_int.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.List``1[System.Object]"
$m.Params.list_with_int.Count | Assert-Equal -Expected 2
$m.Params.list_with_int[0] | Assert-Equal -Expected 1
$m.Params.list_with_int[0].GetType().FullName | Assert-Equal -Expected "System.Int32"
$m.Params.list_with_int[1] | Assert-Equal -Expected 2
$m.Params.list_with_int[1].GetType().FullName | Assert-Equal -Expected "System.Int32"
$m.Params.list_type_single.GetType().ToString() | Assert-Equal -Expected "System.Collections.Generic.List``1[System.Object]"
$m.Params.list_type_single.Count | Assert-Equal -Expected 1
$m.Params.list_type_single[0] | Assert-Equal -Expected "single"
$m.Params.list_type_single[0].GetType().FullName | Assert-Equal -Expected "System.String"
$m.Params.list_with_dict.GetType().FullName.StartsWith("System.Collections.Generic.List``1[[System.Object") | Assert-Equal -Expected $true
$m.Params.list_with_dict.Count | Assert-Equal -Expected 3
$m.Params.list_with_dict[0].GetType().FullName.StartsWith("System.Collections.Generic.Dictionary``2[[System.String") | Assert-Equal -Expected $true
$m.Params.list_with_dict[0] | Assert-DictionaryEqual -Expected @{int_type = 2; str_type = "dict entry" }
$m.Params.list_with_dict[0].int_type.GetType().FullName.ToString() | Assert-Equal -Expected "System.Int32"
$m.Params.list_with_dict[0].str_type.GetType().FullName.ToString() | Assert-Equal -Expected "System.String"
$m.Params.list_with_dict[1].GetType().FullName.StartsWith("System.Collections.Generic.Dictionary``2[[System.String") | Assert-Equal -Expected $true
$m.Params.list_with_dict[1] | Assert-DictionaryEqual -Expected @{int_type = 1; str_type = "str_sub_type" }
$m.Params.list_with_dict[1].int_type.GetType().FullName.ToString() | Assert-Equal -Expected "System.Int32"
$m.Params.list_with_dict[1].str_type.GetType().FullName.ToString() | Assert-Equal -Expected "System.String"
$m.Params.list_with_dict[2].GetType().FullName.StartsWith("System.Collections.Generic.Dictionary``2[[System.String") | Assert-Equal -Expected $true
$m.Params.list_with_dict[2] | Assert-DictionaryEqual -Expected @{int_type = $null; str_type = "str_sub_type" }
$m.Params.list_with_dict[2].str_type.GetType().FullName.ToString() | Assert-Equal -Expected "System.String"
$m.Params.path_type | Assert-Equal -Expected "$($env:SystemRoot)\System32"
$m.Params.path_type.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.path_type_nt | Assert-Equal -Expected "\\?\%SystemRoot%\System32"
$m.Params.path_type_nt.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.path_type_missing | Assert-Equal -Expected "T:\missing\path"
$m.Params.path_type_missing.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.raw_type_str | Assert-Equal -Expected "str"
$m.Params.raw_type_str.GetType().FullName | Assert-Equal -Expected "System.String"
$m.Params.raw_type_int | Assert-Equal -Expected 1
$m.Params.raw_type_int.GetType().FullName | Assert-Equal -Expected "System.Int32"
$m.Params.sid_type | Assert-Equal -Expected (New-Object -TypeName System.Security.Principal.SecurityIdentifier -ArgumentList "S-1-5-18")
$m.Params.sid_type.GetType().ToString() | Assert-Equal -Expected "System.Security.Principal.SecurityIdentifier"
$m.Params.sid_from_name | Assert-Equal -Expected (New-Object -TypeName System.Security.Principal.SecurityIdentifier -ArgumentList "S-1-5-18")
$m.Params.sid_from_name.GetType().ToString() | Assert-Equal -Expected "System.Security.Principal.SecurityIdentifier"
$m.Params.str_type | Assert-Equal -Expected "str"
$m.Params.str_type.GetType().ToString() | Assert-Equal -Expected "System.String"
$m.Params.delegate_type | Assert-Equal -Expected 1234
$m.Params.delegate_type.GetType().ToString() | Assert-Equal -Expected "System.UInt64"
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_module_args = @{
option_default = "1"
missing_option_default = $null
string_option = "1"
required_option = "required"
missing_choices = $null
choices = "a"
one_choice = "b"
choice_with_default = "b"
alias_direct = "a"
alias_as_alias = "a"
alias_as_alias2 = "a"
bool_type = $true
bool_from_str = $false
dict_type = @{
int_type = 10
str_type = "str_sub_type"
}
dict_type_missing = $null
dict_type_defaults = @{
int_type = $null
str_type = "str_sub_type"
}
dict_type_json = @{
a = "a"
b = 1
c = @("a", "b")
}
dict_type_str = @{
a = "a"
b = "b 2"
c = "c"
}
float_type = 3.14159
int_type = 0
json_type = $m.Params.json_type.ToString()
json_type_dict = $m.Params.json_type_dict.ToString()
list_type = @("a", "b", 1, 2)
list_type_str = @("a", "b", "1", "2")
list_with_int = @(1, 2)
list_type_single = @("single")
list_with_dict = @(
@{
int_type = 2
str_type = "dict entry"
},
@{
int_type = 1
str_type = "str_sub_type"
},
@{
int_type = $null
str_type = "str_sub_type"
}
)
path_type = "$($env:SystemRoot)\System32"
path_type_nt = "\\?\%SystemRoot%\System32"
path_type_missing = "T:\missing\path"
raw_type_str = "str"
raw_type_int = 1
sid_type = "S-1-5-18"
sid_from_name = "S-1-5-18"
str_type = "str"
delegate_type = 1234
}
$actual.Keys.Count | Assert-Equal -Expected 2
$actual.changed | Assert-Equal -Expected $false
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $expected_module_args }
}
"Parse module args with list elements and delegate type" = {
$spec = @{
options = @{
list_delegate_type = @{
type = "list"
elements = [Func[[Object], [UInt16]]] { [System.UInt16]::Parse($args[0]) }
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
list_delegate_type = @(
"1234",
4321
)
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$m.Params.list_delegate_type.GetType().Name | Assert-Equal -Expected 'List`1'
$m.Params.list_delegate_type[0].GetType().FullName | Assert-Equal -Expected "System.UInt16"
$m.Params.list_delegate_type[1].GetType().FullName | Assert-Equal -Expected "System.UInt16"
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_module_args = @{
list_delegate_type = @(
1234,
4321
)
}
$actual.Keys.Count | Assert-Equal -Expected 2
$actual.changed | Assert-Equal -Expected $false
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $expected_module_args }
}
"Parse module args with case insensitive input" = {
$spec = @{
options = @{
option1 = @{ type = "int"; required = $true }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_module_name = "win_test"
Option1 = "1"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
# Verifies the case of the params key is set to the module spec not actual input
$m.Params.Keys | Assert-Equal -Expected @("option1")
$m.Params.option1 | Assert-Equal -Expected 1
# Verifies the type conversion happens even on a case insensitive match
$m.Params.option1.GetType().FullName | Assert-Equal -Expected "System.Int32"
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_warnings = "Parameters for (win_test) was a case insensitive match: Option1. "
$expected_warnings += "Module options will become case sensitive in a future Ansible release. "
$expected_warnings += "Supported parameters include: option1"
$expected = @{
changed = $false
invocation = @{
module_args = @{
option1 = 1
}
}
# We have disabled the warning for now
#warnings = @($expected_warnings)
}
$actual | Assert-DictionaryEqual -Expected $expected
}
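# Verifies no_log masking: values are masked in the returned invocation data
# and in the event log entry, while $m.Params keeps the raw values internally.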
"No log values" = {
$spec = @{
options = @{
username = @{type = "str" }
password = @{type = "str"; no_log = $true }
password2 = @{type = "int"; no_log = $true }
dict = @{type = "dict" }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_module_name = "test_no_log"
username = "user - pass - name"
password = "pass"
password2 = 1234
dict = @{
data = "Oops this is secret: pass"
dict = @{
pass = "plain"
hide = "pass"
sub_hide = "password"
int_hide = 123456
}
list = @(
"pass",
"password",
1234567,
"pa ss",
@{
pass = "plain"
hide = "pass"
sub_hide = "password"
int_hide = 123456
}
)
custom = "pass"
}
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$m.Result.data = $complex_args.dict
# verify params internally aren't masked
$m.Params.username | Assert-Equal -Expected "user - pass - name"
$m.Params.password | Assert-Equal -Expected "pass"
$m.Params.password2 | Assert-Equal -Expected 1234
$m.Params.dict.custom | Assert-Equal -Expected "pass"
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
# verify no_log params are masked in invocation
$expected = @{
invocation = @{
module_args = @{
password2 = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
dict = @{
dict = @{
pass = "plain"
hide = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
sub_hide = "********word"
int_hide = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
}
custom = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
list = @(
"VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"********word",
"VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"pa ss",
@{
pass = "plain"
hide = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
sub_hide = "********word"
int_hide = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
}
)
data = "Oops this is secret: ********"
}
username = "user - ******** - name"
password = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
}
}
changed = $false
data = $complex_args.dict
}
$actual | Assert-DictionaryEqual -Expected $expected
$expected_event = @'
test_no_log - Invoked with:
username: user - ******** - name
dict: dict: sub_hide: ****word
pass: plain
int_hide: ********56
hide: VALUE_SPECIFIED_IN_NO_LOG_PARAMETER
data: Oops this is secret: ********
custom: VALUE_SPECIFIED_IN_NO_LOG_PARAMETER
list:
- VALUE_SPECIFIED_IN_NO_LOG_PARAMETER
- ********word
- ********567
- pa ss
- sub_hide: ********word
pass: plain
int_hide: ********56
hide: VALUE_SPECIFIED_IN_NO_LOG_PARAMETER
password2: VALUE_SPECIFIED_IN_NO_LOG_PARAMETER
password: VALUE_SPECIFIED_IN_NO_LOG_PARAMETER
'@
$actual_event = (Get-EventLog -LogName Application -Source Ansible -Newest 1).Message
$actual_event | Assert-Equal -Expected $expected_event
}
"No log value with an empty string" = {
$spec = @{
options = @{
password1 = @{type = "str"; no_log = $true }
password2 = @{type = "str"; no_log = $true }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_module_name = "test_no_log"
password1 = ""
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$m.Result.data = $complex_args.dict
# verify params internally aren't masked
$m.Params.password1 | Assert-Equal -Expected ""
$m.Params.password2 | Assert-Equal -Expected $null
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
invocation = @{
module_args = @{
password1 = ""
password2 = $null
}
}
changed = $false
data = $complex_args.dict
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"Removed in version" = {
$spec = @{
options = @{
removed1 = @{removed_in_version = "2.1" }
removed2 = @{removed_in_version = "2.2" }
removed3 = @{removed_in_version = "2.3"; removed_from_collection = "ansible.builtin" }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
removed1 = "value"
removed3 = "value"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
invocation = @{
module_args = @{
removed1 = "value"
removed2 = $null
removed3 = "value"
}
}
deprecations = @(
@{
msg = "Param 'removed3' is deprecated. See the module docs for more information"
version = "2.3"
collection_name = "ansible.builtin"
},
@{
msg = "Param 'removed1' is deprecated. See the module docs for more information"
version = "2.1"
collection_name = $null
}
)
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"Removed at date" = {
$spec = @{
options = @{
removed1 = @{removed_at_date = [DateTime]"2020-03-10" }
removed2 = @{removed_at_date = [DateTime]"2020-03-11" }
removed3 = @{removed_at_date = [DateTime]"2020-06-07"; removed_from_collection = "ansible.builtin" }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
removed1 = "value"
removed3 = "value"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
invocation = @{
module_args = @{
removed1 = "value"
removed2 = $null
removed3 = "value"
}
}
deprecations = @(
@{
msg = "Param 'removed3' is deprecated. See the module docs for more information"
date = "2020-06-07"
collection_name = "ansible.builtin"
},
@{
msg = "Param 'removed1' is deprecated. See the module docs for more information"
date = "2020-03-10"
collection_name = $null
}
)
}
$actual | Assert-DictionaryEqual -Expected $expected
}
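# Exercises deprecated_aliases with both version- and date-based deprecations,
# at the top level and nested inside a dict suboption, with and without an
# explicit collection_name.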
"Deprecated aliases" = {
$spec = @{
options = @{
option1 = @{ type = "str"; aliases = "alias1"; deprecated_aliases = @(@{name = "alias1"; version = "2.10" }) }
option2 = @{ type = "str"; aliases = "alias2"; deprecated_aliases = @(@{name = "alias2"; version = "2.11" }) }
option3 = @{
type = "dict"
options = @{
option1 = @{ type = "str"; aliases = "alias1"; deprecated_aliases = @(@{name = "alias1"; version = "2.10" }) }
option2 = @{ type = "str"; aliases = "alias2"; deprecated_aliases = @(@{name = "alias2"; version = "2.11" }) }
option3 = @{
type = "str"
aliases = "alias3"
deprecated_aliases = @(
@{name = "alias3"; version = "2.12"; collection_name = "ansible.builtin" }
)
}
option4 = @{ type = "str"; aliases = "alias4"; deprecated_aliases = @(@{name = "alias4"; date = [DateTime]"2020-03-11" }) }
option5 = @{ type = "str"; aliases = "alias5"; deprecated_aliases = @(@{name = "alias5"; date = [DateTime]"2020-03-09" }) }
option6 = @{
type = "str"
aliases = "alias6"
deprecated_aliases = @(
@{name = "alias6"; date = [DateTime]"2020-06-01"; collection_name = "ansible.builtin" }
)
}
}
}
option4 = @{ type = "str"; aliases = "alias4"; deprecated_aliases = @(@{name = "alias4"; date = [DateTime]"2020-03-10" }) }
option5 = @{ type = "str"; aliases = "alias5"; deprecated_aliases = @(@{name = "alias5"; date = [DateTime]"2020-03-12" }) }
option6 = @{
type = "str"
aliases = "alias6"
deprecated_aliases = @(
@{name = "alias6"; version = "2.12"; collection_name = "ansible.builtin" }
)
}
option7 = @{
type = "str"
aliases = "alias7"
deprecated_aliases = @(
@{name = "alias7"; date = [DateTime]"2020-06-07"; collection_name = "ansible.builtin" }
)
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
alias1 = "alias1"
option2 = "option2"
option3 = @{
option1 = "option1"
alias2 = "alias2"
alias3 = "alias3"
option4 = "option4"
alias5 = "alias5"
alias6 = "alias6"
}
option4 = "option4"
alias5 = "alias5"
alias6 = "alias6"
alias7 = "alias7"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
invocation = @{
module_args = @{
alias1 = "alias1"
option1 = "alias1"
option2 = "option2"
option3 = @{
option1 = "option1"
option2 = "alias2"
alias2 = "alias2"
option3 = "alias3"
alias3 = "alias3"
option4 = "option4"
option5 = "alias5"
alias5 = "alias5"
option6 = "alias6"
alias6 = "alias6"
}
option4 = "option4"
option5 = "alias5"
alias5 = "alias5"
option6 = "alias6"
alias6 = "alias6"
option7 = "alias7"
alias7 = "alias7"
}
}
deprecations = @(
@{
msg = "Alias 'alias7' is deprecated. See the module docs for more information"
date = "2020-06-07"
collection_name = "ansible.builtin"
},
@{
msg = "Alias 'alias1' is deprecated. See the module docs for more information"
version = "2.10"
collection_name = $null
},
@{
msg = "Alias 'alias5' is deprecated. See the module docs for more information"
date = "2020-03-12"
collection_name = $null
},
@{
msg = "Alias 'alias6' is deprecated. See the module docs for more information"
version = "2.12"
collection_name = "ansible.builtin"
},
@{
msg = "Alias 'alias2' is deprecated. See the module docs for more information - found in option3"
version = "2.11"
collection_name = $null
},
@{
msg = "Alias 'alias5' is deprecated. See the module docs for more information - found in option3"
date = "2020-03-09"
collection_name = $null
},
@{
msg = "Alias 'alias3' is deprecated. See the module docs for more information - found in option3"
version = "2.12"
collection_name = "ansible.builtin"
},
@{
msg = "Alias 'alias6' is deprecated. See the module docs for more information - found in option3"
date = "2020-06-01"
collection_name = "ansible.builtin"
}
)
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"Required by - single value" = {
$spec = @{
options = @{
option1 = @{type = "str" }
option2 = @{type = "str" }
option3 = @{type = "str" }
}
required_by = @{
option1 = "option2"
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "option1"
option2 = "option2"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
invocation = @{
module_args = @{
option1 = "option1"
option2 = "option2"
option3 = $null
}
}
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"Required by - multiple values" = {
$spec = @{
options = @{
option1 = @{type = "str" }
option2 = @{type = "str" }
option3 = @{type = "str" }
}
required_by = @{
option1 = "option2", "option3"
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "option1"
option2 = "option2"
option3 = "option3"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
invocation = @{
module_args = @{
option1 = "option1"
option2 = "option2"
option3 = "option3"
}
}
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"Required by explicit null" = {
$spec = @{
options = @{
option1 = @{type = "str" }
option2 = @{type = "str" }
option3 = @{type = "str" }
}
required_by = @{
option1 = "option2"
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "option1"
option2 = $null
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
invocation = @{
module_args = @{
option1 = "option1"
option2 = $null
option3 = $null
}
}
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"Required by failed - single value" = {
$spec = @{
options = @{
option1 = @{type = "str" }
option2 = @{type = "str" }
option3 = @{type = "str" }
}
required_by = @{
option1 = "option2"
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "option1"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
failed = $true
invocation = @{
module_args = @{
option1 = "option1"
}
}
msg = "missing parameter(s) required by 'option1': option2"
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"Required by failed - multiple values" = {
$spec = @{
options = @{
option1 = @{type = "str" }
option2 = @{type = "str" }
option3 = @{type = "str" }
}
required_by = @{
option1 = "option2", "option3"
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "option1"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
failed = $true
invocation = @{
module_args = @{
option1 = "option1"
}
}
msg = "missing parameter(s) required by 'option1': option2, option3"
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"Debug without debug set" = {
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_debug = $false
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$m.Debug("debug message")
$actual_event = (Get-EventLog -LogName Application -Source Ansible -Newest 1).Message
$actual_event | Assert-Equal -Expected "undefined win module - Invoked with:`r`n "
}
"Debug with debug set" = {
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_debug = $true
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$m.Debug("debug message")
$actual_event = (Get-EventLog -LogName Application -Source Ansible -Newest 1).Message
$actual_event | Assert-Equal -Expected "undefined win module - [DEBUG] debug message"
}
"Deprecate and warn with version" = {
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$m.Deprecate("message", "2.7")
$actual_deprecate_event_1 = Get-EventLog -LogName Application -Source Ansible -Newest 1
$m.Deprecate("message w collection", "2.8", "ansible.builtin")
$actual_deprecate_event_2 = Get-EventLog -LogName Application -Source Ansible -Newest 1
$m.Warn("warning")
$actual_warn_event = Get-EventLog -LogName Application -Source Ansible -Newest 1
$actual_deprecate_event_1.Message | Assert-Equal -Expected "undefined win module - [DEPRECATION WARNING] message 2.7"
$actual_deprecate_event_2.Message | Assert-Equal -Expected "undefined win module - [DEPRECATION WARNING] message w collection 2.8"
$actual_warn_event.EntryType | Assert-Equal -Expected "Warning"
$actual_warn_event.Message | Assert-Equal -Expected "undefined win module - [WARNING] warning"
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
invocation = @{
module_args = @{}
}
warnings = @("warning")
deprecations = @(
@{msg = "message"; version = "2.7"; collection_name = $null },
@{msg = "message w collection"; version = "2.8"; collection_name = "ansible.builtin" }
)
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"Deprecate and warn with date" = {
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$m.Deprecate("message", [DateTime]"2020-01-01")
$actual_deprecate_event_1 = Get-EventLog -LogName Application -Source Ansible -Newest 1
$m.Deprecate("message w collection", [DateTime]"2020-01-02", "ansible.builtin")
$actual_deprecate_event_2 = Get-EventLog -LogName Application -Source Ansible -Newest 1
$m.Warn("warning")
$actual_warn_event = Get-EventLog -LogName Application -Source Ansible -Newest 1
$actual_deprecate_event_1.Message | Assert-Equal -Expected "undefined win module - [DEPRECATION WARNING] message 2020-01-01"
$actual_deprecate_event_2.Message | Assert-Equal -Expected "undefined win module - [DEPRECATION WARNING] message w collection 2020-01-02"
$actual_warn_event.EntryType | Assert-Equal -Expected "Warning"
$actual_warn_event.Message | Assert-Equal -Expected "undefined win module - [WARNING] warning"
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
invocation = @{
module_args = @{}
}
warnings = @("warning")
deprecations = @(
@{msg = "message"; date = "2020-01-01"; collection_name = $null },
@{msg = "message w collection"; date = "2020-01-02"; collection_name = "ansible.builtin" }
)
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"FailJson with message" = {
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$failed = $false
try {
$m.FailJson("fail message")
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
invocation = @{
module_args = @{}
}
failed = $true
msg = "fail message"
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"FailJson with Exception" = {
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
try {
[System.IO.Path]::GetFullPath($null)
}
catch {
$excp = $_.Exception
}
$failed = $false
try {
$m.FailJson("fail message", $excp)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
invocation = @{
module_args = @{}
}
failed = $true
msg = "fail message"
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"FailJson with ErrorRecord" = {
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
try {
Get-Item -LiteralPath $null
}
catch {
$error_record = $_
}
$failed = $false
try {
$m.FailJson("fail message", $error_record)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
invocation = @{
module_args = @{}
}
failed = $true
msg = "fail message"
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"FailJson with Exception and verbosity 3" = {
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_verbosity = 3
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
try {
[System.IO.Path]::GetFullPath($null)
}
catch {
$excp = $_.Exception
}
$failed = $false
try {
$m.FailJson("fail message", $excp)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$actual.changed | Assert-Equal -Expected $false
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = @{} }
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected "fail message"
$expected = 'System.Management.Automation.MethodInvocationException: Exception calling "GetFullPath" with "1" argument(s)'
$actual.exception.Contains($expected) | Assert-Equal -Expected $true
}
"FailJson with ErrorRecord and verbosity 3" = {
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_verbosity = 3
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
try {
Get-Item -LiteralPath $null
}
catch {
$error_record = $_
}
$failed = $false
try {
$m.FailJson("fail message", $error_record)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$actual.changed | Assert-Equal -Expected $false
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = @{} }
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected "fail message"
$actual.exception.Contains("Cannot bind argument to parameter 'LiteralPath' because it is null") | Assert-Equal -Expected $true
$actual.exception.Contains("+ Get-Item -LiteralPath `$null") | Assert-Equal -Expected $true
$actual.exception.Contains("ScriptStackTrace:") | Assert-Equal -Expected $true
}
"Diff entry without diff set" = {
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$m.Diff.before = @{a = "a" }
$m.Diff.after = @{b = "b" }
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
invocation = @{
module_args = @{}
}
}
$actual | Assert-DictionaryEqual -Expected $expected
}
"Diff entry with diff set" = {
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_diff = $true
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$m.Diff.before = @{a = "a" }
$m.Diff.after = @{b = "b" }
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected = @{
changed = $false
invocation = @{
module_args = @{}
}
diff = @{
before = @{a = "a" }
after = @{b = "b" }
}
}
$actual | Assert-DictionaryEqual -Expected $expected
}
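# Verifies the truthy/falsey spellings ParseBool accepts and that anything
# else raises an error.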
"ParseBool tests" = {
$mapping = New-Object -TypeName 'System.Collections.Generic.Dictionary`2[[Object], [Bool]]'
$mapping.Add("y", $true)
$mapping.Add("Y", $true)
$mapping.Add("yes", $true)
$mapping.Add("Yes", $true)
$mapping.Add("on", $true)
$mapping.Add("On", $true)
$mapping.Add("1", $true)
$mapping.Add(1, $true)
$mapping.Add("true", $true)
$mapping.Add("True", $true)
$mapping.Add("t", $true)
$mapping.Add("T", $true)
$mapping.Add("1.0", $true)
$mapping.Add(1.0, $true)
$mapping.Add($true, $true)
$mapping.Add("n", $false)
$mapping.Add("N", $false)
$mapping.Add("no", $false)
$mapping.Add("No", $false)
$mapping.Add("off", $false)
$mapping.Add("Off", $false)
$mapping.Add("0", $false)
$mapping.Add(0, $false)
$mapping.Add("false", $false)
$mapping.Add("False", $false)
$mapping.Add("f", $false)
$mapping.Add("F", $false)
$mapping.Add("0.0", $false)
$mapping.Add(0.0, $false)
$mapping.Add($false, $false)
foreach ($map in $mapping.GetEnumerator()) {
$expected = $map.Value
$actual = [Ansible.Basic.AnsibleModule]::ParseBool($map.Key)
$actual | Assert-Equal -Expected $expected
$actual.GetType().FullName | Assert-Equal -Expected "System.Boolean"
}
$fail_bools = @(
"falsey",
"abc",
2,
"2",
-1
)
foreach ($fail_bool in $fail_bools) {
$failed = $false
try {
[Ansible.Basic.AnsibleModule]::ParseBool($fail_bool)
}
catch {
$failed = $true
$_.Exception.Message.Contains("The value '$fail_bool' is not a valid boolean") | Assert-Equal -Expected $true
}
$failed | Assert-Equal -Expected $true
}
}
"Unknown internal key" = {
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_invalid = "invalid"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$expected = @{
invocation = @{
module_args = @{
_ansible_invalid = "invalid"
}
}
changed = $false
failed = $true
msg = "Unsupported parameters for (undefined win module) module: _ansible_invalid. Supported parameters include: "
}
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
$actual | Assert-DictionaryEqual -Expected $expected
}
$failed | Assert-Equal -Expected $true
}
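# Tmpdir creation and cleanup tests: present/missing remote_tmp,
# keep_remote_files, symlinks, and locked or undeletable files.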
"Module tmpdir with present remote tmp" = {
$current_user = [System.Security.Principal.WindowsIdentity]::GetCurrent().User
$dir_security = New-Object -TypeName System.Security.AccessControl.DirectorySecurity
$dir_security.SetOwner($current_user)
$dir_security.SetAccessRuleProtection($true, $false)
$ace = New-Object -TypeName System.Security.AccessControl.FileSystemAccessRule -ArgumentList @(
$current_user, [System.Security.AccessControl.FileSystemRights]::FullControl,
[System.Security.AccessControl.InheritanceFlags]"ContainerInherit, ObjectInherit",
[System.Security.AccessControl.PropagationFlags]::None, [System.Security.AccessControl.AccessControlType]::Allow
)
$dir_security.AddAccessRule($ace)
$expected_sd = $dir_security.GetSecurityDescriptorSddlForm("Access, Owner")
$remote_tmp = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)"
New-Item -Path $remote_tmp -ItemType Directory > $null
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_remote_tmp = $remote_tmp.ToString()
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
(Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
$actual_tmpdir = $m.Tmpdir
$parent_tmpdir = Split-Path -Path $actual_tmpdir -Parent
$tmpdir_name = Split-Path -Path $actual_tmpdir -Leaf
$parent_tmpdir | Assert-Equal -Expected $remote_tmp
$tmpdir_name.StartsWith("ansible-moduletmp-") | Assert-Equal -Expected $true
(Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true
(Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
$children = [System.IO.Directory]::EnumerateDirectories($remote_tmp)
$children.Count | Assert-Equal -Expected 1
$actual_tmpdir_sd = (Get-Acl -Path $actual_tmpdir).GetSecurityDescriptorSddlForm("Access, Owner")
$actual_tmpdir_sd | Assert-Equal -Expected $expected_sd
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
(Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $false
(Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
$output.warnings.Count | Assert-Equal -Expected 0
}
"Module tmpdir with missing remote_tmp" = {
$current_user = [System.Security.Principal.WindowsIdentity]::GetCurrent().User
$dir_security = New-Object -TypeName System.Security.AccessControl.DirectorySecurity
$dir_security.SetOwner($current_user)
$dir_security.SetAccessRuleProtection($true, $false)
$ace = New-Object -TypeName System.Security.AccessControl.FileSystemAccessRule -ArgumentList @(
$current_user, [System.Security.AccessControl.FileSystemRights]::FullControl,
[System.Security.AccessControl.InheritanceFlags]"ContainerInherit, ObjectInherit",
[System.Security.AccessControl.PropagationFlags]::None, [System.Security.AccessControl.AccessControlType]::Allow
)
$dir_security.AddAccessRule($ace)
$expected_sd = $dir_security.GetSecurityDescriptorSddlForm("Access, Owner")
$remote_tmp = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)"
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_remote_tmp = $remote_tmp.ToString()
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
(Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $false
$actual_tmpdir = $m.Tmpdir
$parent_tmpdir = Split-Path -Path $actual_tmpdir -Parent
$tmpdir_name = Split-Path -Path $actual_tmpdir -Leaf
$parent_tmpdir | Assert-Equal -Expected $remote_tmp
$tmpdir_name.StartsWith("ansible-moduletmp-") | Assert-Equal -Expected $true
(Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true
(Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
$children = [System.IO.Directory]::EnumerateDirectories($remote_tmp)
$children.Count | Assert-Equal -Expected 1
$actual_remote_sd = (Get-Acl -Path $remote_tmp).GetSecurityDescriptorSddlForm("Access, Owner")
$actual_tmpdir_sd = (Get-Acl -Path $actual_tmpdir).GetSecurityDescriptorSddlForm("Access, Owner")
$actual_remote_sd | Assert-Equal -Expected $expected_sd
$actual_tmpdir_sd | Assert-Equal -Expected $expected_sd
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
(Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $false
(Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
$output.warnings.Count | Assert-Equal -Expected 1
$nt_account = $current_user.Translate([System.Security.Principal.NTAccount])
$actual_warning = "Module remote_tmp $remote_tmp did not exist and was created with FullControl to $nt_account, "
$actual_warning += "this may cause issues when running as another user. To avoid this, "
$actual_warning += "create the remote_tmp dir with the correct permissions manually"
$actual_warning | Assert-Equal -Expected $output.warnings[0]
}
"Module tmp, keep remote files" = {
$remote_tmp = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)"
New-Item -Path $remote_tmp -ItemType Directory > $null
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_remote_tmp = $remote_tmp.ToString()
_ansible_keep_remote_files = $true
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$actual_tmpdir = $m.Tmpdir
$parent_tmpdir = Split-Path -Path $actual_tmpdir -Parent
$tmpdir_name = Split-Path -Path $actual_tmpdir -Leaf
$parent_tmpdir | Assert-Equal -Expected $remote_tmp
$tmpdir_name.StartsWith("ansible-moduletmp-") | Assert-Equal -Expected $true
(Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true
(Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
(Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true
(Test-Path -LiteralPath $remote_tmp -PathType Container) | Assert-Equal -Expected $true
$output.warnings.Count | Assert-Equal -Expected 0
Remove-Item -LiteralPath $actual_tmpdir -Force -Recurse
}
"Module tmpdir with symlinks" = {
$remote_tmp = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)"
New-Item -Path $remote_tmp -ItemType Directory > $null
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_remote_tmp = $remote_tmp.ToString()
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$actual_tmpdir = $m.Tmpdir
$dir1 = Join-Path $actual_tmpdir Dir1
$dir2 = Join-Path $actual_tmpdir Dir2
$dir1, $dir2 | New-Item -Path { $_ } -ItemType Directory > $null
$file1 = Join-Path $dir1 test.txt
$file2 = Join-Path $dir2 test.txt
$file3 = Join-Path $actual_tmpdir test.txt
Set-Content -LiteralPath $file1 ''
Set-Content -LiteralPath $file2 ''
Set-Content -LiteralPath $file3 ''
$outside_target = Join-Path -Path $tmpdir -ChildPath "moduleoutsidedir-$(Get-Random)"
$outside_file = Join-Path -Path $outside_target -ChildPath "file"
New-Item -Path $outside_target -ItemType Directory > $null
Set-Content -LiteralPath $outside_file ''
cmd.exe /c mklink /d "$dir1\missing-dir-link" "$actual_tmpdir\fake"
cmd.exe /c mklink /d "$dir1\good-dir-link" "$dir2"
cmd.exe /c mklink /d "$dir1\recursive-target-link" "$dir1"
cmd.exe /c mklink "$dir1\missing-file-link" "$actual_tmpdir\fake"
cmd.exe /c mklink "$dir1\good-file-link" "$dir2\test.txt"
cmd.exe /c mklink /d "$actual_tmpdir\outside-dir" $outside_target
cmd.exe /c mklink "$actual_tmpdir\outside-file" $outside_file
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$output.warnings.Count | Assert-Equal -Expected 0
(Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $false
(Test-Path -LiteralPath $outside_target -PathType Container) | Assert-Equal -Expected $true
(Test-Path -LiteralPath $outside_file -PathType Leaf) | Assert-Equal -Expected $true
Remove-Item -LiteralPath $remote_tmp -Force -Recurse
}
"Module tmpdir with undeletable file" = {
$remote_tmp = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)"
New-Item -Path $remote_tmp -ItemType Directory > $null
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_remote_tmp = $remote_tmp.ToString()
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$actual_tmpdir = $m.Tmpdir
$dir1 = Join-Path $actual_tmpdir Dir1
$dir2 = Join-Path $actual_tmpdir Dir2
$dir1, $dir2 | New-Item -Path { $_ } -ItemType Directory > $null
$file1 = Join-Path $dir1 test.txt
$file2 = Join-Path $dir2 test.txt
$file3 = Join-Path $actual_tmpdir test.txt
Set-Content -LiteralPath $file1 ''
Set-Content -LiteralPath $file2 ''
Set-Content -LiteralPath $file3 ''
$fs = [System.IO.File]::Open($file1, "Open", "Read", "None")
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$expected_msg = "Failure cleaning temp path '$actual_tmpdir': IOException Directory contains files still open by other processes"
$output.warnings.Count | Assert-Equal -Expected 1
$output.warnings[0] | Assert-Equal -Expected $expected_msg
(Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true
(Test-Path -LiteralPath $dir1 -PathType Container) | Assert-Equal -Expected $true
# Test-Path tries to open the file in a way that fails if it's marked as deleted
(Get-ChildItem -LiteralPath $dir1 -File).Count | Assert-Equal -Expected 1
(Test-Path -LiteralPath $dir2 -PathType Container) | Assert-Equal -Expected $false
(Test-Path -LiteralPath $file3 -PathType Leaf) | Assert-Equal -Expected $false
# Releasing the file handle releases the lock on the file, but because the
# cleanup couldn't access the file to mark it as delete-on-close, the file
# will still be present.
$fs.Dispose()
(Test-Path -LiteralPath $dir1 -PathType Container) | Assert-Equal -Expected $true
(Test-Path -LiteralPath $file1 -PathType Leaf) | Assert-Equal -Expected $true
Remove-Item -LiteralPath $remote_tmp -Force -Recurse
}
"Module tmpdir delete with locked handle" = {
$remote_tmp = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)"
New-Item -Path $remote_tmp -ItemType Directory > $null
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_remote_tmp = $remote_tmp.ToString()
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
$actual_tmpdir = $m.Tmpdir
$dir1 = Join-Path $actual_tmpdir Dir1
$dir2 = Join-Path $actual_tmpdir Dir2
$dir1, $dir2 | New-Item -Path { $_ } -ItemType Directory > $null
$file1 = Join-Path $dir1 test.txt
$file2 = Join-Path $dir2 test.txt
$file3 = Join-Path $actual_tmpdir test.txt
Set-Content -LiteralPath $file1 ''
Set-Content -LiteralPath $file2 ''
Set-Content -LiteralPath $file3 ''
[System.IO.File]::SetAttributes($file1, "ReadOnly")
[System.IO.File]::SetAttributes($file2, "ReadOnly")
[System.IO.File]::SetAttributes($file3, "ReadOnly")
$fs = [System.IO.File]::Open($file1, "Open", "Read", "Delete")
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
if ([System.Environment]::OSVersion.Version -lt [Version]'10.0') {
# Older hosts can only do delete on close. This means Dir1 and its
# file will still be present but Dir2 should be deleted.
$expected_msg = "Failure cleaning temp path '$actual_tmpdir': IOException Directory contains files still open by other processes"
$output.warnings.Count | Assert-Equal -Expected 1
$output.warnings[0] | Assert-Equal -Expected $expected_msg
(Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true
(Test-Path -LiteralPath $dir1 -PathType Container) | Assert-Equal -Expected $true
# Test-Path tries to open the file in a way that fails if it's marked as deleted
(Get-ChildItem -LiteralPath $dir1 -File).Count | Assert-Equal -Expected 1
(Test-Path -LiteralPath $dir2 -PathType Container) | Assert-Equal -Expected $false
(Test-Path -LiteralPath $file3 -PathType Leaf) | Assert-Equal -Expected $false
# Releasing the file handle releases the lock on the file, deleting it.
# Unfortunately the parent dir will still be present.
$fs.Dispose()
(Test-Path -LiteralPath $dir1 -PathType Container) | Assert-Equal -Expected $true
(Test-Path -LiteralPath $file1 -PathType Leaf) | Assert-Equal -Expected $false
}
else {
# Server 2016+ can use the POSIX APIs which will delete it straight away
(Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $false
$output.warnings.Count | Assert-Equal -Expected 0
$fs.Dispose()
}
Remove-Item -LiteralPath $remote_tmp -Force -Recurse
}
"Invalid argument spec key" = {
$spec = @{
invalid = $true
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "internal error: argument spec entry contains an invalid key 'invalid', valid keys: apply_defaults, "
$expected_msg += "aliases, choices, default, deprecated_aliases, elements, mutually_exclusive, no_log, options, "
$expected_msg += "removed_in_version, removed_at_date, removed_from_collection, required, required_by, required_if, "
$expected_msg += "required_one_of, required_together, supports_check_mode, type"
$actual.Keys.Count | Assert-Equal -Expected 3
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid argument spec key - nested" = {
$spec = @{
options = @{
option_key = @{
options = @{
sub_option_key = @{
invalid = $true
}
}
}
}
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "internal error: argument spec entry contains an invalid key 'invalid', valid keys: apply_defaults, "
$expected_msg += "aliases, choices, default, deprecated_aliases, elements, mutually_exclusive, no_log, options, "
$expected_msg += "removed_in_version, removed_at_date, removed_from_collection, required, required_by, required_if, "
$expected_msg += "required_one_of, required_together, supports_check_mode, type - found in option_key -> sub_option_key"
$actual.Keys.Count | Assert-Equal -Expected 3
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid argument spec value type" = {
$spec = @{
apply_defaults = "abc"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "internal error: argument spec for 'apply_defaults' did not match expected "
$expected_msg += "type System.Boolean: actual type System.String"
$actual.Keys.Count | Assert-Equal -Expected 3
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid argument spec option type" = {
$spec = @{
options = @{
option_key = @{
type = "invalid type"
}
}
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "internal error: type 'invalid type' is unsupported - found in option_key. "
$expected_msg += "Valid types are: bool, dict, float, int, json, list, path, raw, sid, str"
$actual.Keys.Count | Assert-Equal -Expected 3
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid argument spec option element type" = {
$spec = @{
options = @{
option_key = @{
type = "list"
elements = "invalid type"
}
}
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "internal error: elements 'invalid type' is unsupported - found in option_key. "
$expected_msg += "Valid types are: bool, dict, float, int, json, list, path, raw, sid, str"
$actual.Keys.Count | Assert-Equal -Expected 3
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid deprecated aliases entry - no version and date" = {
$spec = @{
options = @{
option_key = @{
type = "str"
aliases = , "alias_name"
deprecated_aliases = @(
@{name = "alias_name" }
)
}
}
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "internal error: One of version or date is required in a deprecated_aliases entry"
$actual.Keys.Count | Assert-Equal -Expected 3
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid deprecated aliases entry - no name (nested)" = {
$spec = @{
options = @{
option_key = @{
type = "dict"
options = @{
sub_option_key = @{
type = "str"
aliases = , "alias_name"
deprecated_aliases = @(
@{version = "2.10" }
)
}
}
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = @{
sub_option_key = "a"
}
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.ArgumentException] {
$failed = $true
$expected_msg = "name is required in a deprecated_aliases entry - found in option_key"
$_.Exception.Message | Assert-Equal -Expected $expected_msg
}
$failed | Assert-Equal -Expected $true
}
"Invalid deprecated aliases entry - both version and date" = {
$spec = @{
options = @{
option_key = @{
type = "str"
aliases = , "alias_name"
deprecated_aliases = @(
@{
name = "alias_name"
date = [DateTime]"2020-03-10"
version = "2.11"
}
)
}
}
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "internal error: Only one of version or date is allowed in a deprecated_aliases entry"
$actual.Keys.Count | Assert-Equal -Expected 3
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Invalid deprecated aliases entry - wrong date type" = {
$spec = @{
options = @{
option_key = @{
type = "str"
aliases = , "alias_name"
deprecated_aliases = @(
@{
name = "alias_name"
date = "2020-03-10"
}
)
}
}
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "internal error: A deprecated_aliases date must be a DateTime object"
$actual.Keys.Count | Assert-Equal -Expected 3
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Spec required and default set at the same time" = {
$spec = @{
options = @{
option_key = @{
required = $true
default = "default value"
}
}
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "internal error: required and default are mutually exclusive for option_key"
$actual.Keys.Count | Assert-Equal -Expected 3
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
("exception" -cin $actual.Keys) | Assert-Equal -Expected $true
}
"Unsupported options" = {
$spec = @{
options = @{
option_key = @{
type = "str"
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = "abc"
invalid_key = "def"
another_key = "ghi"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "Unsupported parameters for (undefined win module) module: another_key, invalid_key. "
$expected_msg += "Supported parameters include: option_key"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Unsupported options with ignore" = {
$spec = @{
options = @{
option_key = @{
type = "str"
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = "abc"
invalid_key = "def"
another_key = "ghi"
_ansible_ignore_unknown_opts = $true
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$m.Params | Assert-DictionaryEqual -Expected @{ option_key = "abc"; invalid_key = "def"; another_key = "ghi" }
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$output.Keys.Count | Assert-Equal -Expected 2
$output.changed | Assert-Equal -Expected $false
$output.invocation | Assert-DictionaryEqual -Expected @{module_args = @{option_key = "abc"; invalid_key = "def"; another_key = "ghi" } }
}
"Check mode and module doesn't support check mode" = {
$spec = @{
options = @{
option_key = @{
type = "str"
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_check_mode = $true
option_key = "abc"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "remote module (undefined win module) does not support check mode"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.skipped | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = @{option_key = "abc" } }
}
"Check mode with suboption without supports_check_mode" = {
$spec = @{
options = @{
sub_options = @{
# This tests the situation where a sub key doesn't set supports_check_mode; the logic in
# Ansible.Basic automatically sets that to $false and we want it to be ignored for a nested check
type = "dict"
options = @{
sub_option = @{ type = "str"; default = "value" }
}
}
}
supports_check_mode = $true
}
Set-Variable -Name complex_args -Scope Global -Value @{
_ansible_check_mode = $true
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$m.CheckMode | Assert-Equal -Expected $true
}
"Type conversion error" = {
$spec = @{
options = @{
option_key = @{
type = "int"
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = "a"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "argument for option_key is of type System.String and we were unable to convert to int: "
$expected_msg += "Input string was not in a correct format."
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Type conversion error - delegate" = {
$spec = @{
options = @{
option_key = @{
type = "dict"
options = @{
sub_option_key = @{
type = [Func[[Object], [UInt64]]] { [System.UInt64]::Parse($args[0]) }
}
}
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = @{
sub_option_key = "a"
}
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "argument for sub_option_key is of type System.String and we were unable to convert to delegate: "
$expected_msg += "Exception calling `"Parse`" with `"1`" argument(s): `"Input string was not in a correct format.`" "
$expected_msg += "found in option_key"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Numeric choices" = {
$spec = @{
options = @{
option_key = @{
choices = 1, 2, 3
type = "int"
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = "2"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$output.Keys.Count | Assert-Equal -Expected 2
$output.changed | Assert-Equal -Expected $false
$output.invocation | Assert-DictionaryEqual -Expected @{module_args = @{option_key = 2 } }
}
"Case insensitive choice" = {
$spec = @{
options = @{
option_key = @{
choices = "abc", "def"
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = "ABC"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$expected_warning = "value of option_key was a case insensitive match of one of: abc, def. "
$expected_warning += "Checking of choices will be case sensitive in a future Ansible release. "
$expected_warning += "Case insensitive matches were: ABC"
$output.invocation | Assert-DictionaryEqual -Expected @{module_args = @{option_key = "ABC" } }
# We have disabled the warnings for now
#$output.warnings.Count | Assert-Equal -Expected 1
#$output.warnings[0] | Assert-Equal -Expected $expected_warning
}
"Case insensitive choice no_log" = {
$spec = @{
options = @{
option_key = @{
choices = "abc", "def"
no_log = $true
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = "ABC"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$expected_warning = "value of option_key was a case insensitive match of one of: abc, def. "
$expected_warning += "Checking of choices will be case sensitive in a future Ansible release. "
$expected_warning += "Case insensitive matches were: VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
$output.invocation | Assert-DictionaryEqual -Expected @{module_args = @{option_key = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" } }
# We have disabled the warnings for now
#$output.warnings.Count | Assert-Equal -Expected 1
#$output.warnings[0] | Assert-Equal -Expected $expected_warning
}
"Case insentitive choice as list" = {
$spec = @{
options = @{
option_key = @{
choices = "abc", "def", "ghi", "JKL"
type = "list"
elements = "str"
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = "AbC", "ghi", "jkl"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$expected_warning = "value of option_key was a case insensitive match of one or more of: abc, def, ghi, JKL. "
$expected_warning += "Checking of choices will be case sensitive in a future Ansible release. "
$expected_warning += "Case insensitive matches were: AbC, jkl"
$output.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
# We have disabled the warnings for now
#$output.warnings.Count | Assert-Equal -Expected 1
#$output.warnings[0] | Assert-Equal -Expected $expected_warning
}
"Invalid choice" = {
$spec = @{
options = @{
option_key = @{
choices = "a", "b"
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = "c"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "value of option_key must be one of: a, b. Got no match for: c"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Invalid choice with no_log" = {
$spec = @{
options = @{
option_key = @{
choices = "a", "b"
no_log = $true
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = "abc"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "value of option_key must be one of: a, b. Got no match for: ********"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = @{option_key = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" } }
}
"Invalid choice in list" = {
$spec = @{
options = @{
option_key = @{
choices = "a", "b"
type = "list"
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = "a", "c"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "value of option_key must be one or more of: a, b. Got no match for: c"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Mutually exclusive options" = {
$spec = @{
options = @{
option1 = @{}
option2 = @{}
}
mutually_exclusive = @(, @("option1", "option2"))
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "a"
option2 = "b"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "parameters are mutually exclusive: option1, option2"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Missing required argument" = {
$spec = @{
options = @{
option1 = @{}
option2 = @{required = $true }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "a"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "missing required arguments: option2"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Missing required argument subspec - no value defined" = {
$spec = @{
options = @{
option_key = @{
type = "dict"
options = @{
sub_option_key = @{
required = $true
}
}
}
}
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$actual.Keys.Count | Assert-Equal -Expected 2
$actual.changed | Assert-Equal -Expected $false
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Missing required argument subspec" = {
$spec = @{
options = @{
option_key = @{
type = "dict"
options = @{
sub_option_key = @{
required = $true
}
another_key = @{}
}
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = @{
another_key = "abc"
}
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "missing required arguments: sub_option_key found in option_key"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required together not set" = {
$spec = @{
options = @{
option1 = @{}
option2 = @{}
}
required_together = @(, @("option1", "option2"))
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "abc"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "parameters are required together: option1, option2"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required together not set - subspec" = {
$spec = @{
options = @{
option_key = @{
type = "dict"
options = @{
option1 = @{}
option2 = @{}
}
required_together = @(, @("option1", "option2"))
}
another_option = @{}
}
required_together = @(, @("option_key", "another_option"))
}
Set-Variable -Name complex_args -Scope Global -Value @{
option_key = @{
option1 = "abc"
}
another_option = "def"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "parameters are required together: option1, option2 found in option_key"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required one of not set" = {
$spec = @{
options = @{
option1 = @{}
option2 = @{}
option3 = @{}
}
required_one_of = @(@("option1", "option2"), @("option2", "option3"))
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "abc"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "one of the following is required: option2, option3"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required if invalid entries" = {
$spec = @{
options = @{
state = @{choices = "absent", "present"; default = "present" }
path = @{type = "path" }
}
required_if = @(, @("state", "absent"))
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "internal error: invalid required_if value count of 2, expecting 3 or 4 entries"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required if no missing option" = {
$spec = @{
options = @{
state = @{choices = "absent", "present"; default = "present" }
name = @{}
path = @{type = "path" }
}
required_if = @(, @("state", "absent", @("name", "path")))
}
Set-Variable -Name complex_args -Scope Global -Value @{
name = "abc"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$actual.Keys.Count | Assert-Equal -Expected 2
$actual.changed | Assert-Equal -Expected $false
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required if missing option" = {
$spec = @{
options = @{
state = @{choices = "absent", "present"; default = "present" }
name = @{}
path = @{type = "path" }
}
required_if = @(, @("state", "absent", @("name", "path")))
}
Set-Variable -Name complex_args -Scope Global -Value @{
state = "absent"
name = "abc"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "state is absent but all of the following are missing: path"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required if missing option and required one is set" = {
$spec = @{
options = @{
state = @{choices = "absent", "present"; default = "present" }
name = @{}
path = @{type = "path" }
}
required_if = @(, @("state", "absent", @("name", "path"), $true))
}
Set-Variable -Name complex_args -Scope Global -Value @{
state = "absent"
}
$failed = $false
try {
$null = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$expected_msg = "state is absent but any of the following are missing: name, path"
$actual.Keys.Count | Assert-Equal -Expected 4
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected $expected_msg
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Required if missing option but one required set" = {
$spec = @{
options = @{
state = @{choices = "absent", "present"; default = "present" }
name = @{}
path = @{type = "path" }
}
required_if = @(, @("state", "absent", @("name", "path"), $true))
}
Set-Variable -Name complex_args -Scope Global -Value @{
state = "absent"
name = "abc"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$actual.Keys.Count | Assert-Equal -Expected 2
$actual.changed | Assert-Equal -Expected $false
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"PS Object in return result" = {
$m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
# JavaScriptSerializer struggles with PS objects like PSCustomObject due to circular references; this test makes
# sure we can handle these types of objects without bombing
$m.Result.output = [PSCustomObject]@{a = "a"; b = "b" }
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$actual.Keys.Count | Assert-Equal -Expected 3
$actual.changed | Assert-Equal -Expected $false
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = @{} }
$actual.output | Assert-DictionaryEqual -Expected @{a = "a"; b = "b" }
}
"String json array to object" = {
$input_json = '["abc", "def"]'
$actual = [Ansible.Basic.AnsibleModule]::FromJson($input_json)
$actual -is [Array] | Assert-Equal -Expected $true
$actual.Length | Assert-Equal -Expected 2
$actual[0] | Assert-Equal -Expected "abc"
$actual[1] | Assert-Equal -Expected "def"
}
"String json array of dictionaries to object" = {
$input_json = '[{"abc":"def"}]'
$actual = [Ansible.Basic.AnsibleModule]::FromJson($input_json)
$actual -is [Array] | Assert-Equal -Expected $true
$actual.Length | Assert-Equal -Expected 1
$actual[0] | Assert-DictionaryEqual -Expected @{"abc" = "def" }
}
"Spec with fragments" = {
$spec = @{
options = @{
option1 = @{ type = "str" }
}
}
$fragment1 = @{
options = @{
option2 = @{ type = "str" }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "option1"
option2 = "option2"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec, @($fragment1))
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$actual.changed | Assert-Equal -Expected $false
$actual.invocation | Assert-DictionaryEqual -Expected @{module_args = $complex_args }
}
"Fragment spec that with a deprecated alias" = {
$spec = @{
options = @{
option1 = @{
aliases = @("alias1_spec")
type = "str"
deprecated_aliases = @(
@{name = "alias1_spec"; version = "2.0" }
)
}
option2 = @{
aliases = @("alias2_spec")
deprecated_aliases = @(
@{name = "alias2_spec"; version = "2.0"; collection_name = "ansible.builtin" }
)
}
}
}
$fragment1 = @{
options = @{
option1 = @{
aliases = @("alias1")
deprecated_aliases = @() # Makes sure it doesn't overwrite the spec, just adds to it.
}
option2 = @{
aliases = @("alias2")
deprecated_aliases = @(
@{name = "alias2"; version = "2.0"; collection_name = "foo.bar" }
)
type = "str"
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
alias1_spec = "option1"
alias2 = "option2"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec, @($fragment1))
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$actual.deprecations.Count | Assert-Equal -Expected 2
$actual.deprecations[0] | Assert-DictionaryEqual -Expected @{
msg = "Alias 'alias1_spec' is deprecated. See the module docs for more information"; version = "2.0"; collection_name = $null
}
$actual.deprecations[1] | Assert-DictionaryEqual -Expected @{
msg = "Alias 'alias2' is deprecated. See the module docs for more information"; version = "2.0"; collection_name = "foo.bar"
}
$actual.changed | Assert-Equal -Expected $false
$actual.invocation | Assert-DictionaryEqual -Expected @{
module_args = @{
option1 = "option1"
alias1_spec = "option1"
option2 = "option2"
alias2 = "option2"
}
}
}
"Fragment spec with mutual args" = {
$spec = @{
options = @{
option1 = @{ type = "str" }
option2 = @{ type = "str" }
}
mutually_exclusive = @(
, @('option1', 'option2')
)
}
$fragment1 = @{
options = @{
fragment1_1 = @{ type = "str" }
fragment1_2 = @{ type = "str" }
}
mutually_exclusive = @(
, @('fragment1_1', 'fragment1_2')
)
}
$fragment2 = @{
options = @{
fragment2 = @{ type = "str" }
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "option1"
fragment1_1 = "fragment1_1"
fragment1_2 = "fragment1_2"
fragment2 = "fragment2"
}
$failed = $false
try {
[Ansible.Basic.AnsibleModule]::Create(@(), $spec, @($fragment1, $fragment2))
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$actual.changed | Assert-Equal -Expected $false
$actual.failed | Assert-Equal -Expected $true
$actual.msg | Assert-Equal -Expected "parameters are mutually exclusive: fragment1_1, fragment1_2"
$actual.invocation | Assert-DictionaryEqual -Expected @{ module_args = $complex_args }
}
"Fragment spec with no_log" = {
$spec = @{
options = @{
option1 = @{
aliases = @("alias")
}
}
}
$fragment1 = @{
options = @{
option1 = @{
no_log = $true # Makes sure that a value set in the fragment but not in the spec is respected.
type = "str"
}
}
}
Set-Variable -Name complex_args -Scope Global -Value @{
alias = "option1"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec, @($fragment1))
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$actual.changed | Assert-Equal -Expected $false
$actual.invocation | Assert-DictionaryEqual -Expected @{
module_args = @{
option1 = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
alias = "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
}
}
}
"Catch invalid fragment spec format" = {
$spec = @{
options = @{
option1 = @{ type = "str" }
}
}
$fragment = @{
options = @{}
invalid = "will fail"
}
Set-Variable -Name complex_args -Scope Global -Value @{
option1 = "option1"
}
$failed = $false
try {
[Ansible.Basic.AnsibleModule]::Create(@(), $spec, @($fragment))
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 1"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$actual.failed | Assert-Equal -Expected $true
$actual.msg.StartsWith("internal error: argument spec entry contains an invalid key 'invalid', valid keys: ") | Assert-Equal -Expected $true
}
"Spec with different list types" = {
$spec = @{
options = @{
# Single element of the same list type not in a list
option1 = @{
aliases = "alias1"
deprecated_aliases = @{name = "alias1"; version = "2.0"; collection_name = "foo.bar" }
}
# Arrays
option2 = @{
aliases = , "alias2"
deprecated_aliases = , @{name = "alias2"; version = "2.0"; collection_name = "foo.bar" }
}
# ArrayList
option3 = @{
aliases = [System.Collections.ArrayList]@("alias3")
deprecated_aliases = [System.Collections.ArrayList]@(@{name = "alias3"; version = "2.0"; collection_name = "foo.bar" })
}
# Generic.List[Object]
option4 = @{
aliases = [System.Collections.Generic.List[Object]]@("alias4")
deprecated_aliases = [System.Collections.Generic.List[Object]]@(@{name = "alias4"; version = "2.0"; collection_name = "foo.bar" })
}
# Generic.List[T]
option5 = @{
aliases = [System.Collections.Generic.List[String]]@("alias5")
deprecated_aliases = [System.Collections.Generic.List[Hashtable]]@()
}
}
}
$spec.options.option5.deprecated_aliases.Add(@{name = "alias5"; version = "2.0"; collection_name = "foo.bar" })
Set-Variable -Name complex_args -Scope Global -Value @{
alias1 = "option1"
alias2 = "option2"
alias3 = "option3"
alias4 = "option4"
alias5 = "option5"
}
$m = [Ansible.Basic.AnsibleModule]::Create(@(), $spec)
$failed = $false
try {
$m.ExitJson()
}
catch [System.Management.Automation.RuntimeException] {
$failed = $true
$_.Exception.Message | Assert-Equal -Expected "exit: 0"
$actual = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
}
$failed | Assert-Equal -Expected $true
$actual.changed | Assert-Equal -Expected $false
$actual.deprecations.Count | Assert-Equal -Expected 5
foreach ($dep in $actual.deprecations) {
$dep.msg -like "Alias 'alias?' is deprecated. See the module docs for more information" | Assert-Equal -Expected $true
$dep.version | Assert-Equal -Expected '2.0'
$dep.collection_name | Assert-Equal -Expected 'foo.bar'
}
$actual.invocation | Assert-DictionaryEqual -Expected @{
module_args = @{
alias1 = "option1"
option1 = "option1"
alias2 = "option2"
option2 = "option2"
alias3 = "option3"
option3 = "option3"
alias4 = "option4"
option4 = "option4"
alias5 = "option5"
option5 = "option5"
}
}
}
}
try {
foreach ($test_impl in $tests.GetEnumerator()) {
# Reset the variables before each test
Set-Variable -Name complex_args -Value @{} -Scope Global
$test = $test_impl.Key
&$test_impl.Value
}
$module.Result.data = "success"
}
catch [System.Management.Automation.RuntimeException] {
$module.Result.failed = $true
$module.Result.test = $test
$module.Result.line = $_.InvocationInfo.ScriptLineNumber
$module.Result.method = $_.InvocationInfo.Line.Trim()
if ($_.Exception.Message.StartSwith("exit: ")) {
# The exception was caused by an unexpected Exit call, log that on the output
$module.Result.output = (ConvertFrom-Json -InputObject $_.Exception.InnerException.Output)
$module.Result.msg = "Uncaught AnsibleModule exit in tests, see output"
}
else {
# Unrelated exception
$module.Result.exception = $_.Exception.ToString()
$module.Result.msg = "Uncaught exception: $(($_ | Out-String).ToString())"
}
}
Exit-Module
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,965 |
TypeError: join() missing 1 required positional argument: 'a' in ansible-galaxy
|
### Summary
```plaintext
# ansible-galaxy install willshersystems.sshd --force -vvv
ansible-galaxy [core 2.15.5]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible-galaxy
python version = 3.9.16 (main, Sep 8 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
No config file found; using defaults
Starting galaxy role install process
Processing role willshersystems.sshd
Opened /root/.ansible/galaxy_token
- downloading role 'sshd', owned by willshersystems
- downloading role from https://github.com/willshersystems/ansible-sshd/archive/v0.21.0.tar.gz
- extracting willshersystems.sshd to /root/.ansible/roles/willshersystems.sshd
[WARNING]: Illegal filename '..': '..' is not allowed
ERROR! Unexpected Exception, this is probably a bug: join() missing 1 required positional argument: 'a'
the full traceback was:
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/ansible/cli/__init__.py", line 659, in cli_executor
exit_code = cli.run()
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 719, in run
return context.CLIARGS['func']()
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 119, in method_wrapper
return wrapped_method(*args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 1370, in execute_install
self._execute_install_role(role_requirements)
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 1469, in _execute_install_role
installed = role.install()
File "/usr/local/lib/python3.9/site-packages/ansible/galaxy/role.py", line 426, in install
setattr(member, attr, os.path.join(*n_final_parts))
TypeError: join() missing 1 required positional argument: 'a'
```
For some reason, the list `n_final_parts` doesn't contain any entries, which makes this call crash:
```python
os.path.join(*n_final_parts)
```
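As a minimal sketch (plain Python 3, independent of the Ansible code base), an empty list reproduces exactly this error: `os.path.join` is declared as `join(a, *p)`, so unpacking an empty sequence leaves the required first argument missing. The variable name below simply mirrors the one in the traceback and is used here as a hypothetical stand-in.
```python
import os

# Hypothetical stand-in for the empty list built up in role.install():
# no path parts survived filtering, so unpacking gives join() no arguments.
n_final_parts = []

try:
    os.path.join(*n_final_parts)
except TypeError as exc:
    print(exc)  # join() missing 1 required positional argument: 'a'
```
Guarding against an empty parts list before the join (or skipping such archive members entirely) would avoid the crash.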
### Issue Type
Bug Report
### Component Name
ansible-galaxy
### Ansible Version
```console
$ ansible --version
ansible [core 2.15.5]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible
python version = 3.9.16 (main, Feb 23 2023, 00:00:00) [GCC 11.3.1 20221121 (Red Hat 11.3.1-4)] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
EDITOR(env: EDITOR) = vim
PAGER(env: PAGER) = less
```
### OS / Environment
Amazon Linux 2023
### Steps to Reproduce
not applicable
### Expected Results
I expect the ansible role to be installed
### Actual Results
```console
The installation process crashes, as mentioned above.
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81965
|
https://github.com/ansible/ansible/pull/82165
|
b405958f7998efc2e1d03ecf2d22bcd9276b2533
|
3a42a0036875c8cab6a62ab9ea67a365e1dd4781
| 2023-10-13T08:08:47Z |
python
| 2023-11-30T23:05:48Z |
changelogs/fragments/ansible-galaxy-role-install-symlink.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,965 |
TypeError: join() missing 1 required positional argument: 'a' in ansible-galaxy
|
### Summary
```plaintext
# ansible-galaxy install willshersystems.sshd --force -vvv
ansible-galaxy [core 2.15.5]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible-galaxy
python version = 3.9.16 (main, Sep 8 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
No config file found; using defaults
Starting galaxy role install process
Processing role willshersystems.sshd
Opened /root/.ansible/galaxy_token
- downloading role 'sshd', owned by willshersystems
- downloading role from https://github.com/willshersystems/ansible-sshd/archive/v0.21.0.tar.gz
- extracting willshersystems.sshd to /root/.ansible/roles/willshersystems.sshd
[WARNING]: Illegal filename '..': '..' is not allowed
ERROR! Unexpected Exception, this is probably a bug: join() missing 1 required positional argument: 'a'
the full traceback was:
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/ansible/cli/__init__.py", line 659, in cli_executor
exit_code = cli.run()
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 719, in run
return context.CLIARGS['func']()
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 119, in method_wrapper
return wrapped_method(*args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 1370, in execute_install
self._execute_install_role(role_requirements)
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 1469, in _execute_install_role
installed = role.install()
File "/usr/local/lib/python3.9/site-packages/ansible/galaxy/role.py", line 426, in install
setattr(member, attr, os.path.join(*n_final_parts))
TypeError: join() missing 1 required positional argument: 'a'
```
For some reason, the list `n_final_parts` doesn't contain any entries, which makes this call crash:
```python
os.path.join(*n_final_parts)
```
### Issue Type
Bug Report
### Component Name
ansible-galaxy
### Ansible Version
```console
$ ansible --version
ansible [core 2.15.5]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible
python version = 3.9.16 (main, Feb 23 2023, 00:00:00) [GCC 11.3.1 20221121 (Red Hat 11.3.1-4)] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
EDITOR(env: EDITOR) = vim
PAGER(env: PAGER) = less
```
### OS / Environment
Amazon Linux 2023
### Steps to Reproduce
not applicable
### Expected Results
I expect the ansible role to be installed
### Actual Results
```console
The installation process crashes, as mentioned above.
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81965
|
https://github.com/ansible/ansible/pull/82165
|
b405958f7998efc2e1d03ecf2d22bcd9276b2533
|
3a42a0036875c8cab6a62ab9ea67a365e1dd4781
| 2023-10-13T08:08:47Z |
python
| 2023-11-30T23:05:48Z |
lib/ansible/galaxy/role.py
|
########################################################################
#
# (C) 2015, Brian Coca <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
########################################################################
from __future__ import annotations
import errno
import datetime
import functools
import os
import tarfile
import tempfile
from collections.abc import MutableSequence
from shutil import rmtree
from ansible import context
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.galaxy.api import GalaxyAPI
from ansible.galaxy.user_agent import user_agent
from ansible.module_utils.common.text.converters import to_native, to_text
from ansible.module_utils.common.yaml import yaml_dump, yaml_load
from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils.urls import open_url
from ansible.playbook.role.requirement import RoleRequirement
from ansible.utils.display import Display
display = Display()
@functools.cache
def _check_working_data_filter() -> bool:
"""
Check if tarfile.data_filter implementation is working
for the current Python version or not
"""
# Implemented the following code to circumvent broken implementation of data_filter
# in tarfile. See for more information - https://github.com/python/cpython/issues/107845
# deprecated: description='probing broken data filter implementation' python_version='3.11'
ret = False
if hasattr(tarfile, 'data_filter'):
# We explicitly check if tarfile.data_filter is broken or not
ti = tarfile.TarInfo('docs/README.md')
ti.type = tarfile.SYMTYPE
ti.linkname = '../README.md'
try:
tarfile.data_filter(ti, '/foo')
except tarfile.LinkOutsideDestinationError:
pass
else:
ret = True
return ret
class GalaxyRole(object):
SUPPORTED_SCMS = set(['git', 'hg'])
META_MAIN = (os.path.join('meta', 'main.yml'), os.path.join('meta', 'main.yaml'))
META_INSTALL = os.path.join('meta', '.galaxy_install_info')
META_REQUIREMENTS = (os.path.join('meta', 'requirements.yml'), os.path.join('meta', 'requirements.yaml'))
ROLE_DIRS = ('defaults', 'files', 'handlers', 'meta', 'tasks', 'templates', 'vars', 'tests')
def __init__(self, galaxy, api, name, src=None, version=None, scm=None, path=None):
self._metadata = None
self._metadata_dependencies = None
self._requirements = None
self._install_info = None
self._validate_certs = not context.CLIARGS['ignore_certs']
display.debug('Validate TLS certificates: %s' % self._validate_certs)
self.galaxy = galaxy
self._api = api
self.name = name
self.version = version
self.src = src or name
self.download_url = None
self.scm = scm
self.paths = [os.path.join(x, self.name) for x in galaxy.roles_paths]
if path is not None:
if not path.endswith(os.path.join(os.path.sep, self.name)):
path = os.path.join(path, self.name)
else:
# Look for a meta/main.ya?ml inside the potential role dir in case
# the role name is the same as parent directory of the role.
#
# Example:
# ./roles/testing/testing/meta/main.yml
for meta_main in self.META_MAIN:
if os.path.exists(os.path.join(path, name, meta_main)):
path = os.path.join(path, self.name)
break
self.path = path
else:
# use the first path by default
self.path = self.paths[0]
def __repr__(self):
"""
Returns "rolename (version)" if version is not null
Returns "rolename" otherwise
"""
if self.version:
return "%s (%s)" % (self.name, self.version)
else:
return self.name
def __eq__(self, other):
return self.name == other.name
@property
def api(self):
if not isinstance(self._api, GalaxyAPI):
return self._api.api
return self._api
@property
def metadata(self):
"""
Returns role metadata
"""
if self._metadata is None:
for path in self.paths:
for meta_main in self.META_MAIN:
meta_path = os.path.join(path, meta_main)
if os.path.isfile(meta_path):
try:
with open(meta_path, 'r') as f:
self._metadata = yaml_load(f)
except Exception:
display.vvvvv("Unable to load metadata for %s" % self.name)
return False
break
return self._metadata
@property
def metadata_dependencies(self):
"""
Returns a list of dependencies from role metadata
"""
if self._metadata_dependencies is None:
self._metadata_dependencies = []
if self.metadata is not None:
self._metadata_dependencies = self.metadata.get('dependencies') or []
if not isinstance(self._metadata_dependencies, MutableSequence):
raise AnsibleParserError(
f"Expected role dependencies to be a list. Role {self} has meta/main.yml with dependencies {self._metadata_dependencies}"
)
return self._metadata_dependencies
@property
def install_info(self):
"""
Returns role install info
"""
if self._install_info is None:
info_path = os.path.join(self.path, self.META_INSTALL)
if os.path.isfile(info_path):
try:
f = open(info_path, 'r')
self._install_info = yaml_load(f)
except Exception:
display.vvvvv("Unable to load Galaxy install info for %s" % self.name)
return False
finally:
f.close()
return self._install_info
@property
def _exists(self):
for path in self.paths:
if os.path.isdir(path):
return True
return False
def _write_galaxy_install_info(self):
"""
Writes a YAML-formatted file to the role's meta/ directory
(named .galaxy_install_info) which contains some information
we can use later for commands like 'list' and 'info'.
"""
info = dict(
version=self.version,
install_date=datetime.datetime.now(datetime.timezone.utc).strftime("%c"),
)
if not os.path.exists(os.path.join(self.path, 'meta')):
os.makedirs(os.path.join(self.path, 'meta'))
info_path = os.path.join(self.path, self.META_INSTALL)
with open(info_path, 'w+') as f:
try:
self._install_info = yaml_dump(info, f)
except Exception:
return False
return True
def remove(self):
"""
Removes the specified role from the roles path.
There is a sanity check to make sure there's a meta/main.yml file at this
path so the user doesn't blow away random directories.
"""
if self.metadata:
try:
rmtree(self.path)
return True
except Exception:
pass
return False
def fetch(self, role_data):
"""
Downloads the archived role to a temp location based on role data
"""
if role_data:
# first grab the file and save it to a temp location
if self.download_url is not None:
archive_url = self.download_url
elif "github_user" in role_data and "github_repo" in role_data:
archive_url = 'https://github.com/%s/%s/archive/%s.tar.gz' % (role_data["github_user"], role_data["github_repo"], self.version)
else:
archive_url = self.src
display.display("- downloading role from %s" % archive_url)
try:
url_file = open_url(archive_url, validate_certs=self._validate_certs, http_agent=user_agent())
temp_file = tempfile.NamedTemporaryFile(delete=False)
data = url_file.read()
while data:
temp_file.write(data)
data = url_file.read()
temp_file.close()
return temp_file.name
except Exception as e:
display.error(u"failed to download the file: %s" % to_text(e))
return False
def install(self):
if self.scm:
# create tar file from scm url
tmp_file = RoleRequirement.scm_archive_role(keep_scm_meta=context.CLIARGS['keep_scm_meta'], **self.spec)
elif self.src:
if os.path.isfile(self.src):
tmp_file = self.src
elif '://' in self.src:
role_data = self.src
tmp_file = self.fetch(role_data)
else:
role_data = self.api.lookup_role_by_name(self.src)
if not role_data:
raise AnsibleError("- sorry, %s was not found on %s." % (self.src, self.api.api_server))
if role_data.get('role_type') == 'APP':
# Container Role
display.warning("%s is a Container App role, and should only be installed using Ansible "
"Container" % self.name)
role_versions = self.api.fetch_role_related('versions', role_data['id'])
if not self.version:
# convert the version names to LooseVersion objects
# and sort them to get the latest version. If there
# are no versions in the list, we'll grab the head
# of the master branch
if len(role_versions) > 0:
loose_versions = [LooseVersion(a.get('name', None)) for a in role_versions]
try:
loose_versions.sort()
except TypeError:
raise AnsibleError(
'Unable to compare role versions (%s) to determine the most recent version due to incompatible version formats. '
'Please contact the role author to resolve versioning conflicts, or specify an explicit role version to '
'install.' % ', '.join([v.vstring for v in loose_versions])
)
self.version = to_text(loose_versions[-1])
elif role_data.get('github_branch', None):
self.version = role_data['github_branch']
else:
self.version = 'master'
elif self.version != 'master':
if role_versions and to_text(self.version) not in [a.get('name', None) for a in role_versions]:
raise AnsibleError("- the specified version (%s) of %s was not found in the list of available versions (%s)." % (self.version,
self.name,
role_versions))
# check if there's a source link/url for our role_version
for role_version in role_versions:
if role_version['name'] == self.version and 'source' in role_version:
self.src = role_version['source']
if role_version['name'] == self.version and 'download_url' in role_version:
self.download_url = role_version['download_url']
tmp_file = self.fetch(role_data)
else:
raise AnsibleError("No valid role data found")
if tmp_file:
display.debug("installing from %s" % tmp_file)
if not tarfile.is_tarfile(tmp_file):
raise AnsibleError("the downloaded file does not appear to be a valid tar archive.")
else:
role_tar_file = tarfile.open(tmp_file, "r")
# verify the role's meta file
meta_file = None
members = role_tar_file.getmembers()
# next find the metadata file
for member in members:
for meta_main in self.META_MAIN:
if meta_main in member.name:
# Look for parent of meta/main.yml
# Due to possibility of sub roles each containing meta/main.yml
# look for shortest length parent
meta_parent_dir = os.path.dirname(os.path.dirname(member.name))
if not meta_file:
archive_parent_dir = meta_parent_dir
meta_file = member
else:
if len(meta_parent_dir) < len(archive_parent_dir):
archive_parent_dir = meta_parent_dir
meta_file = member
if not meta_file:
raise AnsibleError("this role does not appear to have a meta/main.yml file.")
else:
try:
self._metadata = yaml_load(role_tar_file.extractfile(meta_file))
except Exception:
raise AnsibleError("this role does not appear to have a valid meta/main.yml file.")
paths = self.paths
if self.path != paths[0]:
# path can be passed through __init__
# FIXME should this be done in __init__?
paths[:0] = self.path
paths_len = len(paths)
for idx, path in enumerate(paths):
self.path = path
display.display("- extracting %s to %s" % (self.name, self.path))
try:
if os.path.exists(self.path):
if not os.path.isdir(self.path):
raise AnsibleError("the specified roles path exists and is not a directory.")
elif not context.CLIARGS.get("force", False):
raise AnsibleError("the specified role %s appears to already exist. Use --force to replace it." % self.name)
else:
# using --force, remove the old path
if not self.remove():
raise AnsibleError("%s doesn't appear to contain a role.\n please remove this directory manually if you really "
"want to put the role here." % self.path)
else:
os.makedirs(self.path)
# We strip off any higher-level directories for all of the files
# contained within the tar file here. The default is 'github_repo-target'.
# Gerrit instances, on the other hand, do not have a parent directory at all.
for member in members:
# we only extract files, and remove any relative path
# bits that might be in the file for security purposes
# and drop any containing directory, as mentioned above
if member.isreg() or member.issym():
for attr in ('name', 'linkname'):
attr_value = getattr(member, attr, None)
if not attr_value:
continue
n_attr_value = to_native(attr_value)
n_archive_parent_dir = to_native(archive_parent_dir)
n_parts = n_attr_value.replace(n_archive_parent_dir, "", 1).split(os.sep)
n_final_parts = []
for n_part in n_parts:
# TODO: if the condition triggers, it produces a broken installation.
# It will create the parent directory as an empty file and will
# explode if the directory contains valid files.
# Leaving this as is since the whole module needs a rewrite.
#
# Check if we have any files with illegal names,
# and display a warning if so. This could help users
# to debug a broken installation.
if not n_part:
continue
if n_part == '..':
display.warning(f"Illegal filename '{n_part}': '..' is not allowed")
continue
if n_part.startswith('~'):
display.warning(f"Illegal filename '{n_part}': names cannot start with '~'")
continue
if '$' in n_part:
display.warning(f"Illegal filename '{n_part}': names cannot contain '$'")
continue
n_final_parts.append(n_part)
setattr(member, attr, os.path.join(*n_final_parts))
if _check_working_data_filter():
# deprecated: description='extract fallback without filter' python_version='3.11'
role_tar_file.extract(member, to_native(self.path), filter='data') # type: ignore[call-arg]
else:
role_tar_file.extract(member, to_native(self.path))
# write out the install info file for later use
self._write_galaxy_install_info()
break
except OSError as e:
if e.errno == errno.EACCES and idx < paths_len - 1:
continue
raise AnsibleError("Could not update files in %s: %s" % (self.path, to_native(e)))
# return the parsed yaml metadata
display.display("- %s was installed successfully" % str(self))
if not (self.src and os.path.isfile(self.src)):
try:
os.unlink(tmp_file)
except (OSError, IOError) as e:
display.warning(u"Unable to remove tmp file (%s): %s" % (tmp_file, to_text(e)))
return True
return False
@property
def spec(self):
"""
Returns role spec info
{
'scm': 'git',
'src': 'http://git.example.com/repos/repo.git',
'version': 'v1.0',
'name': 'repo'
}
"""
return dict(scm=self.scm, src=self.src, version=self.version, name=self.name)
@property
def requirements(self):
"""
Returns role requirements
"""
if self._requirements is None:
self._requirements = []
for meta_requirements in self.META_REQUIREMENTS:
meta_path = os.path.join(self.path, meta_requirements)
if os.path.isfile(meta_path):
try:
f = open(meta_path, 'r')
self._requirements = yaml_load(f)
except Exception:
display.vvvvv("Unable to load requirements for %s" % self.name)
finally:
f.close()
break
if not isinstance(self._requirements, MutableSequence):
raise AnsibleParserError(f"Expected role dependencies to be a list. Role {self} has meta/requirements.yml {self._requirements}")
return self._requirements
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,965 |
TypeError: join() missing 1 required positional argument: 'a' in ansible-galaxy
|
### Summary
```plaintext
# ansible-galaxy install willshersystems.sshd --force -vvv
ansible-galaxy [core 2.15.5]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible-galaxy
python version = 3.9.16 (main, Sep 8 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
No config file found; using defaults
Starting galaxy role install process
Processing role willshersystems.sshd
Opened /root/.ansible/galaxy_token
- downloading role 'sshd', owned by willshersystems
- downloading role from https://github.com/willshersystems/ansible-sshd/archive/v0.21.0.tar.gz
- extracting willshersystems.sshd to /root/.ansible/roles/willshersystems.sshd
[WARNING]: Illegal filename '..': '..' is not allowed
ERROR! Unexpected Exception, this is probably a bug: join() missing 1 required positional argument: 'a'
the full traceback was:
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/ansible/cli/__init__.py", line 659, in cli_executor
exit_code = cli.run()
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 719, in run
return context.CLIARGS['func']()
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 119, in method_wrapper
return wrapped_method(*args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 1370, in execute_install
self._execute_install_role(role_requirements)
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 1469, in _execute_install_role
installed = role.install()
File "/usr/local/lib/python3.9/site-packages/ansible/galaxy/role.py", line 426, in install
setattr(member, attr, os.path.join(*n_final_parts))
TypeError: join() missing 1 required positional argument: 'a'
```
For some reason, the list `n_final_parts` doesn't contain any entries which makes this call crash:
```python
os.path.join(*n_final_parts)
```
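A minimal standalone sketch of the failure mode, assuming the crash occurs whenever the sanitized parts list ends up empty; `join_sanitized` is a hypothetical helper used only for illustration, not the change made in the linked PR:
```python
import os

def join_sanitized(parts):
    """Join already-sanitized path parts; return None if nothing survived sanitization."""
    # os.path.join() requires at least one positional argument, so unpacking an
    # empty list reproduces the TypeError shown in the traceback above.
    if not parts:
        return None
    return os.path.join(*parts)

# A tar member whose relative path is just '..' is filtered down to an empty list
# by the installer's illegal-name checks, which is the situation hit here.
print(join_sanitized([]))                     # None instead of a TypeError
print(join_sanitized(['tasks', 'main.yml']))  # tasks/main.yml
```
In the installer itself the equivalent guard would presumably be to skip the member (or the attribute) when nothing remains after filtering, rather than calling `os.path.join` with no arguments.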
### Issue Type
Bug Report
### Component Name
ansible-galaxy
### Ansible Version
```console
$ ansible --version
ansible [core 2.15.5]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible
python version = 3.9.16 (main, Feb 23 2023, 00:00:00) [GCC 11.3.1 20221121 (Red Hat 11.3.1-4)] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
EDITOR(env: EDITOR) = vim
PAGER(env: PAGER) = less
```
### OS / Environment
Amazon Linux 2023
### Steps to Reproduce
not applicable
### Expected Results
I expect the ansible role to be installed
### Actual Results
```console
The installation process crashes, as mentioned above.
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81965
|
https://github.com/ansible/ansible/pull/82165
|
b405958f7998efc2e1d03ecf2d22bcd9276b2533
|
3a42a0036875c8cab6a62ab9ea67a365e1dd4781
| 2023-10-13T08:08:47Z |
python
| 2023-11-30T23:05:48Z |
test/integration/targets/ansible-galaxy-role/files/create-role-archive.py
|
#!/usr/bin/env python
"""Create a role archive which overwrites an arbitrary file."""
from __future__ import annotations
import argparse
import pathlib
import tarfile
import tempfile
def main() -> None:
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('archive', type=pathlib.Path, help='archive to create')
parser.add_argument('content', type=pathlib.Path, help='content to write')
parser.add_argument('target', type=pathlib.Path, help='file to overwrite')
args = parser.parse_args()
create_archive(args.archive, args.content, args.target)
def create_archive(archive_path: pathlib.Path, content_path: pathlib.Path, target_path: pathlib.Path) -> None:
with (
tarfile.open(name=archive_path, mode='w') as role_archive,
tempfile.TemporaryDirectory() as temp_dir_name,
):
temp_dir_path = pathlib.Path(temp_dir_name)
meta_main_path = temp_dir_path / 'meta' / 'main.yml'
meta_main_path.parent.mkdir()
meta_main_path.write_text('')
symlink_path = temp_dir_path / 'symlink'
symlink_path.symlink_to(target_path)
role_archive.add(meta_main_path)
role_archive.add(symlink_path)
content_tarinfo = role_archive.gettarinfo(content_path, str(symlink_path))
with content_path.open('rb') as content_file:
role_archive.addfile(content_tarinfo, content_file)
if __name__ == '__main__':
main()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,965 |
TypeError: join() missing 1 required positional argument: 'a' in ansible-galaxy
|
### Summary
```plaintext
# ansible-galaxy install willshersystems.sshd --force -vvv
ansible-galaxy [core 2.15.5]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible-galaxy
python version = 3.9.16 (main, Sep 8 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
No config file found; using defaults
Starting galaxy role install process
Processing role willshersystems.sshd
Opened /root/.ansible/galaxy_token
- downloading role 'sshd', owned by willshersystems
- downloading role from https://github.com/willshersystems/ansible-sshd/archive/v0.21.0.tar.gz
- extracting willshersystems.sshd to /root/.ansible/roles/willshersystems.sshd
[WARNING]: Illegal filename '..': '..' is not allowed
ERROR! Unexpected Exception, this is probably a bug: join() missing 1 required positional argument: 'a'
the full traceback was:
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/ansible/cli/__init__.py", line 659, in cli_executor
exit_code = cli.run()
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 719, in run
return context.CLIARGS['func']()
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 119, in method_wrapper
return wrapped_method(*args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 1370, in execute_install
self._execute_install_role(role_requirements)
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 1469, in _execute_install_role
installed = role.install()
File "/usr/local/lib/python3.9/site-packages/ansible/galaxy/role.py", line 426, in install
setattr(member, attr, os.path.join(*n_final_parts))
TypeError: join() missing 1 required positional argument: 'a'
```
For some reason, the list `n_final_parts` doesn't contain any entries which makes this call crash:
```python
os.path.join(*n_final_parts)
```
### Issue Type
Bug Report
### Component Name
ansible-galaxy
### Ansible Version
```console
$ ansible --version
ansible [core 2.15.5]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible
python version = 3.9.16 (main, Feb 23 2023, 00:00:00) [GCC 11.3.1 20221121 (Red Hat 11.3.1-4)] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
EDITOR(env: EDITOR) = vim
PAGER(env: PAGER) = less
```
### OS / Environment
Amazon Linux 2023
### Steps to Reproduce
not applicable
### Expected Results
I expect the ansible role to be installed
### Actual Results
```console
The installation process crashes, as mentioned above.
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81965
|
https://github.com/ansible/ansible/pull/82165
|
b405958f7998efc2e1d03ecf2d22bcd9276b2533
|
3a42a0036875c8cab6a62ab9ea67a365e1dd4781
| 2023-10-13T08:08:47Z |
python
| 2023-11-30T23:05:48Z |
test/integration/targets/ansible-galaxy-role/tasks/dir-traversal.yml
|
- name: create test directories
file:
path: '{{ remote_tmp_dir }}/dir-traversal/{{ item }}'
state: directory
loop:
- source
- target
- roles
- name: create test content
copy:
dest: '{{ remote_tmp_dir }}/dir-traversal/source/content.txt'
content: |
some content to write
- name: build dangerous dir traversal role
script:
chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
cmd: create-role-archive.py dangerous.tar content.txt {{ remote_tmp_dir }}/dir-traversal/target/target-file-to-overwrite.txt
executable: '{{ ansible_playbook_python }}'
- name: install dangerous role
command:
cmd: ansible-galaxy role install --roles-path '{{ remote_tmp_dir }}/dir-traversal/roles' dangerous.tar
chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
ignore_errors: true
register: galaxy_install_dangerous
- name: check for overwritten file
stat:
path: '{{ remote_tmp_dir }}/dir-traversal/target/target-file-to-overwrite.txt'
register: dangerous_overwrite_stat
- name: get overwritten content
slurp:
path: '{{ remote_tmp_dir }}/dir-traversal/target/target-file-to-overwrite.txt'
register: dangerous_overwrite_content
when: dangerous_overwrite_stat.stat.exists
- assert:
that:
- dangerous_overwrite_content.content|default('')|b64decode == ''
- not dangerous_overwrite_stat.stat.exists
- galaxy_install_dangerous is failed
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,965 |
TypeError: join() missing 1 required positional argument: 'a' in ansible-galaxy
|
### Summary
```plaintext
# ansible-galaxy install willshersystems.sshd --force -vvv
ansible-galaxy [core 2.15.5]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible-galaxy
python version = 3.9.16 (main, Sep 8 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
No config file found; using defaults
Starting galaxy role install process
Processing role willshersystems.sshd
Opened /root/.ansible/galaxy_token
- downloading role 'sshd', owned by willshersystems
- downloading role from https://github.com/willshersystems/ansible-sshd/archive/v0.21.0.tar.gz
- extracting willshersystems.sshd to /root/.ansible/roles/willshersystems.sshd
[WARNING]: Illegal filename '..': '..' is not allowed
ERROR! Unexpected Exception, this is probably a bug: join() missing 1 required positional argument: 'a'
the full traceback was:
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/ansible/cli/__init__.py", line 659, in cli_executor
exit_code = cli.run()
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 719, in run
return context.CLIARGS['func']()
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 119, in method_wrapper
return wrapped_method(*args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 1370, in execute_install
self._execute_install_role(role_requirements)
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 1469, in _execute_install_role
installed = role.install()
File "/usr/local/lib/python3.9/site-packages/ansible/galaxy/role.py", line 426, in install
setattr(member, attr, os.path.join(*n_final_parts))
TypeError: join() missing 1 required positional argument: 'a'
```
For some reason, the list `n_final_parts` doesn't contain any entries which makes this call crash:
```python
os.path.join(*n_final_parts)
```
### Issue Type
Bug Report
### Component Name
ansible-galaxy
### Ansible Version
```console
$ ansible --version
ansible [core 2.15.5]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible
python version = 3.9.16 (main, Feb 23 2023, 00:00:00) [GCC 11.3.1 20221121 (Red Hat 11.3.1-4)] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
EDITOR(env: EDITOR) = vim
PAGER(env: PAGER) = less
```
### OS / Environment
Amazon Linux 2023
### Steps to Reproduce
not applicable
### Expected Results
I expect the ansible role to be installed
### Actual Results
```console
The installation process crashes, as mentioned above.
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81965
|
https://github.com/ansible/ansible/pull/82165
|
b405958f7998efc2e1d03ecf2d22bcd9276b2533
|
3a42a0036875c8cab6a62ab9ea67a365e1dd4781
| 2023-10-13T08:08:47Z |
python
| 2023-11-30T23:05:48Z |
test/integration/targets/ansible-galaxy-role/tasks/main.yml
|
- name: Install role from Galaxy (should not fail with AttributeError)
command: ansible-galaxy role install ansible.nope -vvvv --ignore-errors
- name: Archive directories
file:
state: directory
path: "{{ remote_tmp_dir }}/role.d/{{item}}"
loop:
- meta
- tasks
- name: Metadata file
copy:
content: "'galaxy_info': {}"
dest: "{{ remote_tmp_dir }}/role.d/meta/main.yml"
- name: Valid files
copy:
content: ""
dest: "{{ remote_tmp_dir }}/role.d/tasks/{{item}}"
loop:
- "main.yml"
- "valid~file.yml"
- name: Valid role archive
command: "tar cf {{ remote_tmp_dir }}/valid-role.tar {{ remote_tmp_dir }}/role.d"
- name: Invalid file
copy:
content: ""
dest: "{{ remote_tmp_dir }}/role.d/tasks/~invalid.yml"
- name: Invalid file
copy:
content: ""
dest: "{{ remote_tmp_dir }}/role.d/tasks/invalid$name.yml"
- name: Valid requirements file
copy:
dest: valid-requirements.yml
content: "[{'src': '{{ remote_tmp_dir }}/valid-role.tar', 'name': 'valid-testrole'}]"
- name: Invalid role archive
command: "tar cf {{ remote_tmp_dir }}/invalid-role.tar {{ remote_tmp_dir }}/role.d"
- name: Invalid requirements file
copy:
dest: invalid-requirements.yml
content: "[{'src': '{{ remote_tmp_dir }}/invalid-role.tar', 'name': 'invalid-testrole'}]"
- name: Install valid role
command: ansible-galaxy install -r valid-requirements.yml
- name: Uninstall valid role
command: ansible-galaxy role remove valid-testrole
- name: Install invalid role
command: ansible-galaxy install -r invalid-requirements.yml
ignore_errors: yes
register: invalid
- assert:
that: "invalid.rc != 0"
- name: Uninstall invalid role
command: ansible-galaxy role remove invalid-testrole
- import_tasks: dir-traversal.yml
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 81,965 |
TypeError: join() missing 1 required positional argument: 'a' in ansible-galaxy
|
### Summary
```plaintext
# ansible-galaxy install willshersystems.sshd --force -vvv
ansible-galaxy [core 2.15.5]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible-galaxy
python version = 3.9.16 (main, Sep 8 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
No config file found; using defaults
Starting galaxy role install process
Processing role willshersystems.sshd
Opened /root/.ansible/galaxy_token
- downloading role 'sshd', owned by willshersystems
- downloading role from https://github.com/willshersystems/ansible-sshd/archive/v0.21.0.tar.gz
- extracting willshersystems.sshd to /root/.ansible/roles/willshersystems.sshd
[WARNING]: Illegal filename '..': '..' is not allowed
ERROR! Unexpected Exception, this is probably a bug: join() missing 1 required positional argument: 'a'
the full traceback was:
Traceback (most recent call last):
File "/usr/local/lib/python3.9/site-packages/ansible/cli/__init__.py", line 659, in cli_executor
exit_code = cli.run()
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 719, in run
return context.CLIARGS['func']()
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 119, in method_wrapper
return wrapped_method(*args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 1370, in execute_install
self._execute_install_role(role_requirements)
File "/usr/local/lib/python3.9/site-packages/ansible/cli/galaxy.py", line 1469, in _execute_install_role
installed = role.install()
File "/usr/local/lib/python3.9/site-packages/ansible/galaxy/role.py", line 426, in install
setattr(member, attr, os.path.join(*n_final_parts))
TypeError: join() missing 1 required positional argument: 'a'
```
For some reason, the list `n_final_parts` doesn't contain any entries which makes this call crash:
```python
os.path.join(*n_final_parts)
```
### Issue Type
Bug Report
### Component Name
ansible-galaxy
### Ansible Version
```console
$ ansible --version
ansible [core 2.15.5]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/local/bin/ansible
python version = 3.9.16 (main, Feb 23 2023, 00:00:00) [GCC 11.3.1 20221121 (Red Hat 11.3.1-4)] (/usr/bin/python3)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
EDITOR(env: EDITOR) = vim
PAGER(env: PAGER) = less
```
### OS / Environment
Amazon Linux 2023
### Steps to Reproduce
not applicable
### Expected Results
I expect the ansible role to be installed
### Actual Results
```console
The installation process crashes, as mentioned above.
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/81965
|
https://github.com/ansible/ansible/pull/82165
|
b405958f7998efc2e1d03ecf2d22bcd9276b2533
|
3a42a0036875c8cab6a62ab9ea67a365e1dd4781
| 2023-10-13T08:08:47Z |
python
| 2023-11-30T23:05:48Z |
test/integration/targets/ansible-galaxy-role/tasks/valid-role-symlinks.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,264 |
delegate_to: "{{var}}" when: var != "" causes a "Supplied entity must be Host or Group, got <class 'ansible.inventory.host.Host'> instead" error on 2.16
|
### Summary
I have a task to conditionally install an SSH key on another machine:
```yaml
- authorized_key: ...
delegate_to: "{{ jenkins_install_key_on }}"
when: jenkins_install_key_on != ""
```
This works fine when `jenkins_install_key_on` is set to a non-blank value, but fails when `jenkins_install_key_on` is set to an empty string:
```
fatal: [jammy -> {{ jenkins_install_key_on }}]: FAILED! => {"msg": "Supplied entity must be Host or Group, got <class 'ansible.inventory.host.Host'> instead"}
```
It used to work with ansible-core 2.15 and older.
### Issue Type
Bug Report
### Component Name
delegate_to
### Ansible Version
```console
$ ansible --version
ansible [core 2.16.0]
config file = /home/mg/src/deployments/provisioning/ansible.cfg
configured module search path = ['/home/mg/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/mg/.local/pipx/venvs/ansible/lib/python3.11/site-packages/ansible
ansible collection location = /home/mg/.ansible/collections:/usr/share/ansible/collections
executable location = /home/mg/.local/bin/ansible
python version = 3.11.6 (main, Oct 8 2023, 05:06:43) [GCC 13.2.0] (/home/mg/.local/pipx/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
ACTION_WARNINGS(/home/mg/src/deployments/provisioning/ansible.cfg) = False
CACHE_PLUGIN(/home/mg/src/deployments/provisioning/ansible.cfg) = jsonfile
CACHE_PLUGIN_CONNECTION(/home/mg/src/deployments/provisioning/ansible.cfg) = .cache/facts/
CACHE_PLUGIN_TIMEOUT(/home/mg/src/deployments/provisioning/ansible.cfg) = 86400
CALLBACKS_ENABLED(/home/mg/src/deployments/provisioning/ansible.cfg) = ['fancy_html']
CONFIG_FILE() = /home/mg/src/deployments/provisioning/ansible.cfg
DEFAULT_FORKS(/home/mg/src/deployments/provisioning/ansible.cfg) = 15
DEFAULT_GATHERING(/home/mg/src/deployments/provisioning/ansible.cfg) = smart
DEFAULT_HOST_LIST(/home/mg/src/deployments/provisioning/ansible.cfg) = ['/home/mg/src/deployments/provisioning/inventory']
DEFAULT_LOG_PATH(/home/mg/src/deployments/provisioning/ansible.cfg) = /home/mg/src/deployments/provisioning/.cache/ansible.log
DEFAULT_REMOTE_USER(/home/mg/src/deployments/provisioning/ansible.cfg) = root
DEFAULT_STDOUT_CALLBACK(/home/mg/src/deployments/provisioning/ansible.cfg) = yaml
DEFAULT_VAULT_PASSWORD_FILE(/home/mg/src/deployments/provisioning/ansible.cfg) = /home/mg/src/deployments/provisioning/askpas>
EDITOR(env: EDITOR) = vim
INTERPRETER_PYTHON(/home/mg/src/deployments/provisioning/ansible.cfg) = python3
RETRY_FILES_ENABLED(/home/mg/src/deployments/provisioning/ansible.cfg) = False
CACHE:
=====
jsonfile:
________
_timeout(/home/mg/src/deployments/provisioning/ansible.cfg) = 86400
_uri(/home/mg/src/deployments/provisioning/ansible.cfg) = /home/mg/src/deployments/provisioning/.cache/facts
CONNECTION:
==========
paramiko_ssh:
____________
remote_user(/home/mg/src/deployments/provisioning/ansible.cfg) = root
ssh:
___
remote_user(/home/mg/src/deployments/provisioning/ansible.cfg) = root
```
### OS / Environment
Ubuntu 23.10
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
- hosts: all
gather_facts: no
tasks:
- debug: msg="hello"
delegate_to: "{{ var }}"
when: var != ""
vars:
var: ""
```
### Expected Results
I expect the task to be skipped.
### Actual Results
```console
$ ansible-playbook -i localhost, ansible-delegate-to-blank.yml -vvvv
ansible-playbook [core 2.16.0]
config file = None
configured module search path = ['/home/mg/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/mg/.local/pipx/venvs/ansible/lib/python3.11/site-packages/ansible
ansible collection location = /home/mg/.ansible/collections:/usr/share/ansible/collections
executable location = /home/mg/.local/bin/ansible-playbook
python version = 3.11.6 (main, Oct 8 2023, 05:06:43) [GCC 13.2.0] (/home/mg/.local/pipx/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
No config file found; using defaults
setting up inventory plugins
Loading collection ansible.builtin from
Set default localhost to localhost
Parsed localhost, inventory source with host_list plugin
Loading callback plugin default of type stdout, v2.0 from /home/mg/.local/pipx/venvs/ansible/lib/python3.11/site-packages/ansible/plugins/callback/default.py
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
PLAYBOOK: ansible-delegate-to-blank.yml **************************************************************************************
Positional arguments: ansible-delegate-to-blank.yml
verbosity: 4
connection: ssh
become_method: sudo
tags: ('all',)
inventory: ('localhost,',)
forks: 5
1 plays in ansible-delegate-to-blank.yml
PLAY [all] *******************************************************************************************************************
TASK [debug] *****************************************************************************************************************
task path: /home/mg/tmp/ansible-delegate-to-blank.yml:4
fatal: [localhost -> {{ var }}]: FAILED! => {
"msg": "Supplied entity must be Host or Group, got <class 'ansible.inventory.host.Host'> instead"
}
PLAY RECAP *******************************************************************************************************************
localhost : ok=0 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82264
|
https://github.com/ansible/ansible/pull/82319
|
3a42a0036875c8cab6a62ab9ea67a365e1dd4781
|
6ebefaceb6cd0d4961776a94d63a71fc1fc28bc0
| 2023-11-22T08:22:01Z |
python
| 2023-12-04T15:19:12Z |
changelogs/fragments/delegate_to_invalid.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,264 |
delegate_to: "{{var}}" when: var != "" causes a "Supplied entity must be Host or Group, got <class 'ansible.inventory.host.Host'> instead" error on 2.16
|
### Summary
I have a task to conditionally install an SSH key on another machine:
```yaml
- authorized_key: ...
delegate_to: "{{ jenkins_install_key_on }}"
when: jenkins_install_key_on != ""
```
This works fine when `jenkins_install_key_on` is set to a non-blank value, but fails when `jenkins_install_key_on` is set to an empty string:
```
fatal: [jammy -> {{ jenkins_install_key_on }}]: FAILED! => {"msg": "Supplied entity must be Host or Group, got <class 'ansible.inventory.host.Host'> instead"}
```
It used to work with ansible-core 2.15 and older.
### Issue Type
Bug Report
### Component Name
delegate_to
### Ansible Version
```console
$ ansible --version
ansible [core 2.16.0]
config file = /home/mg/src/deployments/provisioning/ansible.cfg
configured module search path = ['/home/mg/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/mg/.local/pipx/venvs/ansible/lib/python3.11/site-packages/ansible
ansible collection location = /home/mg/.ansible/collections:/usr/share/ansible/collections
executable location = /home/mg/.local/bin/ansible
python version = 3.11.6 (main, Oct 8 2023, 05:06:43) [GCC 13.2.0] (/home/mg/.local/pipx/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
ACTION_WARNINGS(/home/mg/src/deployments/provisioning/ansible.cfg) = False
CACHE_PLUGIN(/home/mg/src/deployments/provisioning/ansible.cfg) = jsonfile
CACHE_PLUGIN_CONNECTION(/home/mg/src/deployments/provisioning/ansible.cfg) = .cache/facts/
CACHE_PLUGIN_TIMEOUT(/home/mg/src/deployments/provisioning/ansible.cfg) = 86400
CALLBACKS_ENABLED(/home/mg/src/deployments/provisioning/ansible.cfg) = ['fancy_html']
CONFIG_FILE() = /home/mg/src/deployments/provisioning/ansible.cfg
DEFAULT_FORKS(/home/mg/src/deployments/provisioning/ansible.cfg) = 15
DEFAULT_GATHERING(/home/mg/src/deployments/provisioning/ansible.cfg) = smart
DEFAULT_HOST_LIST(/home/mg/src/deployments/provisioning/ansible.cfg) = ['/home/mg/src/deployments/provisioning/inventory']
DEFAULT_LOG_PATH(/home/mg/src/deployments/provisioning/ansible.cfg) = /home/mg/src/deployments/provisioning/.cache/ansible.log
DEFAULT_REMOTE_USER(/home/mg/src/deployments/provisioning/ansible.cfg) = root
DEFAULT_STDOUT_CALLBACK(/home/mg/src/deployments/provisioning/ansible.cfg) = yaml
DEFAULT_VAULT_PASSWORD_FILE(/home/mg/src/deployments/provisioning/ansible.cfg) = /home/mg/src/deployments/provisioning/askpas>
EDITOR(env: EDITOR) = vim
INTERPRETER_PYTHON(/home/mg/src/deployments/provisioning/ansible.cfg) = python3
RETRY_FILES_ENABLED(/home/mg/src/deployments/provisioning/ansible.cfg) = False
CACHE:
=====
jsonfile:
________
_timeout(/home/mg/src/deployments/provisioning/ansible.cfg) = 86400
_uri(/home/mg/src/deployments/provisioning/ansible.cfg) = /home/mg/src/deployments/provisioning/.cache/facts
CONNECTION:
==========
paramiko_ssh:
____________
remote_user(/home/mg/src/deployments/provisioning/ansible.cfg) = root
ssh:
___
remote_user(/home/mg/src/deployments/provisioning/ansible.cfg) = root
```
### OS / Environment
Ubuntu 23.10
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
- hosts: all
gather_facts: no
tasks:
- debug: msg="hello"
delegate_to: "{{ var }}"
when: var != ""
vars:
var: ""
```
### Expected Results
I expect the task to be skipped.
### Actual Results
```console
$ ansible-playbook -i localhost, ansible-delegate-to-blank.yml -vvvv
ansible-playbook [core 2.16.0]
config file = None
configured module search path = ['/home/mg/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/mg/.local/pipx/venvs/ansible/lib/python3.11/site-packages/ansible
ansible collection location = /home/mg/.ansible/collections:/usr/share/ansible/collections
executable location = /home/mg/.local/bin/ansible-playbook
python version = 3.11.6 (main, Oct 8 2023, 05:06:43) [GCC 13.2.0] (/home/mg/.local/pipx/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
No config file found; using defaults
setting up inventory plugins
Loading collection ansible.builtin from
Set default localhost to localhost
Parsed localhost, inventory source with host_list plugin
Loading callback plugin default of type stdout, v2.0 from /home/mg/.local/pipx/venvs/ansible/lib/python3.11/site-packages/ansible/plugins/callback/default.py
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
PLAYBOOK: ansible-delegate-to-blank.yml **************************************************************************************
Positional arguments: ansible-delegate-to-blank.yml
verbosity: 4
connection: ssh
become_method: sudo
tags: ('all',)
inventory: ('localhost,',)
forks: 5
1 plays in ansible-delegate-to-blank.yml
PLAY [all] *******************************************************************************************************************
TASK [debug] *****************************************************************************************************************
task path: /home/mg/tmp/ansible-delegate-to-blank.yml:4
fatal: [localhost -> {{ var }}]: FAILED! => {
"msg": "Supplied entity must be Host or Group, got <class 'ansible.inventory.host.Host'> instead"
}
PLAY RECAP *******************************************************************************************************************
localhost : ok=0 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82264
|
https://github.com/ansible/ansible/pull/82319
|
3a42a0036875c8cab6a62ab9ea67a365e1dd4781
|
6ebefaceb6cd0d4961776a94d63a71fc1fc28bc0
| 2023-11-22T08:22:01Z |
python
| 2023-12-04T15:19:12Z |
lib/ansible/executor/task_executor.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import os
import pty
import time
import json
import signal
import subprocess
import sys
import termios
import traceback
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleConnectionFailure, AnsibleActionFail, AnsibleActionSkip
from ansible.executor.task_result import TaskResult
from ansible.executor.module_common import get_action_args_with_defaults
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.six import binary_type
from ansible.module_utils.common.text.converters import to_text, to_native
from ansible.module_utils.connection import write_to_file_descriptor
from ansible.playbook.conditional import Conditional
from ansible.playbook.task import Task
from ansible.plugins import get_plugin_class
from ansible.plugins.loader import become_loader, cliconf_loader, connection_loader, httpapi_loader, netconf_loader, terminal_loader
from ansible.template import Templar
from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.unsafe_proxy import to_unsafe_text, wrap_var
from ansible.vars.clean import namespace_facts, clean_facts
from ansible.utils.display import Display
from ansible.utils.vars import combine_vars, isidentifier
display = Display()
RETURN_VARS = [x for x in C.MAGIC_VARIABLE_MAPPING.items() if 'become' not in x and '_pass' not in x]
__all__ = ['TaskExecutor']
class TaskTimeoutError(BaseException):
pass
def task_timeout(signum, frame):
raise TaskTimeoutError
def remove_omit(task_args, omit_token):
'''
Recursively remove args whose value equals the ``omit_token``,
needed now that the argument_spec can contain suboptions
'''
if not isinstance(task_args, dict):
return task_args
new_args = {}
for i in task_args.items():
if i[1] == omit_token:
continue
elif isinstance(i[1], dict):
new_args[i[0]] = remove_omit(i[1], omit_token)
elif isinstance(i[1], list):
new_args[i[0]] = [remove_omit(v, omit_token) for v in i[1]]
else:
new_args[i[0]] = i[1]
return new_args
class TaskExecutor:
'''
This is the main worker class for the executor pipeline, which
handles loading an action plugin to actually dispatch the task to
a given host. This class roughly corresponds to the old Runner()
class.
'''
def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj, final_q, variable_manager):
self._host = host
self._task = task
self._job_vars = job_vars
self._play_context = play_context
self._new_stdin = new_stdin
self._loader = loader
self._shared_loader_obj = shared_loader_obj
self._connection = None
self._final_q = final_q
self._variable_manager = variable_manager
self._loop_eval_error = None
self._task.squash()
def run(self):
'''
The main executor entrypoint, where we determine if the specified
task requires looping and either runs the task with self._run_loop()
or self._execute(). After that, the returned results are parsed and
returned as a dict.
'''
display.debug("in run() - task %s" % self._task._uuid)
try:
try:
items = self._get_loop_items()
except AnsibleUndefinedVariable as e:
# save the error raised here for use later
items = None
self._loop_eval_error = e
if items is not None:
if len(items) > 0:
item_results = self._run_loop(items)
# create the overall result item
res = dict(results=item_results)
# loop through the item results and set the global changed/failed/skipped result flags based on any item.
res['skipped'] = True
for item in item_results:
if 'changed' in item and item['changed'] and not res.get('changed'):
res['changed'] = True
if res['skipped'] and ('skipped' not in item or ('skipped' in item and not item['skipped'])):
res['skipped'] = False
if 'failed' in item and item['failed']:
item_ignore = item.pop('_ansible_ignore_errors')
if not res.get('failed'):
res['failed'] = True
res['msg'] = 'One or more items failed'
self._task.ignore_errors = item_ignore
elif self._task.ignore_errors and not item_ignore:
self._task.ignore_errors = item_ignore
if 'unreachable' in item and item['unreachable']:
item_ignore_unreachable = item.pop('_ansible_ignore_unreachable')
if not res.get('unreachable'):
self._task.ignore_unreachable = item_ignore_unreachable
elif self._task.ignore_unreachable and not item_ignore_unreachable:
self._task.ignore_unreachable = item_ignore_unreachable
# ensure to accumulate these
for array in ['warnings', 'deprecations']:
if array in item and item[array]:
if array not in res:
res[array] = []
if not isinstance(item[array], list):
item[array] = [item[array]]
res[array] = res[array] + item[array]
del item[array]
if not res.get('failed', False):
res['msg'] = 'All items completed'
if res['skipped']:
res['msg'] = 'All items skipped'
else:
res = dict(changed=False, skipped=True, skipped_reason='No items in the list', results=[])
else:
display.debug("calling self._execute()")
res = self._execute()
display.debug("_execute() done")
# make sure changed is set in the result, if it's not present
if 'changed' not in res:
res['changed'] = False
def _clean_res(res, errors='surrogate_or_strict'):
if isinstance(res, binary_type):
return to_unsafe_text(res, errors=errors)
elif isinstance(res, dict):
for k in res:
try:
res[k] = _clean_res(res[k], errors=errors)
except UnicodeError:
if k == 'diff':
# If this is a diff, substitute a replacement character if the value
# is undecodable as utf8. (Fix #21804)
display.warning("We were unable to decode all characters in the module return data."
" Replaced some in an effort to return as much as possible")
res[k] = _clean_res(res[k], errors='surrogate_then_replace')
else:
raise
elif isinstance(res, list):
for idx, item in enumerate(res):
res[idx] = _clean_res(item, errors=errors)
return res
display.debug("dumping result to json")
res = _clean_res(res)
display.debug("done dumping result, returning")
return res
except AnsibleError as e:
return dict(failed=True, msg=wrap_var(to_text(e, nonstring='simplerepr')), _ansible_no_log=self._play_context.no_log)
except Exception as e:
return dict(failed=True, msg=wrap_var('Unexpected failure during module execution: %s' % (to_native(e, nonstring='simplerepr'))),
exception=to_text(traceback.format_exc()), stdout='', _ansible_no_log=self._play_context.no_log)
finally:
try:
self._connection.close()
except AttributeError:
pass
except Exception as e:
display.debug(u"error closing connection: %s" % to_text(e))
def _get_loop_items(self):
'''
Loads a lookup plugin to handle the with_* portion of a task (if specified),
and returns the items result.
'''
# get search path for this task to pass to lookup plugins
self._job_vars['ansible_search_path'] = self._task.get_search_path()
# ensure basedir is always in (dwim already searches here but we need to display it)
if self._loader.get_basedir() not in self._job_vars['ansible_search_path']:
self._job_vars['ansible_search_path'].append(self._loader.get_basedir())
templar = Templar(loader=self._loader, variables=self._job_vars)
items = None
if self._task.loop_with:
if self._task.loop_with in self._shared_loader_obj.lookup_loader:
# TODO: hardcoded so it fails for non first_found lookups, but this should be generalized for those that don't do their own templating
# lookup prop/attribute?
fail = bool(self._task.loop_with != 'first_found')
loop_terms = listify_lookup_plugin_terms(terms=self._task.loop, templar=templar, fail_on_undefined=fail, convert_bare=False)
# get lookup
mylookup = self._shared_loader_obj.lookup_loader.get(self._task.loop_with, loader=self._loader, templar=templar)
# give lookup task 'context' for subdir (mostly needed for first_found)
for subdir in ['template', 'var', 'file']: # TODO: move this to constants?
if subdir in self._task.action:
break
setattr(mylookup, '_subdir', subdir + 's')
# run lookup
items = wrap_var(mylookup.run(terms=loop_terms, variables=self._job_vars, wantlist=True))
else:
raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % self._task.loop_with)
elif self._task.loop is not None:
items = templar.template(self._task.loop)
if not isinstance(items, list):
raise AnsibleError(
"Invalid data passed to 'loop', it requires a list, got this instead: %s."
" Hint: If you passed a list/dict of just one element,"
" try adding wantlist=True to your lookup invocation or use q/query instead of lookup." % items
)
return items
def _run_loop(self, items):
'''
Runs the task with the loop items specified and collates the result
into an array named 'results' which is inserted into the final result
along with the item for which the loop ran.
'''
task_vars = self._job_vars
templar = Templar(loader=self._loader, variables=task_vars)
self._task.loop_control.post_validate(templar=templar)
loop_var = self._task.loop_control.loop_var
index_var = self._task.loop_control.index_var
loop_pause = self._task.loop_control.pause
extended = self._task.loop_control.extended
extended_allitems = self._task.loop_control.extended_allitems
# ensure we always have a label
label = self._task.loop_control.label or '{{' + loop_var + '}}'
if loop_var in task_vars:
display.warning(u"%s: The loop variable '%s' is already in use. "
u"You should set the `loop_var` value in the `loop_control` option for the task"
u" to something else to avoid variable collisions and unexpected behavior." % (self._task, loop_var))
ran_once = False
task_fields = None
no_log = False
items_len = len(items)
results = []
for item_index, item in enumerate(items):
task_vars['ansible_loop_var'] = loop_var
task_vars[loop_var] = item
if index_var:
task_vars['ansible_index_var'] = index_var
task_vars[index_var] = item_index
if extended:
task_vars['ansible_loop'] = {
'index': item_index + 1,
'index0': item_index,
'first': item_index == 0,
'last': item_index + 1 == items_len,
'length': items_len,
'revindex': items_len - item_index,
'revindex0': items_len - item_index - 1,
}
if extended_allitems:
task_vars['ansible_loop']['allitems'] = items
try:
task_vars['ansible_loop']['nextitem'] = items[item_index + 1]
except IndexError:
pass
if item_index - 1 >= 0:
task_vars['ansible_loop']['previtem'] = items[item_index - 1]
# Update template vars to reflect current loop iteration
templar.available_variables = task_vars
# pause between loop iterations
if loop_pause and ran_once:
time.sleep(loop_pause)
else:
ran_once = True
try:
tmp_task = self._task.copy(exclude_parent=True, exclude_tasks=True)
tmp_task._parent = self._task._parent
tmp_play_context = self._play_context.copy()
except AnsibleParserError as e:
results.append(dict(failed=True, msg=to_text(e)))
continue
# now we swap the internal task and play context with their copies,
# execute, and swap them back so we can do the next iteration cleanly
(self._task, tmp_task) = (tmp_task, self._task)
(self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
res = self._execute(variables=task_vars)
task_fields = self._task.dump_attrs()
(self._task, tmp_task) = (tmp_task, self._task)
(self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
# update 'general no_log' based on specific no_log
no_log = no_log or tmp_task.no_log
# now update the result with the item info, and append the result
# to the list of results
res[loop_var] = item
res['ansible_loop_var'] = loop_var
if index_var:
res[index_var] = item_index
res['ansible_index_var'] = index_var
if extended:
res['ansible_loop'] = task_vars['ansible_loop']
res['_ansible_item_result'] = True
res['_ansible_ignore_errors'] = task_fields.get('ignore_errors')
res['_ansible_ignore_unreachable'] = task_fields.get('ignore_unreachable')
# gets templated here unlike rest of loop_control fields, depends on loop_var above
try:
res['_ansible_item_label'] = templar.template(label)
except AnsibleUndefinedVariable as e:
res.update({
'failed': True,
'msg': 'Failed to template loop_control.label: %s' % to_text(e)
})
tr = TaskResult(
self._host.name,
self._task._uuid,
res,
task_fields=task_fields,
)
if tr.is_failed() or tr.is_unreachable():
self._final_q.send_callback('v2_runner_item_on_failed', tr)
elif tr.is_skipped():
self._final_q.send_callback('v2_runner_item_on_skipped', tr)
else:
if getattr(self._task, 'diff', False):
self._final_q.send_callback('v2_on_file_diff', tr)
if self._task.action not in C._ACTION_INVENTORY_TASKS:
self._final_q.send_callback('v2_runner_item_on_ok', tr)
results.append(res)
del task_vars[loop_var]
# clear 'connection related' plugin variables for next iteration
if self._connection:
clear_plugins = {
'connection': self._connection._load_name,
'shell': self._connection._shell._load_name
}
if self._connection.become:
clear_plugins['become'] = self._connection.become._load_name
for plugin_type, plugin_name in clear_plugins.items():
for var in C.config.get_plugin_vars(plugin_type, plugin_name):
if var in task_vars and var not in self._job_vars:
del task_vars[var]
self._task.no_log = no_log
# NOTE: run_once cannot contain loop vars because it's templated earlier also
# This is saving the post-validated field from the last loop so the strategy can use the templated value post task execution
self._task.run_once = task_fields.get('run_once')
self._task.action = task_fields.get('action')
return results
def _calculate_delegate_to(self, templar, variables):
"""This method is responsible for effectively pre-validating Task.delegate_to and will
happen before Task.post_validate is executed
"""
delegated_vars, delegated_host_name = self._variable_manager.get_delegated_vars_and_hostname(
templar,
self._task,
variables
)
# At the point this is executed it is safe to mutate self._task,
# since `self._task` is either a copy referred to by `tmp_task` in `_run_loop`
# or just a singular non-looped task
if delegated_host_name:
self._task.delegate_to = delegated_host_name
variables.update(delegated_vars)
def _execute(self, variables=None):
'''
The primary workhorse of the executor system, this runs the task
on the specified host (which may be the delegated_to host) and handles
the retry/until and block rescue/always execution
'''
if variables is None:
variables = self._job_vars
templar = Templar(loader=self._loader, variables=variables)
self._calculate_delegate_to(templar, variables)
context_validation_error = None
# work on a temporary copy of the variables so the play context and conditional evaluation below can add/use a certain subset of variables without polluting the originals
tempvars = variables.copy()
try:
# TODO: remove play_context as this does not take delegation nor loops correctly into account,
# the task itself should hold the correct values for connection/shell/become/terminal plugin options to finalize.
# Kept for now for backwards compatibility and a few functions that are still exclusive to it.
# apply the given task's information to the connection info,
# which may override some fields already set by the play or
# the options specified on the command line
self._play_context = self._play_context.set_task_and_variable_override(task=self._task, variables=variables, templar=templar)
# fields set from the play/task may be based on variables, so we have to
# do the same kind of post validation step on it here before we use it.
self._play_context.post_validate(templar=templar)
# now that the play context is finalized, if the remote_addr is not set
# default to using the host's address field as the remote address
if not self._play_context.remote_addr:
self._play_context.remote_addr = self._host.address
# We also add "magic" variables back into the variables dict to make sure they are available when templating the conditional below
self._play_context.update_vars(tempvars)
except AnsibleError as e:
# save the error, which we'll raise later if we don't end up
# skipping this task during the conditional evaluation step
context_validation_error = e
no_log = self._play_context.no_log
# Evaluate the conditional (if any) for this task, which we do before running
# the final task post-validation. We do this before the post validation due to
# the fact that the conditional may specify that the task be skipped due to a
# variable not being present which would otherwise cause validation to fail
try:
conditional_result, false_condition = self._task.evaluate_conditional_with_result(templar, tempvars)
if not conditional_result:
display.debug("when evaluation is False, skipping this task")
return dict(changed=False, skipped=True, skip_reason='Conditional result was False',
false_condition=false_condition, _ansible_no_log=no_log)
except AnsibleError as e:
# loop error takes precedence
if self._loop_eval_error is not None:
# Display the error from the conditional as well to prevent
# losing information useful for debugging.
display.v(to_text(e))
raise self._loop_eval_error # pylint: disable=raising-bad-type
raise
# Not skipping; if a loop error was raised earlier we need to raise it now to halt the execution of this task
if self._loop_eval_error is not None:
raise self._loop_eval_error # pylint: disable=raising-bad-type
# if we ran into an error while setting up the PlayContext, raise it now, unless it is a known issue with delegation
# and undefined vars (the correct values are in cvars later on; if the connection plugins still hit the error, they blow up there)
if context_validation_error is not None:
raiseit = True
if self._task.delegate_to:
if isinstance(context_validation_error, AnsibleUndefinedVariable):
raiseit = False
elif isinstance(context_validation_error, AnsibleParserError):
# parser error, might be caused by an undefined variable too
orig_exc = getattr(context_validation_error, 'orig_exc', None)
if isinstance(orig_exc, AnsibleUndefinedVariable):
raiseit = False
if raiseit:
raise context_validation_error # pylint: disable=raising-bad-type
# set templar to use temp variables until loop is evaluated
templar.available_variables = tempvars
# if this task is a TaskInclude, we just return now with a success code so the
# main thread can expand the task list for the given host
if self._task.action in C._ACTION_INCLUDE_TASKS:
include_args = self._task.args.copy()
include_file = include_args.pop('_raw_params', None)
if not include_file:
return dict(failed=True, msg="No include file was specified to the include")
include_file = templar.template(include_file)
return dict(include=include_file, include_args=include_args)
# if this task is a IncludeRole, we just return now with a success code so the main thread can expand the task list for the given host
elif self._task.action in C._ACTION_INCLUDE_ROLE:
include_args = self._task.args.copy()
return dict(include_args=include_args)
# Now we do final validation on the task, which sets all fields to their final values.
try:
self._task.post_validate(templar=templar)
except AnsibleError:
raise
except Exception:
return dict(changed=False, failed=True, _ansible_no_log=no_log, exception=to_text(traceback.format_exc()))
if '_variable_params' in self._task.args:
variable_params = self._task.args.pop('_variable_params')
if isinstance(variable_params, dict):
if C.INJECT_FACTS_AS_VARS:
display.warning("Using a variable for a task's 'args' is unsafe in some situations "
"(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)")
variable_params.update(self._task.args)
self._task.args = variable_params
else:
# if we didn't get a dict, it means there's garbage remaining after k=v parsing, just give up
# see https://github.com/ansible/ansible/issues/79862
raise AnsibleError(f"invalid or malformed argument: '{variable_params}'")
# update no_log to task value, now that we have it templated
no_log = self._task.no_log
# free tempvars up, not used anymore, cvars and vars_copy should be mainly used after this point
# updating the original 'variables' at the end
tempvars = {}
# setup cvars copy, used for all connection related templating
if self._task.delegate_to:
# use vars from delegated host (which already include task vars) instead of original host
cvars = variables.get('ansible_delegated_vars', {}).get(self._task.delegate_to, {})
else:
# just use normal host vars
cvars = variables
templar.available_variables = cvars
# use the magic var if it exists; if not, let task inheritance do its thing.
if cvars.get('ansible_connection') is not None:
current_connection = templar.template(cvars['ansible_connection'])
else:
current_connection = self._task.connection
# get the connection and the handler for this execution
if (not self._connection or
not getattr(self._connection, 'connected', False) or
not self._connection.matches_name([current_connection]) or
# pc compare, left here for old plugins, but should be irrelevant for those
# using get_option, since they are cleared each iteration.
self._play_context.remote_addr != self._connection._play_context.remote_addr):
self._connection = self._get_connection(cvars, templar, current_connection)
else:
# if connection is reused, its _play_context is no longer valid and needs
# to be replaced with the one templated above, in case other data changed
self._connection._play_context = self._play_context
self._set_become_plugin(cvars, templar, self._connection)
plugin_vars = self._set_connection_options(cvars, templar)
# make a copy of the job vars here, as we update them here and later,
# but don't want to pollute original
vars_copy = variables.copy()
# update with connection info (i.e ansible_host/ansible_user)
self._connection.update_vars(vars_copy)
templar.available_variables = vars_copy
# TODO: eventually remove as pc is taken out of the resolution path
# feed back into pc to ensure plugins not using get_option can get correct value
self._connection._play_context = self._play_context.set_task_and_variable_override(task=self._task, variables=vars_copy, templar=templar)
# TODO: eventually remove this block as this should be a 'consequence' of 'forced_local' modules, right now rely on remote_is_local connection
# special handling for python interpreter for network_os, default to ansible python unless overridden
if 'ansible_python_interpreter' not in cvars and 'ansible_network_os' in cvars and getattr(self._connection, '_remote_is_local', False):
# this also avoids 'python discovery'
cvars['ansible_python_interpreter'] = sys.executable
# get handler
self._handler, module_context = self._get_action_handler_with_module_context(templar=templar)
if module_context is not None:
module_defaults_fqcn = module_context.resolved_fqcn
else:
module_defaults_fqcn = self._task.resolved_action
# Apply default params for action/module, if present
self._task.args = get_action_args_with_defaults(
module_defaults_fqcn, self._task.args, self._task.module_defaults, templar,
action_groups=self._task._parent._play._action_groups
)
# And filter out any fields which were set to default(omit), and got the omit token value
omit_token = variables.get('omit')
if omit_token is not None:
self._task.args = remove_omit(self._task.args, omit_token)
retries = 1 # includes the default actual run + retries set by user/default
if self._task.retries is not None:
retries += max(0, self._task.retries)
elif self._task.until:
retries += 3 # the default is not set in FA because we need to differentiate "unset" value
delay = self._task.delay
if delay < 0:
delay = 1
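# The attempt loop below runs the action handler up to 'retries' times,
# re-evaluating the 'until' condition (or plain success when no 'until' is
# given) after each attempt and sleeping 'delay' seconds between attempts;
# the result of the final attempt is the one post-processed and returned below.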
display.debug("starting attempt loop")
result = None
for attempt in range(1, retries + 1):
display.debug("running the handler")
try:
if self._task.timeout:
old_sig = signal.signal(signal.SIGALRM, task_timeout)
signal.alarm(self._task.timeout)
result = self._handler.run(task_vars=vars_copy)
except (AnsibleActionFail, AnsibleActionSkip) as e:
return e.result
except AnsibleConnectionFailure as e:
return dict(unreachable=True, msg=to_text(e))
except TaskTimeoutError as e:
msg = 'The %s action failed to execute in the expected time frame (%d) and was terminated' % (self._task.action, self._task.timeout)
return dict(failed=True, msg=msg)
finally:
if self._task.timeout:
signal.alarm(0)
old_sig = signal.signal(signal.SIGALRM, old_sig)
self._handler.cleanup()
display.debug("handler run complete")
# preserve no log
result["_ansible_no_log"] = no_log
if self._task.action not in C._ACTION_WITH_CLEAN_FACTS:
result = wrap_var(result)
# update the local copy of vars with the registered value, if specified,
# or any facts which may have been generated by the module execution
if self._task.register:
if not isidentifier(self._task.register):
raise AnsibleError("Invalid variable name in 'register' specified: '%s'" % self._task.register)
vars_copy[self._task.register] = result
if self._task.async_val > 0:
if self._task.poll > 0 and not result.get('skipped') and not result.get('failed'):
result = self._poll_async_result(result=result, templar=templar, task_vars=vars_copy)
if result.get('failed'):
self._final_q.send_callback(
'v2_runner_on_async_failed',
TaskResult(self._host.name,
self._task._uuid,
result,
task_fields=self._task.dump_attrs()))
else:
self._final_q.send_callback(
'v2_runner_on_async_ok',
TaskResult(self._host.name,
self._task._uuid,
result,
task_fields=self._task.dump_attrs()))
# ensure no log is preserved
result["_ansible_no_log"] = no_log
# helper methods for use below in evaluating changed/failed_when
def _evaluate_changed_when_result(result):
if self._task.changed_when is not None and self._task.changed_when:
cond = Conditional(loader=self._loader)
cond.when = self._task.changed_when
result['changed'] = cond.evaluate_conditional(templar, vars_copy)
def _evaluate_failed_when_result(result):
if self._task.failed_when:
cond = Conditional(loader=self._loader)
cond.when = self._task.failed_when
failed_when_result = cond.evaluate_conditional(templar, vars_copy)
result['failed_when_result'] = result['failed'] = failed_when_result
else:
failed_when_result = False
return failed_when_result
if 'ansible_facts' in result and self._task.action not in C._ACTION_DEBUG:
if self._task.action in C._ACTION_WITH_CLEAN_FACTS:
if self._task.delegate_to and self._task.delegate_facts:
if '_ansible_delegated_vars' in vars_copy:
vars_copy['_ansible_delegated_vars'].update(result['ansible_facts'])
else:
vars_copy['_ansible_delegated_vars'] = result['ansible_facts']
else:
vars_copy.update(result['ansible_facts'])
else:
# TODO: cleaning of facts should eventually become part of taskresults instead of vars
af = wrap_var(result['ansible_facts'])
vars_copy['ansible_facts'] = combine_vars(vars_copy.get('ansible_facts', {}), namespace_facts(af))
if C.INJECT_FACTS_AS_VARS:
vars_copy.update(clean_facts(af))
# set the failed property if it was missing.
if 'failed' not in result:
# rc is here for backwards compatibility and modules that use it instead of 'failed'
if 'rc' in result and result['rc'] not in [0, "0"]:
result['failed'] = True
else:
result['failed'] = False
# Make attempts and retries available early to allow their use in changed/failed_when
if retries > 1:
result['attempts'] = attempt
# set the changed property if it was missing.
if 'changed' not in result:
result['changed'] = False
if self._task.action not in C._ACTION_WITH_CLEAN_FACTS:
result = wrap_var(result)
# re-update the local copy of vars with the registered value, if specified,
# or any facts which may have been generated by the module execution
# This gives changed/failed_when access to additional recently modified
# attributes of result
if self._task.register:
vars_copy[self._task.register] = result
# if we didn't skip this task, use the helpers to evaluate the changed/
# failed_when properties
if 'skipped' not in result:
condname = 'changed'
try:
_evaluate_changed_when_result(result)
condname = 'failed'
_evaluate_failed_when_result(result)
except AnsibleError as e:
result['failed'] = True
result['%s_when_result' % condname] = to_text(e)
if retries > 1:
cond = Conditional(loader=self._loader)
cond.when = self._task.until or [not result['failed']]
if cond.evaluate_conditional(templar, vars_copy):
break
else:
# no conditional check, or it failed, so sleep for the specified time
if attempt < retries:
result['_ansible_retry'] = True
result['retries'] = retries
display.debug('Retrying task, attempt %d of %d' % (attempt, retries))
self._final_q.send_callback(
'v2_runner_retry',
TaskResult(
self._host.name,
self._task._uuid,
result,
task_fields=self._task.dump_attrs()
)
)
time.sleep(delay)
self._handler = self._get_action_handler(templar=templar)
else:
if retries > 1:
# we ran out of attempts, so mark the result as failed
result['attempts'] = retries - 1
result['failed'] = True
if self._task.action not in C._ACTION_WITH_CLEAN_FACTS:
result = wrap_var(result)
# do the final update of the local variables here, for both registered
# values and any facts which may have been created
if self._task.register:
variables[self._task.register] = result
if 'ansible_facts' in result and self._task.action not in C._ACTION_DEBUG:
if self._task.action in C._ACTION_WITH_CLEAN_FACTS:
variables.update(result['ansible_facts'])
else:
# TODO: cleaning of facts should eventually become part of taskresults instead of vars
af = wrap_var(result['ansible_facts'])
variables['ansible_facts'] = combine_vars(variables.get('ansible_facts', {}), namespace_facts(af))
if C.INJECT_FACTS_AS_VARS:
variables.update(clean_facts(af))
# save the notification target in the result, if it was specified, as
# this task may be running in a loop in which case the notification
# may be item-specific, ie. "notify: service {{item}}"
if self._task.notify is not None:
result['_ansible_notify'] = self._task.notify
# add the delegated vars to the result, so we can reference them
# on the results side without having to do any further templating
# also now add connection vars results when delegating
if self._task.delegate_to:
result["_ansible_delegated_vars"] = {'ansible_delegated_host': self._task.delegate_to}
for k in plugin_vars:
result["_ansible_delegated_vars"][k] = cvars.get(k)
# note: here for callbacks that rely on this info to display delegation
for requireshed in ('ansible_host', 'ansible_port', 'ansible_user', 'ansible_connection'):
if requireshed not in result["_ansible_delegated_vars"] and requireshed in cvars:
result["_ansible_delegated_vars"][requireshed] = cvars.get(requireshed)
# and return
display.debug("attempt loop complete, returning result")
return result
def _poll_async_result(self, result, templar, task_vars=None):
'''
Polls for the specified JID to be complete
'''
if task_vars is None:
task_vars = self._job_vars
async_jid = result.get('ansible_job_id')
if async_jid is None:
return dict(failed=True, msg="No job id was returned by the async task")
# Create a new pseudo-task to run the async_status module, and run
# that (with a sleep for "poll" seconds between each retry) until the
# async time limit is exceeded.
async_task = Task.load(dict(action='async_status', args={'jid': async_jid}, environment=self._task.environment))
# FIXME: this is no longer the case, normal takes care of all, see if this can just be generalized
# Because this is an async task, the action handler is async. However,
# we need the 'normal' action handler for the status check, so get it
# now via the action_loader
async_handler = self._shared_loader_obj.action_loader.get(
'ansible.legacy.async_status',
task=async_task,
connection=self._connection,
play_context=self._play_context,
loader=self._loader,
templar=templar,
shared_loader_obj=self._shared_loader_obj,
)
time_left = self._task.async_val
while time_left > 0:
time.sleep(self._task.poll)
try:
async_result = async_handler.run(task_vars=task_vars)
# We do not bail out of the loop in cases where the failure
# is associated with a parsing error. The async_runner can
# have issues which result in a half-written/unparseable result
# file on disk, which manifests to the user as a timeout happening
# before it's time to timeout.
if (int(async_result.get('finished', 0)) == 1 or
('failed' in async_result and async_result.get('_ansible_parsed', False)) or
'skipped' in async_result):
break
except Exception as e:
# Connections can raise exceptions during polling (eg, network bounce, reboot); these should be non-fatal.
# On an exception, call the connection's reset method if it has one
# (eg, drop/recreate WinRM connection; some reused connections are in a broken state)
display.vvvv("Exception during async poll, retrying... (%s)" % to_text(e))
display.debug("Async poll exception was:\n%s" % to_text(traceback.format_exc()))
try:
async_handler._connection.reset()
except AttributeError:
pass
# Little hack to raise the exception if we've exhausted the timeout period
time_left -= self._task.poll
if time_left <= 0:
raise
else:
time_left -= self._task.poll
self._final_q.send_callback(
'v2_runner_on_async_poll',
TaskResult(
self._host.name,
async_task._uuid,
async_result,
task_fields=async_task.dump_attrs(),
),
)
if int(async_result.get('finished', 0)) != 1:
if async_result.get('_ansible_parsed'):
return dict(failed=True, msg="async task did not complete within the requested time - %ss" % self._task.async_val, async_result=async_result)
else:
return dict(failed=True, msg="async task produced unparseable results", async_result=async_result)
else:
# If the async task finished, automatically cleanup the temporary
# status file left behind.
cleanup_task = Task.load(
{
'async_status': {
'jid': async_jid,
'mode': 'cleanup',
},
'environment': self._task.environment,
}
)
cleanup_handler = self._shared_loader_obj.action_loader.get(
'ansible.legacy.async_status',
task=cleanup_task,
connection=self._connection,
play_context=self._play_context,
loader=self._loader,
templar=templar,
shared_loader_obj=self._shared_loader_obj,
)
cleanup_handler.run(task_vars=task_vars)
cleanup_handler.cleanup(force=True)
async_handler.cleanup(force=True)
return async_result
def _get_become(self, name):
become = become_loader.get(name)
if not become:
raise AnsibleError("Invalid become method specified, could not find matching plugin: '%s'. "
"Use `ansible-doc -t become -l` to list available plugins." % name)
return become
def _get_connection(self, cvars, templar, current_connection):
'''
Reads the connection property for the host, and returns the
correct connection object from the list of connection plugins
'''
self._play_context.connection = current_connection
# TODO: play context has logic to update the connection for 'smart'
# (default value, will choose between ssh and paramiko) and 'persistent'
# (really paramiko), eventually this should move to task object itself.
conn_type = self._play_context.connection
connection, plugin_load_context = self._shared_loader_obj.connection_loader.get_with_context(
conn_type,
self._play_context,
self._new_stdin,
task_uuid=self._task._uuid,
ansible_playbook_pid=to_text(os.getppid())
)
if not connection:
raise AnsibleError("the connection plugin '%s' was not found" % conn_type)
self._set_become_plugin(cvars, templar, connection)
# Also backwards compat call for those still using play_context
self._play_context.set_attributes_from_plugin(connection)
return connection
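# Helper for the connection setup above: decides whether privilege escalation
# is active for this task (the 'ansible_become' host var overrides the task
# keyword), loads the matching become plugin and attaches it to the given
# connection, clearing any previously set plugin when become is disabled.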
def _set_become_plugin(self, cvars, templar, connection):
# load become plugin if needed
if cvars.get('ansible_become') is not None:
become = boolean(templar.template(cvars['ansible_become']))
else:
become = self._task.become
if become:
if cvars.get('ansible_become_method'):
become_plugin = self._get_become(templar.template(cvars['ansible_become_method']))
else:
become_plugin = self._get_become(self._task.become_method)
else:
# If become is not enabled on the task it needs to be removed from the connection plugin
# https://github.com/ansible/ansible/issues/78425
become_plugin = None
try:
connection.set_become_plugin(become_plugin)
except AttributeError:
# Older connection plugin that does not support set_become_plugin
pass
if become_plugin:
if getattr(connection.become, 'require_tty', False) and not getattr(connection, 'has_tty', False):
raise AnsibleError(
"The '%s' connection does not provide a TTY which is required for the selected "
"become plugin: %s." % (connection._load_name, become_plugin.name)
)
# Backwards compat for connection plugins that don't support become plugins
# Just do this unconditionally for now, we could move it inside of the
# AttributeError above later
self._play_context.set_become_plugin(become_plugin.name)
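# Templates the documented variables of a single plugin (shell, become or a
# networking sub-plugin) attached to the current connection, feeds them into
# the plugin via set_options() together with the task keywords, and returns
# the list of variable names that were consumed.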
def _set_plugin_options(self, plugin_type, variables, templar, task_keys):
try:
plugin = getattr(self._connection, '_%s' % plugin_type)
except AttributeError:
# Some plugins are assigned to private attrs, ``become`` is not
plugin = getattr(self._connection, plugin_type)
# network_cli's "real" connection plugin is not named connection
# to avoid the confusion of having connection.connection
if plugin_type == "ssh_type_conn":
plugin_type = "connection"
option_vars = C.config.get_plugin_vars(plugin_type, plugin._load_name)
options = {}
for k in option_vars:
if k in variables:
options[k] = templar.template(variables[k])
# TODO move to task method?
plugin.set_options(task_keys=task_keys, var_options=options)
return option_vars
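# Entry point for configuring the connection stack: templates the connection
# plugin's own variables (plus '_extras' when supported), threads task keywords
# and play_context passwords/timeouts through, then configures the shell,
# become and any networking sub-plugins. The returned variable names are used
# later when reporting delegated connection vars in the task result.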
def _set_connection_options(self, variables, templar):
# keep list of variable names possibly consumed
varnames = []
# grab list of usable vars for this plugin
option_vars = C.config.get_plugin_vars('connection', self._connection._load_name)
varnames.extend(option_vars)
# create dict of 'templated vars'
options = {'_extras': {}}
for k in option_vars:
if k in variables:
options[k] = templar.template(variables[k])
# add extras if plugin supports them
if getattr(self._connection, 'allow_extras', False):
for k in variables:
if k.startswith('ansible_%s_' % self._connection._load_name) and k not in options:
options['_extras'][k] = templar.template(variables[k])
task_keys = self._task.dump_attrs()
# The task_keys 'timeout' attr is the task's timeout, not the connection timeout.
# The connection timeout is threaded through the play_context for now.
task_keys['timeout'] = self._play_context.timeout
if self._play_context.password:
# The connection password is threaded through the play_context for
# now. This is something we ultimately want to avoid, but the first
# step is to get connection plugins pulling the password through the
# config system instead of directly accessing play_context.
task_keys['password'] = self._play_context.password
# Prevent task retries from overriding connection retries
del task_keys['retries']
# set options with 'templated vars' specific to this plugin and dependent ones
self._connection.set_options(task_keys=task_keys, var_options=options)
varnames.extend(self._set_plugin_options('shell', variables, templar, task_keys))
if self._connection.become is not None:
if self._play_context.become_pass:
# FIXME: eventually remove from task and play_context, here for backwards compat
# keep out of play objects to avoid accidental disclosure, only become plugin should have
# The become pass is already in the play_context if given on
# the CLI (-K). Make the plugin aware of it in this case.
task_keys['become_pass'] = self._play_context.become_pass
varnames.extend(self._set_plugin_options('become', variables, templar, task_keys))
# FOR BACKWARDS COMPAT:
for option in ('become_user', 'become_flags', 'become_exe', 'become_pass'):
try:
setattr(self._play_context, option, self._connection.become.get_option(option))
except KeyError:
pass # some plugins don't support all base flags
self._play_context.prompt = self._connection.become.prompt
# deals with networking sub_plugins (network_cli/httpapi/netconf)
sub = getattr(self._connection, '_sub_plugin', None)
if sub is not None and sub.get('type') != 'external':
plugin_type = get_plugin_class(sub.get("obj"))
varnames.extend(self._set_plugin_options(plugin_type, variables, templar, task_keys))
sub_conn = getattr(self._connection, 'ssh_type_conn', None)
if sub_conn is not None:
varnames.extend(self._set_plugin_options("ssh_type_conn", variables, templar, task_keys))
return varnames
def _get_action_handler(self, templar):
'''
Returns the correct action plugin to handle the requested task action
'''
return self._get_action_handler_with_module_context(templar)[0]
def _get_action_handler_with_module_context(self, templar):
'''
Returns the correct action plugin to handle the requested task action and the module context
'''
module_collection, separator, module_name = self._task.action.rpartition(".")
module_prefix = module_name.split('_')[0]
if module_collection:
# For network modules, which look for one action plugin per platform, look for the
# action plugin in the same collection as the module by prefixing the action plugin
# with the same collection.
network_action = "{0}.{1}".format(module_collection, module_prefix)
else:
network_action = module_prefix
collections = self._task.collections
# Check if the module has specified an action handler
module = self._shared_loader_obj.module_loader.find_plugin_with_context(
self._task.action, collection_list=collections
)
if not module.resolved or not module.action_plugin:
module = None
if module is not None:
handler_name = module.action_plugin
# let action plugin override module, fallback to 'normal' action plugin otherwise
elif self._shared_loader_obj.action_loader.has_plugin(self._task.action, collection_list=collections):
handler_name = self._task.action
elif all((module_prefix in C.NETWORK_GROUP_MODULES, self._shared_loader_obj.action_loader.has_plugin(network_action, collection_list=collections))):
handler_name = network_action
display.vvvv("Using network group action {handler} for {action}".format(handler=handler_name,
action=self._task.action),
host=self._play_context.remote_addr)
else:
# use ansible.legacy.normal to allow (historic) local action_plugins/ override without collections search
handler_name = 'ansible.legacy.normal'
collections = None # until then, we don't want the task's collection list to be consulted; use the builtin
# networking / persistent connections handling
if any(((self._connection.supports_persistence and C.USE_PERSISTENT_CONNECTIONS), self._connection.force_persistence)):
# check the handler in case we don't need to do all the work to set up a persistent connection
handler_class = self._shared_loader_obj.action_loader.get(handler_name, class_only=True)
if getattr(handler_class, '_requires_connection', True):
# for persistent connections, initialize socket path and start connection manager
self._play_context.timeout = self._connection.get_option('persistent_command_timeout')
display.vvvv('attempting to start connection', host=self._play_context.remote_addr)
display.vvvv('using connection plugin %s' % self._connection.transport, host=self._play_context.remote_addr)
options = self._connection.get_options()
socket_path = start_connection(self._play_context, options, self._task._uuid)
display.vvvv('local domain socket path is %s' % socket_path, host=self._play_context.remote_addr)
setattr(self._connection, '_socket_path', socket_path)
else:
# TODO: set self._connection to dummy/noop connection, using local for now
self._connection = self._get_connection({}, templar, 'local')
handler = self._shared_loader_obj.action_loader.get(
handler_name,
task=self._task,
connection=self._connection,
play_context=self._play_context,
loader=self._loader,
templar=templar,
shared_loader_obj=self._shared_loader_obj,
collection_list=collections
)
if not handler:
raise AnsibleError("the handler '%s' was not found" % handler_name)
return handler, module
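# start_connection() below spawns the 'ansible-connection' helper process over
# a pty, writes the templated plugin options and the serialized play context to
# it, and reads back the path of the local domain socket on which the
# persistent connection listens.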
def start_connection(play_context, options, task_uuid):
'''
Starts the persistent connection
'''
candidate_paths = [C.ANSIBLE_CONNECTION_PATH or os.path.dirname(sys.argv[0])]
candidate_paths.extend(os.environ.get('PATH', '').split(os.pathsep))
for dirname in candidate_paths:
ansible_connection = os.path.join(dirname, 'ansible-connection')
if os.path.isfile(ansible_connection):
display.vvvv("Found ansible-connection at path {0}".format(ansible_connection))
break
else:
raise AnsibleError("Unable to find location of 'ansible-connection'. "
"Please set or check the value of ANSIBLE_CONNECTION_PATH")
env = os.environ.copy()
env.update({
# HACK; most of these paths may change during the controller's lifetime
# (eg, due to late dynamic role includes, multi-playbook execution), without a way
# to invalidate/update, ansible-connection won't always see the same plugins the controller
# can.
'ANSIBLE_BECOME_PLUGINS': become_loader.print_paths(),
'ANSIBLE_CLICONF_PLUGINS': cliconf_loader.print_paths(),
'ANSIBLE_COLLECTIONS_PATH': to_native(os.pathsep.join(AnsibleCollectionConfig.collection_paths)),
'ANSIBLE_CONNECTION_PLUGINS': connection_loader.print_paths(),
'ANSIBLE_HTTPAPI_PLUGINS': httpapi_loader.print_paths(),
'ANSIBLE_NETCONF_PLUGINS': netconf_loader.print_paths(),
'ANSIBLE_TERMINAL_PLUGINS': terminal_loader.print_paths(),
})
verbosity = []
if display.verbosity:
verbosity.append('-%s' % ('v' * display.verbosity))
python = sys.executable
master, slave = pty.openpty()
p = subprocess.Popen(
[python, ansible_connection, *verbosity, to_text(os.getppid()), to_text(task_uuid)],
stdin=slave, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
)
os.close(slave)
# We need to set the pty into noncanonical mode. This ensures that we
# can receive lines longer than 4095 characters (plus newline) without
# truncating.
old = termios.tcgetattr(master)
new = termios.tcgetattr(master)
new[3] = new[3] & ~termios.ICANON
try:
termios.tcsetattr(master, termios.TCSANOW, new)
write_to_file_descriptor(master, options)
write_to_file_descriptor(master, play_context.serialize())
(stdout, stderr) = p.communicate()
finally:
termios.tcsetattr(master, termios.TCSANOW, old)
os.close(master)
if p.returncode == 0:
result = json.loads(to_text(stdout, errors='surrogate_then_replace'))
else:
try:
result = json.loads(to_text(stderr, errors='surrogate_then_replace'))
except json.decoder.JSONDecodeError:
result = {'error': to_text(stderr, errors='surrogate_then_replace')}
if 'messages' in result:
for level, message in result['messages']:
if level == 'log':
display.display(message, log_only=True)
elif level in ('debug', 'v', 'vv', 'vvv', 'vvvv', 'vvvvv', 'vvvvvv'):
getattr(display, level)(message, host=play_context.remote_addr)
else:
if hasattr(display, level):
getattr(display, level)(message)
else:
display.vvvv(message, host=play_context.remote_addr)
if 'error' in result:
if display.verbosity > 2:
if result.get('exception'):
msg = "The full traceback is:\n" + result['exception']
display.display(msg, color=C.COLOR_ERROR)
raise AnsibleError(result['error'])
return result['socket_path']
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,264 |
delegate_to: "{{var}}" when: var != "" causes a "Supplied entity must be Host or Group, got <class 'ansible.inventory.host.Host'> instead" error on 2.16
|
### Summary
I have a task to conditionally install an SSH key on another machine:
```yaml
- authorized_key: ...
delegate_to: "{{ jenkins_install_key_on }}"
when: jenkins_install_key_on != ""
```
This works fine when `jenkins_install_key_on` is set to a non-blank value, but fails when `jenkins_install_key_on` is set to an empty string:
```
fatal: [jammy -> {{ jenkins_install_key_on }}]: FAILED! => {"msg": "Supplied entity must be Host or Group, got <class 'ansible.inventory.host.Host'> instead"}
```
It used to work with ansible-core 2.15 and older.
### Issue Type
Bug Report
### Component Name
delegate_to
### Ansible Version
```console
$ ansible --version
ansible [core 2.16.0]
config file = /home/mg/src/deployments/provisioning/ansible.cfg
configured module search path = ['/home/mg/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/mg/.local/pipx/venvs/ansible/lib/python3.11/site-packages/ansible
ansible collection location = /home/mg/.ansible/collections:/usr/share/ansible/collections
executable location = /home/mg/.local/bin/ansible
python version = 3.11.6 (main, Oct 8 2023, 05:06:43) [GCC 13.2.0] (/home/mg/.local/pipx/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
ACTION_WARNINGS(/home/mg/src/deployments/provisioning/ansible.cfg) = False
CACHE_PLUGIN(/home/mg/src/deployments/provisioning/ansible.cfg) = jsonfile
CACHE_PLUGIN_CONNECTION(/home/mg/src/deployments/provisioning/ansible.cfg) = .cache/facts/
CACHE_PLUGIN_TIMEOUT(/home/mg/src/deployments/provisioning/ansible.cfg) = 86400
CALLBACKS_ENABLED(/home/mg/src/deployments/provisioning/ansible.cfg) = ['fancy_html']
CONFIG_FILE() = /home/mg/src/deployments/provisioning/ansible.cfg
DEFAULT_FORKS(/home/mg/src/deployments/provisioning/ansible.cfg) = 15
DEFAULT_GATHERING(/home/mg/src/deployments/provisioning/ansible.cfg) = smart
DEFAULT_HOST_LIST(/home/mg/src/deployments/provisioning/ansible.cfg) = ['/home/mg/src/deployments/provisioning/inventory']
DEFAULT_LOG_PATH(/home/mg/src/deployments/provisioning/ansible.cfg) = /home/mg/src/deployments/provisioning/.cache/ansible.log
DEFAULT_REMOTE_USER(/home/mg/src/deployments/provisioning/ansible.cfg) = root
DEFAULT_STDOUT_CALLBACK(/home/mg/src/deployments/provisioning/ansible.cfg) = yaml
DEFAULT_VAULT_PASSWORD_FILE(/home/mg/src/deployments/provisioning/ansible.cfg) = /home/mg/src/deployments/provisioning/askpas>
EDITOR(env: EDITOR) = vim
INTERPRETER_PYTHON(/home/mg/src/deployments/provisioning/ansible.cfg) = python3
RETRY_FILES_ENABLED(/home/mg/src/deployments/provisioning/ansible.cfg) = False
CACHE:
=====
jsonfile:
________
_timeout(/home/mg/src/deployments/provisioning/ansible.cfg) = 86400
_uri(/home/mg/src/deployments/provisioning/ansible.cfg) = /home/mg/src/deployments/provisioning/.cache/facts
CONNECTION:
==========
paramiko_ssh:
____________
remote_user(/home/mg/src/deployments/provisioning/ansible.cfg) = root
ssh:
___
remote_user(/home/mg/src/deployments/provisioning/ansible.cfg) = root
```
### OS / Environment
Ubuntu 23.10
### Steps to Reproduce
```yaml (paste below)
- hosts: all
gather_facts: no
tasks:
- debug: msg="hello"
delegate_to: "{{ var }}"
when: var != ""
vars:
var: ""
```
### Expected Results
I expect the task to be skipped.
### Actual Results
```console
$ ansible-playbook -i localhost, ansible-delegate-to-blank.yml -vvvv
ansible-playbook [core 2.16.0]
config file = None
configured module search path = ['/home/mg/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/mg/.local/pipx/venvs/ansible/lib/python3.11/site-packages/ansible
ansible collection location = /home/mg/.ansible/collections:/usr/share/ansible/collections
executable location = /home/mg/.local/bin/ansible-playbook
python version = 3.11.6 (main, Oct 8 2023, 05:06:43) [GCC 13.2.0] (/home/mg/.local/pipx/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
No config file found; using defaults
setting up inventory plugins
Loading collection ansible.builtin from
Set default localhost to localhost
Parsed localhost, inventory source with host_list plugin
Loading callback plugin default of type stdout, v2.0 from /home/mg/.local/pipx/venvs/ansible/lib/python3.11/site-packages/ansible/plugins/callback/default.py
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
PLAYBOOK: ansible-delegate-to-blank.yml **************************************************************************************
Positional arguments: ansible-delegate-to-blank.yml
verbosity: 4
connection: ssh
become_method: sudo
tags: ('all',)
inventory: ('localhost,',)
forks: 5
1 plays in ansible-delegate-to-blank.yml
PLAY [all] *******************************************************************************************************************
TASK [debug] *****************************************************************************************************************
task path: /home/mg/tmp/ansible-delegate-to-blank.yml:4
fatal: [localhost -> {{ var }}]: FAILED! => {
"msg": "Supplied entity must be Host or Group, got <class 'ansible.inventory.host.Host'> instead"
}
PLAY RECAP *******************************************************************************************************************
localhost : ok=0 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82264
|
https://github.com/ansible/ansible/pull/82319
|
3a42a0036875c8cab6a62ab9ea67a365e1dd4781
|
6ebefaceb6cd0d4961776a94d63a71fc1fc28bc0
| 2023-11-22T08:22:01Z |
python
| 2023-12-04T15:19:12Z |
lib/ansible/vars/manager.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import os
import sys
from collections import defaultdict
from collections.abc import Mapping, MutableMapping, Sequence
from hashlib import sha1
from jinja2.exceptions import UndefinedError
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleFileNotFound, AnsibleAssertionError, AnsibleTemplateError
from ansible.inventory.host import Host
from ansible.inventory.helpers import sort_groups, get_group_vars
from ansible.module_utils.common.text.converters import to_text
from ansible.module_utils.six import text_type, string_types
from ansible.plugins.loader import lookup_loader
from ansible.vars.fact_cache import FactCache
from ansible.template import Templar
from ansible.utils.display import Display
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.vars import combine_vars, load_extra_vars, load_options_vars
from ansible.utils.unsafe_proxy import wrap_var
from ansible.vars.clean import namespace_facts, clean_facts
from ansible.vars.plugins import get_vars_from_inventory_sources, get_vars_from_path
display = Display()
def preprocess_vars(a):
'''
Ensures that vars contained in the parameter passed in are
returned as a list of dictionaries, to ensure for instance
that vars loaded from a file conform to an expected state.
'''
if a is None:
return None
elif not isinstance(a, list):
data = [a]
else:
data = a
for item in data:
if not isinstance(item, MutableMapping):
raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))
return data
class VariableManager:
_ALLOWED = frozenset(['plugins_by_group', 'groups_plugins_play', 'groups_plugins_inventory', 'groups_inventory',
'all_plugins_play', 'all_plugins_inventory', 'all_inventory'])
def __init__(self, loader=None, inventory=None, version_info=None):
self._nonpersistent_fact_cache = defaultdict(dict)
self._vars_cache = defaultdict(dict)
self._extra_vars = defaultdict(dict)
self._host_vars_files = defaultdict(dict)
self._group_vars_files = defaultdict(dict)
self._inventory = inventory
self._loader = loader
self._hostvars = None
self._omit_token = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()
self._options_vars = load_options_vars(version_info)
# If the basedir is specified as the empty string then it results in cwd being used.
# This is not a safe location to load vars from.
basedir = self._options_vars.get('basedir', False)
self.safe_basedir = bool(basedir is False or basedir)
# load extra vars
self._extra_vars = load_extra_vars(loader=self._loader)
# load fact cache
try:
self._fact_cache = FactCache()
except AnsibleError as e:
# bad cache plugin is not fatal error
# fallback to a dict as in memory cache
display.warning(to_text(e))
self._fact_cache = {}
def __getstate__(self):
data = dict(
fact_cache=self._fact_cache,
np_fact_cache=self._nonpersistent_fact_cache,
vars_cache=self._vars_cache,
extra_vars=self._extra_vars,
host_vars_files=self._host_vars_files,
group_vars_files=self._group_vars_files,
omit_token=self._omit_token,
options_vars=self._options_vars,
inventory=self._inventory,
safe_basedir=self.safe_basedir,
)
return data
def __setstate__(self, data):
self._fact_cache = data.get('fact_cache', defaultdict(dict))
self._nonpersistent_fact_cache = data.get('np_fact_cache', defaultdict(dict))
self._vars_cache = data.get('vars_cache', defaultdict(dict))
self._extra_vars = data.get('extra_vars', dict())
self._host_vars_files = data.get('host_vars_files', defaultdict(dict))
self._group_vars_files = data.get('group_vars_files', defaultdict(dict))
self._omit_token = data.get('omit_token', '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest())
self._inventory = data.get('inventory', None)
self._options_vars = data.get('options_vars', dict())
self.safe_basedir = data.get('safe_basedir', False)
self._loader = None
self._hostvars = None
@property
def extra_vars(self):
return self._extra_vars
def set_inventory(self, inventory):
self._inventory = inventory
def get_vars(self, play=None, host=None, task=None, include_hostvars=True, include_delegate_to=False, use_cache=True,
_hosts=None, _hosts_all=None, stage='task'):
'''
Returns the variables, with optional "context" given via the parameters
for the play, host, and task (which could possibly result in different
sets of variables being returned due to the additional context).
The order of precedence is:
- play->roles->get_default_vars (if there is a play context)
- group_vars_files[host] (if there is a host context)
- host_vars_files[host] (if there is a host context)
- host->get_vars (if there is a host context)
- fact_cache[host] (if there is a host context)
- play vars (if there is a play context)
- play vars_files (if there's no host context, ignore
file names that cannot be templated)
- task->get_vars (if there is a task context)
- vars_cache[host] (if there is a host context)
- extra vars
``_hosts`` and ``_hosts_all`` should be considered private args, with only internal trusted callers relying
on the functionality they provide. These arguments may be removed at a later date without a deprecation
period and without warning.
'''
display.debug("in VariableManager get_vars()")
all_vars = dict()
magic_variables = self._get_magic_variables(
play=play,
host=host,
task=task,
include_hostvars=include_hostvars,
_hosts=_hosts,
_hosts_all=_hosts_all,
)
_vars_sources = {}
def _combine_and_track(data, new_data, source):
'''
Wrapper function to update var sources dict and call combine_vars()
See notes in the VarsWithSources docstring for caveats and limitations of the source tracking
'''
if new_data == {}:
return data
if C.DEFAULT_DEBUG:
# Populate var sources dict
for key in new_data:
_vars_sources[key] = source
return combine_vars(data, new_data)
# default for all cases
basedirs = []
if self.safe_basedir: # avoid adhoc/console loading cwd
basedirs = [self._loader.get_basedir()]
if play:
for role in play.get_roles():
# role is public and
# either static or dynamic and completed
# role is not set
# use config option as default
role_is_static_or_completed = role.static or role._completed.get(host.name, False)
if role.public and role_is_static_or_completed or \
role.public is None and not C.DEFAULT_PRIVATE_ROLE_VARS and role_is_static_or_completed:
all_vars = _combine_and_track(all_vars, role.get_default_vars(), "role '%s' defaults" % role.name)
if task:
# set basedirs
if C.PLAYBOOK_VARS_ROOT == 'all': # should be default
basedirs = task.get_search_path()
elif C.PLAYBOOK_VARS_ROOT in ('bottom', 'playbook_dir'): # only option in 2.4.0
basedirs = [task.get_search_path()[0]]
elif C.PLAYBOOK_VARS_ROOT != 'top':
# preserves default basedirs, only option pre 2.3
raise AnsibleError('Unknown playbook vars logic: %s' % C.PLAYBOOK_VARS_ROOT)
# if we have a task in this context, and that task has a role, make
# sure it sees its defaults above any other roles, as we previously
# (v1) made sure each task had a copy of its roles default vars
# TODO: investigate why we need play or include_role check?
if task._role is not None and (play or task.action in C._ACTION_INCLUDE_ROLE):
all_vars = _combine_and_track(all_vars, task._role.get_default_vars(dep_chain=task.get_dep_chain()),
"role '%s' defaults" % task._role.name)
if host:
# THE 'all' group and the rest of groups for a host, used below
all_group = self._inventory.groups.get('all')
host_groups = sort_groups([g for g in host.get_groups() if g.name not in ['all']])
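# Helper used by the vars-plugin merging functions below: prefer the modern
# plugin.get_vars() API and fall back to the per-entity get_host_vars()/
# get_group_vars() methods for older plugins; v1-style plugins (which expose
# a 'run' method) are rejected outright.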
def _get_plugin_vars(plugin, path, entities):
data = {}
try:
data = plugin.get_vars(self._loader, path, entities)
except AttributeError:
try:
for entity in entities:
if isinstance(entity, Host):
data |= plugin.get_host_vars(entity.name)
else:
data |= plugin.get_group_vars(entity.name)
except AttributeError:
if hasattr(plugin, 'run'):
raise AnsibleError("Cannot use v1 type vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
else:
raise AnsibleError("Invalid vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
return data
# internal functions that actually do the work
def _plugins_inventory(entities):
''' merges all entities by inventory source '''
return get_vars_from_inventory_sources(self._loader, self._inventory._sources, entities, stage)
def _plugins_play(entities):
''' merges all entities adjacent to play '''
data = {}
for path in basedirs:
data = _combine_and_track(data, get_vars_from_path(self._loader, path, entities, stage), "path '%s'" % path)
return data
# configurable functions that are sortable via config, remember to add to _ALLOWED if expanding this list
def all_inventory():
return all_group.get_vars()
def all_plugins_inventory():
return _plugins_inventory([all_group])
def all_plugins_play():
return _plugins_play([all_group])
def groups_inventory():
''' gets group vars from inventory '''
return get_group_vars(host_groups)
def groups_plugins_inventory():
''' gets plugin sources from inventory for groups '''
return _plugins_inventory(host_groups)
def groups_plugins_play():
''' gets plugin sources from play for groups '''
return _plugins_play(host_groups)
def plugins_by_groups():
'''
merges all plugin sources by group,
This should be used instead, NOT in combination with the other groups_plugins* functions
'''
data = {}
for group in host_groups:
data[group] = _combine_and_track(data[group], _plugins_inventory(group), "inventory group_vars for '%s'" % group)
data[group] = _combine_and_track(data[group], _plugins_play(group), "playbook group_vars for '%s'" % group)
return data
# Merge groups as per precedence config
# only allow to call the functions we want exposed
for entry in C.VARIABLE_PRECEDENCE:
if entry in self._ALLOWED:
display.debug('Calling %s to load vars for %s' % (entry, host.name))
all_vars = _combine_and_track(all_vars, locals()[entry](), "group vars, precedence entry '%s'" % entry)
else:
display.warning('Ignoring unknown variable precedence entry: %s' % (entry))
# host vars, from inventory, inventory adjacent and play adjacent via plugins
all_vars = _combine_and_track(all_vars, host.get_vars(), "host vars for '%s'" % host)
all_vars = _combine_and_track(all_vars, _plugins_inventory([host]), "inventory host_vars for '%s'" % host)
all_vars = _combine_and_track(all_vars, _plugins_play([host]), "playbook host_vars for '%s'" % host)
# finally, the facts caches for this host, if it exists
# TODO: cleaning of facts should eventually become part of taskresults instead of vars
try:
facts = wrap_var(self._fact_cache.get(host.name, {}))
all_vars |= namespace_facts(facts)
# push facts to main namespace
if C.INJECT_FACTS_AS_VARS:
all_vars = _combine_and_track(all_vars, wrap_var(clean_facts(facts)), "facts")
else:
# always 'promote' ansible_local
all_vars = _combine_and_track(all_vars, wrap_var({'ansible_local': facts.get('ansible_local', {})}), "facts")
except KeyError:
pass
if play:
all_vars = _combine_and_track(all_vars, play.get_vars(), "play vars")
vars_files = play.get_vars_files()
try:
for vars_file_item in vars_files:
# create a set of temporary vars here, which incorporate the extra
# and magic vars so we can properly template the vars_files entries
# NOTE: this makes them depend on host vars/facts so things like
# ansible_facts['os_distribution'] can be used, ala include_vars.
# Consider DEPRECATING this in the future, since we have include_vars ...
temp_vars = combine_vars(all_vars, self._extra_vars)
temp_vars = combine_vars(temp_vars, magic_variables)
templar = Templar(loader=self._loader, variables=temp_vars)
# we assume each item in the list is itself a list, as we
# support "conditional includes" for vars_files, which mimics
# the with_first_found mechanism.
vars_file_list = vars_file_item
if not isinstance(vars_file_list, list):
vars_file_list = [vars_file_list]
# now we iterate through the (potential) files, and break out
# as soon as we read one from the list. If none are found, we
# raise an error, which is silently ignored at this point.
try:
for vars_file in vars_file_list:
vars_file = templar.template(vars_file)
if not (isinstance(vars_file, Sequence)):
raise AnsibleError(
"Invalid vars_files entry found: %r\n"
"vars_files entries should be either a string type or "
"a list of string types after template expansion" % vars_file
)
try:
play_search_stack = play.get_search_path()
found_file = real_file = self._loader.path_dwim_relative_stack(play_search_stack, 'vars', vars_file)
data = preprocess_vars(self._loader.load_from_file(found_file, unsafe=True, cache=False))
if data is not None:
for item in data:
all_vars = _combine_and_track(all_vars, item, "play vars_files from '%s'" % vars_file)
break
except AnsibleFileNotFound:
# we continue on loader failures
continue
except AnsibleParserError:
raise
else:
# if include_delegate_to is set to False or we don't have a host, we ignore the missing
# vars file here because we're working on a delegated host or require host vars, see NOTE above
if include_delegate_to and host:
raise AnsibleFileNotFound("vars file %s was not found" % vars_file_item)
except (UndefinedError, AnsibleUndefinedVariable):
if host is not None and self._fact_cache.get(host.name, dict()).get('module_setup') and task is not None:
raise AnsibleUndefinedVariable("an undefined variable was found when attempting to template the vars_files item '%s'"
% vars_file_item, obj=vars_file_item)
else:
# we do not have a full context here, and the missing variable could be because of that
# so just show a warning and continue
display.vvv("skipping vars_file '%s' due to an undefined variable" % vars_file_item)
continue
display.vvv("Read vars_file '%s'" % vars_file_item)
except TypeError:
raise AnsibleParserError("Error while reading vars files - please supply a list of file names. "
"Got '%s' of type %s" % (vars_files, type(vars_files)))
# We now merge in all exported vars from all roles in the play,
# unless the user has disabled this
# role is public and
# either static or dynamic and completed
# role is not set
# use config option as default
for role in play.get_roles():
role_is_static_or_completed = role.static or role._completed.get(host.name, False)
if role.public and role_is_static_or_completed or \
role.public is None and not C.DEFAULT_PRIVATE_ROLE_VARS and role_is_static_or_completed:
all_vars = _combine_and_track(all_vars, role.get_vars(include_params=False, only_exports=True), "role '%s' exported vars" % role.name)
# next, we merge in the vars from the role, which will specifically
# follow the role dependency chain, and then we merge in the tasks
# vars (which will look at parent blocks/task includes)
if task:
if task._role:
all_vars = _combine_and_track(all_vars, task._role.get_vars(task.get_dep_chain(), include_params=False, only_exports=False),
"role '%s' all vars" % task._role.name)
all_vars = _combine_and_track(all_vars, task.get_vars(), "task vars")
# next, we merge in the vars cache (include vars) and nonpersistent
# facts cache (set_fact/register), in that order
if host:
# include_vars non-persistent cache
all_vars = _combine_and_track(all_vars, self._vars_cache.get(host.get_name(), dict()), "include_vars")
# fact non-persistent cache
all_vars = _combine_and_track(all_vars, self._nonpersistent_fact_cache.get(host.name, dict()), "set_fact")
# next, we merge in role params and task include params
if task:
# special case for include tasks, where the include params
# may be specified in the vars field for the task, which should
# have higher precedence than the vars/np facts above
if task._role:
all_vars = _combine_and_track(all_vars, task._role.get_role_params(task.get_dep_chain()), "role params")
all_vars = _combine_and_track(all_vars, task.get_include_params(), "include params")
# extra vars
all_vars = _combine_and_track(all_vars, self._extra_vars, "extra vars")
# magic variables
all_vars = _combine_and_track(all_vars, magic_variables, "magic vars")
# special case for the 'environment' magic variable, as someone
# may have set it as a variable and we don't want to stomp on it
if task:
all_vars['environment'] = task.environment
# 'vars' magic var
if task or play:
# has to be copy, otherwise recursive ref
all_vars['vars'] = all_vars.copy()
# if we have a host and task and we're delegating to another host,
# figure out the variables for that host now so we don't have to rely on host vars later
if task and host and task.delegate_to is not None and include_delegate_to:
all_vars['ansible_delegated_vars'], all_vars['_ansible_loop_cache'] = self._get_delegated_vars(play, task, all_vars)
display.debug("done with get_vars()")
if C.DEFAULT_DEBUG:
# Use VarsWithSources wrapper class to display var sources
return VarsWithSources.new_vars_with_sources(all_vars, _vars_sources)
else:
return all_vars
def _get_magic_variables(self, play, host, task, include_hostvars, _hosts=None, _hosts_all=None):
'''
Returns a dictionary of so-called "magic" variables in Ansible,
which are special variables we set internally for use.
'''
variables = {}
variables['playbook_dir'] = os.path.abspath(self._loader.get_basedir())
variables['ansible_playbook_python'] = sys.executable
variables['ansible_config_file'] = C.CONFIG_FILE
if play:
# This is a list of all role names of all dependencies for all roles for this play
dependency_role_names = list({d.get_name() for r in play.roles for d in r.get_all_dependencies()})
# This is a list of all role names of all roles for this play
play_role_names = [r.get_name() for r in play.roles]
# ansible_role_names includes all role names, dependent or directly referenced by the play
variables['ansible_role_names'] = list(set(dependency_role_names + play_role_names))
# ansible_play_role_names includes the names of all roles directly referenced by this play
# roles that are implicitly referenced via dependencies are not listed.
variables['ansible_play_role_names'] = play_role_names
# ansible_dependent_role_names includes the names of all roles that are referenced via dependencies
# dependencies that are also explicitly named as roles are included in this list
variables['ansible_dependent_role_names'] = dependency_role_names
# DEPRECATED: role_names should be deprecated in favor of ansible_role_names or ansible_play_role_names
variables['role_names'] = variables['ansible_play_role_names']
variables['ansible_play_name'] = play.get_name()
if task:
if task._role:
variables['role_name'] = task._role.get_name(include_role_fqcn=False)
variables['role_path'] = task._role._role_path
variables['role_uuid'] = text_type(task._role._uuid)
variables['ansible_collection_name'] = task._role._role_collection
variables['ansible_role_name'] = task._role.get_name()
if self._inventory is not None:
variables['groups'] = self._inventory.get_groups_dict()
if play:
templar = Templar(loader=self._loader)
if not play.finalized and templar.is_template(play.hosts):
pattern = 'all'
else:
pattern = play.hosts or 'all'
# add the list of hosts in the play, as adjusted for limit/filters
if not _hosts_all:
_hosts_all = [h.name for h in self._inventory.get_hosts(pattern=pattern, ignore_restrictions=True)]
if not _hosts:
_hosts = [h.name for h in self._inventory.get_hosts()]
variables['ansible_play_hosts_all'] = _hosts_all[:]
variables['ansible_play_hosts'] = [x for x in variables['ansible_play_hosts_all'] if x not in play._removed_hosts]
variables['ansible_play_batch'] = [x for x in _hosts if x not in play._removed_hosts]
# DEPRECATED: play_hosts should be deprecated in favor of ansible_play_batch,
# however this would take work in the templating engine, so for now we'll add both
variables['play_hosts'] = variables['ansible_play_batch']
# the 'omit' value allows params to be left out if the variable they are based on is undefined
variables['omit'] = self._omit_token
# Set options vars
for option, option_value in self._options_vars.items():
variables[option] = option_value
if self._hostvars is not None and include_hostvars:
variables['hostvars'] = self._hostvars
return variables
def get_delegated_vars_and_hostname(self, templar, task, variables):
"""Get the delegated_vars for an individual task invocation, which may be be in the context
of an individual loop iteration.
Not used directly be VariableManager, but used primarily within TaskExecutor
"""
delegated_vars = {}
delegated_host_name = None
if task.delegate_to:
delegated_host_name = templar.template(task.delegate_to, fail_on_undefined=False)
delegated_host = self._inventory.get_host(delegated_host_name)
if delegated_host is None:
for h in self._inventory.get_hosts(ignore_limits=True, ignore_restrictions=True):
# check if the address matches, or if both the delegated_to host
# and the current host are in the list of localhost aliases
if h.address == delegated_host_name:
delegated_host = h
break
else:
delegated_host = Host(name=delegated_host_name)
delegated_vars['ansible_delegated_vars'] = {
delegated_host_name: self.get_vars(
play=task.get_play(),
host=delegated_host,
task=task,
include_delegate_to=False,
include_hostvars=True,
)
}
delegated_vars['ansible_delegated_vars'][delegated_host_name]['inventory_hostname'] = variables.get('inventory_hostname')
return delegated_vars, delegated_host_name
def _get_delegated_vars(self, play, task, existing_variables):
# This method has a lot of code copied from ``TaskExecutor._get_loop_items``
# if this is failing, and ``TaskExecutor._get_loop_items`` is not
# then more will have to be copied here.
# TODO: dedupe code here and with ``TaskExecutor._get_loop_items``
# this may be possible once we move pre-processing pre fork
if not hasattr(task, 'loop'):
# This "task" is not a Task, so we need to skip it
return {}, None
display.deprecated(
'Getting delegated variables via get_vars is no longer used, and is handled within the TaskExecutor.',
version='2.18',
)
# we unfortunately need to template the delegate_to field here,
# as we're fetching vars before post_validate has been called on
# the task that has been passed in
vars_copy = existing_variables.copy()
# get search path for this task to pass to lookup plugins
vars_copy['ansible_search_path'] = task.get_search_path()
# ensure basedir is always in (dwim already searches here but we need to display it)
if self._loader.get_basedir() not in vars_copy['ansible_search_path']:
vars_copy['ansible_search_path'].append(self._loader.get_basedir())
templar = Templar(loader=self._loader, variables=vars_copy)
items = []
has_loop = True
if task.loop_with is not None:
if task.loop_with in lookup_loader:
fail = True
if task.loop_with == 'first_found':
# first_found loops are special. If the item is undefined then we want to fall through to the next
fail = False
try:
loop_terms = listify_lookup_plugin_terms(terms=task.loop, templar=templar, fail_on_undefined=fail, convert_bare=False)
if not fail:
loop_terms = [t for t in loop_terms if not templar.is_template(t)]
mylookup = lookup_loader.get(task.loop_with, loader=self._loader, templar=templar)
# give lookup task 'context' for subdir (mostly needed for first_found)
for subdir in ['template', 'var', 'file']: # TODO: move this to constants?
if subdir in task.action:
break
setattr(mylookup, '_subdir', subdir + 's')
items = wrap_var(mylookup.run(terms=loop_terms, variables=vars_copy))
except AnsibleTemplateError:
# This task will be skipped later due to this, so we just setup
# a dummy array for the later code so it doesn't fail
items = [None]
else:
raise AnsibleError("Failed to find the lookup named '%s' in the available lookup plugins" % task.loop_with)
elif task.loop is not None:
try:
items = templar.template(task.loop)
except AnsibleTemplateError:
# This task will be skipped later due to this, so we just setup
# a dummy array for the later code so it doesn't fail
items = [None]
else:
has_loop = False
items = [None]
# since host can change per loop, we keep dict per host name resolved
delegated_host_vars = dict()
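# honor a custom loop_var set via loop_control when templating delegate_to per item; fall back to the default 'item'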
item_var = getattr(task.loop_control, 'loop_var', 'item')
cache_items = False
for item in items:
# update the variables with the item value for templating, in case we need it
if item is not None:
vars_copy[item_var] = item
templar.available_variables = vars_copy
delegated_host_name = templar.template(task.delegate_to, fail_on_undefined=False)
if delegated_host_name != task.delegate_to:
cache_items = True
if delegated_host_name is None:
raise AnsibleError(message="Undefined delegate_to host for task:", obj=task._ds)
if not isinstance(delegated_host_name, string_types):
raise AnsibleError(message="the field 'delegate_to' has an invalid type (%s), and could not be"
" converted to a string type." % type(delegated_host_name), obj=task._ds)
if delegated_host_name in delegated_host_vars:
# no need to repeat ourselves, as the delegate_to value
# does not appear to be tied to the loop item variable
continue
# now try to find the delegated-to host in inventory, or failing that,
# create a new host on the fly so we can fetch variables for it
delegated_host = None
if self._inventory is not None:
delegated_host = self._inventory.get_host(delegated_host_name)
# try looking it up based on the address field, and finally
# fall back to creating a host on the fly to use for the var lookup
if delegated_host is None:
for h in self._inventory.get_hosts(ignore_limits=True, ignore_restrictions=True):
# check if the address matches, or if both the delegated_to host
# and the current host are in the list of localhost aliases
if h.address == delegated_host_name:
delegated_host = h
break
else:
delegated_host = Host(name=delegated_host_name)
else:
delegated_host = Host(name=delegated_host_name)
# now we go fetch the vars for the delegated-to host and save them in our
# master dictionary of variables to be used later in the TaskExecutor/PlayContext
delegated_host_vars[delegated_host_name] = self.get_vars(
play=play,
host=delegated_host,
task=task,
include_delegate_to=False,
include_hostvars=True,
)
delegated_host_vars[delegated_host_name]['inventory_hostname'] = vars_copy.get('inventory_hostname')
_ansible_loop_cache = None
if has_loop and cache_items:
# delegate_to templating produced a change, so we will cache the templated items
# in a special private hostvar
# this ensures that delegate_to+loop doesn't produce different results than TaskExecutor
# which may reprocess the loop
_ansible_loop_cache = items
return delegated_host_vars, _ansible_loop_cache
def clear_facts(self, hostname):
'''
Clears the facts for a host
'''
self._fact_cache.pop(hostname, None)
def set_host_facts(self, host, facts):
'''
Sets or updates the given facts for a host in the fact cache.
'''
if not isinstance(facts, Mapping):
raise AnsibleAssertionError("the type of 'facts' to set for host_facts should be a Mapping but is a %s" % type(facts))
try:
host_cache = self._fact_cache[host]
except KeyError:
# We get to set this as new
host_cache = facts
else:
if not isinstance(host_cache, MutableMapping):
raise TypeError('The object retrieved for {0} must be a MutableMapping but was'
' a {1}'.format(host, type(host_cache)))
# Update the existing facts
host_cache |= facts
# Save the facts back to the backing store
self._fact_cache[host] = host_cache
def set_nonpersistent_facts(self, host, facts):
'''
Sets or updates the given facts for a host in the nonpersistent fact cache (set_fact/register results).
'''
if not isinstance(facts, Mapping):
raise AnsibleAssertionError("the type of 'facts' to set for nonpersistent_facts should be a Mapping but is a %s" % type(facts))
try:
self._nonpersistent_fact_cache[host] |= facts
except KeyError:
self._nonpersistent_fact_cache[host] = facts
def set_host_variable(self, host, varname, value):
'''
Sets a value in the vars_cache for a host.
'''
if host not in self._vars_cache:
self._vars_cache[host] = dict()
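# if both the cached value and the new value are mappings, merge them together;
# any other combination simply replaces the previous value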
if varname in self._vars_cache[host] and isinstance(self._vars_cache[host][varname], MutableMapping) and isinstance(value, MutableMapping):
self._vars_cache[host] = combine_vars(self._vars_cache[host], {varname: value})
else:
self._vars_cache[host][varname] = value
class VarsWithSources(MutableMapping):
'''
Dict-like class for vars that also provides source information for each var
This class can only store the source for top-level vars. It does no tracking
on its own, just shows a debug message with the information that it is provided
when a particular var is accessed.
'''
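# Usage sketch (editor illustration, not part of the original module): wrap a vars dict
# together with per-key source labels so debug output can name where each var came from.
#   v = VarsWithSources.new_vars_with_sources({'foo': 1}, {'foo': "play vars"})
#   v.get_source('foo')  # -> "play vars"
#   v['foo']             # emits a debug line naming the source, then returns 1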
def __init__(self, *args, **kwargs):
''' Dict-compatible constructor '''
self.data = dict(*args, **kwargs)
self.sources = {}
@classmethod
def new_vars_with_sources(cls, data, sources):
''' Alternate constructor method to instantiate class with sources '''
v = cls(data)
v.sources = sources
return v
def get_source(self, key):
return self.sources.get(key, None)
def __getitem__(self, key):
val = self.data[key]
# See notes in the VarsWithSources docstring for caveats and limitations of the source tracking
display.debug("variable '%s' from source: %s" % (key, self.sources.get(key, "unknown")))
return val
def __setitem__(self, key, value):
self.data[key] = value
def __delitem__(self, key):
del self.data[key]
def __iter__(self):
return iter(self.data)
def __len__(self):
return len(self.data)
# Prevent duplicate debug messages by defining our own __contains__ pointing at the underlying dict
def __contains__(self, key):
return self.data.__contains__(key)
def copy(self):
return VarsWithSources.new_vars_with_sources(self.data.copy(), self.sources.copy())
def __or__(self, other):
if isinstance(other, MutableMapping):
c = self.data.copy()
c.update(other)
return c
return NotImplemented
def __ror__(self, other):
if isinstance(other, MutableMapping):
c = self.__class__()
c.update(other)
c.update(self.data)
return c
return NotImplemented
def __ior__(self, other):
self.data.update(other)
return self.data
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,264 |
delegate_to: "{{var}}" when: var != "" causes a "Supplied entity must be Host or Group, got <class 'ansible.inventory.host.Host'> instead" error on 2.16
|
### Summary
I have a task to conditionally install an SSH key on another machine:
```yaml
- authorized_key: ...
delegate_to: "{{ jenkins_install_key_on }}"
when: jenkins_install_key_on != ""
```
This works fine when `jenkins_install_key_on` is set to a non-blank value, but fails when `jenkins_install_key_on` is set to an empty string:
```
fatal: [jammy -> {{ jenkins_install_key_on }}]: FAILED! => {"msg": "Supplied entity must be Host or Group, got <class 'ansible.inventory.host.Host'> instead"}
```
It used to work with ansible-core 2.15 and older.
### Issue Type
Bug Report
### Component Name
delegate_to
### Ansible Version
```console
$ ansible --version
ansible [core 2.16.0]
config file = /home/mg/src/deployments/provisioning/ansible.cfg
configured module search path = ['/home/mg/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/mg/.local/pipx/venvs/ansible/lib/python3.11/site-packages/ansible
ansible collection location = /home/mg/.ansible/collections:/usr/share/ansible/collections
executable location = /home/mg/.local/bin/ansible
python version = 3.11.6 (main, Oct 8 2023, 05:06:43) [GCC 13.2.0] (/home/mg/.local/pipx/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
ACTION_WARNINGS(/home/mg/src/deployments/provisioning/ansible.cfg) = False
CACHE_PLUGIN(/home/mg/src/deployments/provisioning/ansible.cfg) = jsonfile
CACHE_PLUGIN_CONNECTION(/home/mg/src/deployments/provisioning/ansible.cfg) = .cache/facts/
CACHE_PLUGIN_TIMEOUT(/home/mg/src/deployments/provisioning/ansible.cfg) = 86400
CALLBACKS_ENABLED(/home/mg/src/deployments/provisioning/ansible.cfg) = ['fancy_html']
CONFIG_FILE() = /home/mg/src/deployments/provisioning/ansible.cfg
DEFAULT_FORKS(/home/mg/src/deployments/provisioning/ansible.cfg) = 15
DEFAULT_GATHERING(/home/mg/src/deployments/provisioning/ansible.cfg) = smart
DEFAULT_HOST_LIST(/home/mg/src/deployments/provisioning/ansible.cfg) = ['/home/mg/src/deployments/provisioning/inventory']
DEFAULT_LOG_PATH(/home/mg/src/deployments/provisioning/ansible.cfg) = /home/mg/src/deployments/provisioning/.cache/ansible.log
DEFAULT_REMOTE_USER(/home/mg/src/deployments/provisioning/ansible.cfg) = root
DEFAULT_STDOUT_CALLBACK(/home/mg/src/deployments/provisioning/ansible.cfg) = yaml
DEFAULT_VAULT_PASSWORD_FILE(/home/mg/src/deployments/provisioning/ansible.cfg) = /home/mg/src/deployments/provisioning/askpas>
EDITOR(env: EDITOR) = vim
INTERPRETER_PYTHON(/home/mg/src/deployments/provisioning/ansible.cfg) = python3
RETRY_FILES_ENABLED(/home/mg/src/deployments/provisioning/ansible.cfg) = False
CACHE:
=====
jsonfile:
________
_timeout(/home/mg/src/deployments/provisioning/ansible.cfg) = 86400
_uri(/home/mg/src/deployments/provisioning/ansible.cfg) = /home/mg/src/deployments/provisioning/.cache/facts
CONNECTION:
==========
paramiko_ssh:
____________
remote_user(/home/mg/src/deployments/provisioning/ansible.cfg) = root
ssh:
___
remote_user(/home/mg/src/deployments/provisioning/ansible.cfg) = root
```
### OS / Environment
Ubuntu 23.10
### Steps to Reproduce
```yaml
- hosts: all
gather_facts: no
tasks:
- debug: msg="hello"
delegate_to: "{{ var }}"
when: var != ""
vars:
var: ""
```
### Expected Results
I expect the task to be skipped.
### Actual Results
```console
$ ansible-playbook -i localhost, ansible-delegate-to-blank.yml -vvvv
ansible-playbook [core 2.16.0]
config file = None
configured module search path = ['/home/mg/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/mg/.local/pipx/venvs/ansible/lib/python3.11/site-packages/ansible
ansible collection location = /home/mg/.ansible/collections:/usr/share/ansible/collections
executable location = /home/mg/.local/bin/ansible-playbook
python version = 3.11.6 (main, Oct 8 2023, 05:06:43) [GCC 13.2.0] (/home/mg/.local/pipx/venvs/ansible/bin/python)
jinja version = 3.1.2
libyaml = True
No config file found; using defaults
setting up inventory plugins
Loading collection ansible.builtin from
Set default localhost to localhost
Parsed localhost, inventory source with host_list plugin
Loading callback plugin default of type stdout, v2.0 from /home/mg/.local/pipx/venvs/ansible/lib/python3.11/site-packages/ansible/plugins/callback/default.py
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
PLAYBOOK: ansible-delegate-to-blank.yml **************************************************************************************
Positional arguments: ansible-delegate-to-blank.yml
verbosity: 4
connection: ssh
become_method: sudo
tags: ('all',)
inventory: ('localhost,',)
forks: 5
1 plays in ansible-delegate-to-blank.yml
PLAY [all] *******************************************************************************************************************
TASK [debug] *****************************************************************************************************************
task path: /home/mg/tmp/ansible-delegate-to-blank.yml:4
fatal: [localhost -> {{ var }}]: FAILED! => {
"msg": "Supplied entity must be Host or Group, got <class 'ansible.inventory.host.Host'> instead"
}
PLAY RECAP *******************************************************************************************************************
localhost : ok=0 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82264
|
https://github.com/ansible/ansible/pull/82319
|
3a42a0036875c8cab6a62ab9ea67a365e1dd4781
|
6ebefaceb6cd0d4961776a94d63a71fc1fc28bc0
| 2023-11-22T08:22:01Z |
python
| 2023-12-04T15:19:12Z |
test/integration/targets/delegate_to/test_delegate_to.yml
|
- hosts: testhost3
vars:
template_role: ./roles/test_template
output_dir: "{{ playbook_dir }}"
templated_var: foo
templated_dict: { 'hello': 'world' }
tasks:
- name: Test no delegate_to
setup:
register: setup_results
- assert:
that:
- '"127.0.0.3" in setup_results.ansible_facts.ansible_env["SSH_CONNECTION"]'
- name: Test delegate_to with host in inventory
setup:
register: setup_results
delegate_to: testhost4
- debug: var=setup_results
- assert:
that:
- '"127.0.0.4" in setup_results.ansible_facts.ansible_env["SSH_CONNECTION"]'
- name: Test delegate_to with host not in inventory
setup:
register: setup_results
delegate_to: 127.0.0.254
- assert:
that:
- '"127.0.0.254" in setup_results.ansible_facts.ansible_env["SSH_CONNECTION"]'
#
# Smoketest some other modules do not error as a canary
#
- name: Test file works with delegate_to and a host in inventory
file: path={{ output_dir }}/foo.txt mode=0644 state=touch
delegate_to: testhost4
- name: Test file works with delegate_to and a host not in inventory
file: path={{ output_dir }}/tmp.txt mode=0644 state=touch
delegate_to: 127.0.0.254
- name: Test template works with delegate_to and a host in inventory
template: src={{ template_role }}/templates/foo.j2 dest={{ output_dir }}/foo.txt
delegate_to: testhost4
- name: Test template works with delegate_to and a host not in inventory
template: src={{ template_role }}/templates/foo.j2 dest={{ output_dir }}/foo.txt
delegate_to: 127.0.0.254
- name: remove test file
file: path={{ output_dir }}/foo.txt state=absent
- name: remove test file
file: path={{ output_dir }}/tmp.txt state=absent
- name: verify delegation with per host vars
hosts: testhost6
gather_facts: yes
tasks:
- debug: msg={{ansible_facts['env']}}
- name: ensure normal facts still work as expected
assert:
that:
- '"127.0.0.3" in ansible_facts["env"]["SSH_CONNECTION"]'
- name: Test delegate_to with other host defined using same named var
setup:
register: setup_results
delegate_to: testhost7
- debug: msg={{setup_results.ansible_facts.ansible_env}}
- name: verify ssh plugin resolves variable for ansible_host correctly
assert:
that:
- '"127.0.0.4" in setup_results.ansible_facts.ansible_env["SSH_CONNECTION"]'
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 76,727 |
lineinfile: Fails when creating a backup of Ubuntu 21.10's /etc/resolv.conf (symlink to "../run/systemd/resolve/stub-resolv.conf")
|
### Summary
The problem from https://github.com/ansible/ansible/issues/31982 is back: `lineinfile` with `backup: yes` fails on Ubuntu 21.10's /etc/resolv.conf (a symlink to ../run/systemd/resolve/stub-resolv.conf) because setting attributes on the backup copy fails with "chattr failed ... Operation not supported".
### Issue Type
Bug Report
### Component Name
lineinfile
### Ansible Version
```console
$ ansible --version
ansible 2.10.8
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3/dist-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.7 (default, Sep 10 2021, 14:59:43) [GCC 11.2.0]
```
### Configuration
```console
$ ansible-config dump --only-changed
$
```
### OS / Environment
Ubuntu 21.10 Server in a VirtualBox, on server and clients, nothing special
### Steps to Reproduce
```
- name: "Check DNS configuration: Remove search list"
ansible.builtin.lineinfile:
backup: "yes"
path: "/etc/resolv.conf"
line: "search x.tld"
state: "absent"
- name: "Check DNS configuration: Cache servers present"
ansible.builtin.lineinfile:
backup: "yes"
line: "{{ item }}"
path: "/etc/resolv.conf"
state: "present"
with_items:
- "nameserver x.x.x.x"
- "nameserver y.y.y.y"
```
### Expected Results
The file is modified, a backup is created.
### Actual Results
```console
<x.x.x.x> (1, b'\r\n{"path": "/etc/resolv.conf.2410.2022-01-11@10:36:56~", "details": "Error while setting attributes: /etc/resolv.conf.2410.2022-01-11@10:36:56~: Operation not supported\\n", "exception": "Traceback (most recent call last):\\n File \\"/tmp/ansible_ansible.builtin.lineinfile_payload_pvwbc704/ansible_ansible.builtin.lineinfile_payload.zip/ansible/module_utils/basic.py\\", line 1256, in set_attributes_if_different\\n raise Exception(\\"Error while setting attributes: %s\\" % (out + err))\\nException: Error while setting attributes: /etc/resolv.conf.2410.2022-01-11@10:36:56~: Operation not supported\\n\\n", "failed": true, "msg": "chattr failed", "uid": 101, "gid": 103, "owner": "systemd-resolve", "group": "systemd-resolve", "mode": "0644", "state": "file", "size": 920, "invocation": {"module_args": {"backup": true, "line": "nameserver x.x.x.x", "path": "/etc/resolv.conf", "state": "present", "backrefs": false, "create": false, "firstmatch": false, "unsafe_writes": false, "regexp": null, "insertafter": null, "insertbefore": null, "validate": null, "mode": null, "owner": null, "group": null, "seuser": null, "serole": null, "selevel": null, "setype": null, "attributes": null}}}\r\n', b'Shared connection to x.x.x.x closed.\r\n')
<x.x.x.x> Failed to connect to the host via ssh: Shared connection to x.x.x.x closed.
<x.x.x.x> ESTABLISH SSH CONNECTION FOR USER: root
<x.x.x.x> SSH: EXEC ssh -C -o ControlMaster=auto -o ControlPersist=60s -o KbdInteractiveAuthentication=no -o PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey -o PasswordAuthentication=no -o 'User="root"' -o ConnectTimeout=10 -o ControlPath=/root/.ansible/cp/4c20d84404 x.x.x.x '/bin/sh -c '"'"'rm -f -r /root/.ansible/tmp/ansible-tmp-1641897416.2652106-3817-66846454678877/ > /dev/null 2>&1 && sleep 0'"'"''
The full traceback is:
Traceback (most recent call last):
File "/tmp/ansible_ansible.builtin.lineinfile_payload_bccsus14/ansible_ansible.builtin.lineinfile_payload.zip/ansible/module_utils/basic.py", line 1256, in set_attributes_if_different
raise Exception("Error while setting attributes: %s" % (out + err))
Exception: Error while setting attributes: /etc/resolv.conf.2577.2022-01-11@10:36:56~: Operation not supported
failed: [x.x.x.x] (item=nameserver x.x.x.x) => {
"ansible_loop_var": "item",
"changed": false,
"details": "Error while setting attributes: /etc/resolv.conf.2577.2022-01-11@10:36:56~: Operation not supported\n",
"gid": 103,
"group": "systemd-resolve",
"invocation": {
"module_args": {
"attributes": null,
"backrefs": false,
"backup": true,
"create": false,
"firstmatch": false,
"group": null,
"insertafter": null,
"insertbefore": null,
"line": "nameserver x.x.x.x",
"mode": null,
"owner": null,
"path": "/etc/resolv.conf",
"regexp": null,
"selevel": null,
"serole": null,
"setype": null,
"seuser": null,
"state": "present",
"unsafe_writes": false,
"validate": null
}
},
"item": "nameserver x.x.x.x",
"mode": "0644",
"msg": "chattr failed",
"owner": "systemd-resolve",
"path": "/etc/resolv.conf.2577.2022-01-11@10:36:56~",
"size": 946,
"state": "file",
"uid": 101
}
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/76727
|
https://github.com/ansible/ansible/pull/78707
|
dd0138ba2127eac0e809d68e00ae117df56db77e
|
9b002d2e63ec0d39c7c3025dfd4be7489c82016b
| 2022-01-11T11:07:14Z |
python
| 2023-12-06T17:02:51Z |
changelogs/fragments/76727-chattr-fix-for-backups-of-symlinks.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 76,727 |
lineinfile: Fails when creating a backup of Ubuntu 21.10's /etc/resolv.conf (symlink to "../run/systemd/resolve/stub-resolv.conf")
|
### Summary
The problem from https://github.com/ansible/ansible/issues/31982 is back: `lineinfile` with `backup: yes` fails on Ubuntu 21.10's /etc/resolv.conf (a symlink to ../run/systemd/resolve/stub-resolv.conf) because setting attributes on the backup copy fails with "chattr failed ... Operation not supported".
### Issue Type
Bug Report
### Component Name
lineinfile
### Ansible Version
```console
$ ansible --version
ansible 2.10.8
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3/dist-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.7 (default, Sep 10 2021, 14:59:43) [GCC 11.2.0]
```
### Configuration
```console
$ ansible-config dump --only-changed
$
```
### OS / Environment
Ubuntu 21.10 Server in a VirtualBox, on server and clients, nothing special
### Steps to Reproduce
```
- name: "Check DNS configuration: Remove search list"
ansible.builtin.lineinfile:
backup: "yes"
path: "/etc/resolv.conf"
line: "search x.tld"
state: "absent"
- name: "Check DNS configuration: Cache servers present"
ansible.builtin.lineinfile:
backup: "yes"
line: "{{ item }}"
path: "/etc/resolv.conf"
state: "present"
with_items:
- "nameserver x.x.x.x"
- "nameserver y.y.y.y"
```
### Expected Results
The file is modified, a backup is created.
### Actual Results
```console
<x.x.x.x> (1, b'\r\n{"path": "/etc/resolv.conf.2410.2022-01-11@10:36:56~", "details": "Error while setting attributes: /etc/resolv.conf.2410.2022-01-11@10:36:56~: Operation not supported\\n", "exception": "Traceback (most recent call last):\\n File \\"/tmp/ansible_ansible.builtin.lineinfile_payload_pvwbc704/ansible_ansible.builtin.lineinfile_payload.zip/ansible/module_utils/basic.py\\", line 1256, in set_attributes_if_different\\n raise Exception(\\"Error while setting attributes: %s\\" % (out + err))\\nException: Error while setting attributes: /etc/resolv.conf.2410.2022-01-11@10:36:56~: Operation not supported\\n\\n", "failed": true, "msg": "chattr failed", "uid": 101, "gid": 103, "owner": "systemd-resolve", "group": "systemd-resolve", "mode": "0644", "state": "file", "size": 920, "invocation": {"module_args": {"backup": true, "line": "nameserver x.x.x.x", "path": "/etc/resolv.conf", "state": "present", "backrefs": false, "create": false, "firstmatch": false, "unsafe_writes": false, "regexp": null, "insertafter": null, "insertbefore": null, "validate": null, "mode": null, "owner": null, "group": null, "seuser": null, "serole": null, "selevel": null, "setype": null, "attributes": null}}}\r\n', b'Shared connection to x.x.x.x closed.\r\n')
<x.x.x.x> Failed to connect to the host via ssh: Shared connection to x.x.x.x closed.
<x.x.x.x> ESTABLISH SSH CONNECTION FOR USER: root
<x.x.x.x> SSH: EXEC ssh -C -o ControlMaster=auto -o ControlPersist=60s -o KbdInteractiveAuthentication=no -o PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey -o PasswordAuthentication=no -o 'User="root"' -o ConnectTimeout=10 -o ControlPath=/root/.ansible/cp/4c20d84404 x.x.x.x '/bin/sh -c '"'"'rm -f -r /root/.ansible/tmp/ansible-tmp-1641897416.2652106-3817-66846454678877/ > /dev/null 2>&1 && sleep 0'"'"''
The full traceback is:
Traceback (most recent call last):
File "/tmp/ansible_ansible.builtin.lineinfile_payload_bccsus14/ansible_ansible.builtin.lineinfile_payload.zip/ansible/module_utils/basic.py", line 1256, in set_attributes_if_different
raise Exception("Error while setting attributes: %s" % (out + err))
Exception: Error while setting attributes: /etc/resolv.conf.2577.2022-01-11@10:36:56~: Operation not supported
failed: [x.x.x.x] (item=nameserver x.x.x.x) => {
"ansible_loop_var": "item",
"changed": false,
"details": "Error while setting attributes: /etc/resolv.conf.2577.2022-01-11@10:36:56~: Operation not supported\n",
"gid": 103,
"group": "systemd-resolve",
"invocation": {
"module_args": {
"attributes": null,
"backrefs": false,
"backup": true,
"create": false,
"firstmatch": false,
"group": null,
"insertafter": null,
"insertbefore": null,
"line": "nameserver x.x.x.x",
"mode": null,
"owner": null,
"path": "/etc/resolv.conf",
"regexp": null,
"selevel": null,
"serole": null,
"setype": null,
"seuser": null,
"state": "present",
"unsafe_writes": false,
"validate": null
}
},
"item": "nameserver x.x.x.x",
"mode": "0644",
"msg": "chattr failed",
"owner": "systemd-resolve",
"path": "/etc/resolv.conf.2577.2022-01-11@10:36:56~",
"size": 946,
"state": "file",
"uid": 101
}
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/76727
|
https://github.com/ansible/ansible/pull/78707
|
dd0138ba2127eac0e809d68e00ae117df56db77e
|
9b002d2e63ec0d39c7c3025dfd4be7489c82016b
| 2022-01-11T11:07:14Z |
python
| 2023-12-06T17:02:51Z |
lib/ansible/module_utils/basic.py
|
# Copyright (c), Michael DeHaan <[email protected]>, 2012-2013
# Copyright (c), Toshio Kuratomi <[email protected]> 2016
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
from __future__ import annotations
import json
import sys
# Used for determining if the system is running a new enough python version
# and should only restrict on our documented minimum versions
_PY_MIN = (3, 7)
if sys.version_info < _PY_MIN:
print(json.dumps(dict(
failed=True,
msg=f"ansible-core requires a minimum of Python version {'.'.join(map(str, _PY_MIN))}. Current version: {''.join(sys.version.splitlines())}",
)))
sys.exit(1)
# Ansible modules can be written in any language.
# The functions available here can be used to do many common tasks,
# to simplify development of Python modules.
import __main__
import atexit
import errno
import datetime
import grp
import fcntl
import locale
import os
import pwd
import platform
import re
import select
import selectors
import shlex
import shutil
import signal
import stat
import subprocess
import tempfile
import time
import traceback
import types
from itertools import chain, repeat
try:
import syslog
HAS_SYSLOG = True
except ImportError:
HAS_SYSLOG = False
try:
from systemd import journal, daemon as systemd_daemon
# Make sure that systemd.journal has the sendv() method (some packages don't)
# and check whether the system is actually running under systemd
has_journal = hasattr(journal, 'sendv') and systemd_daemon.booted()
except (ImportError, AttributeError):
# AttributeError would be caused from use of .booted() if wrong systemd
has_journal = False
HAVE_SELINUX = False
try:
from ansible.module_utils.compat import selinux
HAVE_SELINUX = True
except ImportError:
pass
# Python2 & 3 way to get NoneType
NoneType = type(None)
from ._text import to_native, to_bytes, to_text
from ansible.module_utils.common.text.converters import (
jsonify,
container_to_bytes as json_dict_unicode_to_bytes,
container_to_text as json_dict_bytes_to_unicode,
)
from ansible.module_utils.common.arg_spec import ModuleArgumentSpecValidator
from ansible.module_utils.common.text.formatters import (
lenient_lowercase,
bytes_to_human,
human_to_bytes,
SIZE_RANGES,
)
import hashlib
def _get_available_hash_algorithms():
"""Return a dictionary of available hash function names and their associated function."""
try:
# Algorithms available in Python 2.7.9+ and Python 3.2+
# https://docs.python.org/2.7/library/hashlib.html#hashlib.algorithms_available
# https://docs.python.org/3.2/library/hashlib.html#hashlib.algorithms_available
algorithm_names = hashlib.algorithms_available
except AttributeError:
# Algorithms in Python 2.7.x (used only for Python 2.7.0 through 2.7.8)
# https://docs.python.org/2.7/library/hashlib.html#hashlib.hashlib.algorithms
algorithm_names = set(hashlib.algorithms)
algorithms = {}
for algorithm_name in algorithm_names:
algorithm_func = getattr(hashlib, algorithm_name, None)
if algorithm_func:
try:
# Make sure the algorithm is actually available for use.
# Not all algorithms listed as available are actually usable.
# For example, md5 is not available in FIPS mode.
algorithm_func()
except Exception:
pass
else:
algorithms[algorithm_name] = algorithm_func
return algorithms
AVAILABLE_HASH_ALGORITHMS = _get_available_hash_algorithms()
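# e.g. AVAILABLE_HASH_ALGORITHMS['sha256'] is hashlib.sha256 on builds where SHA-256 is usable (editor note)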
from ansible.module_utils.six.moves.collections_abc import (
KeysView,
Mapping, MutableMapping,
Sequence, MutableSequence,
Set, MutableSet,
)
from ansible.module_utils.common.locale import get_best_parsable_locale
from ansible.module_utils.common.process import get_bin_path
from ansible.module_utils.common.file import (
_PERM_BITS as PERM_BITS,
_EXEC_PERM_BITS as EXEC_PERM_BITS,
_DEFAULT_PERM as DEFAULT_PERM,
is_executable,
format_attributes,
get_flags_from_attributes,
FILE_ATTRIBUTES,
)
from ansible.module_utils.common.sys_info import (
get_distribution,
get_distribution_version,
get_platform_subclass,
)
from ansible.module_utils.common.parameters import (
env_fallback,
remove_values,
sanitize_keys,
DEFAULT_TYPE_VALIDATORS,
PASS_VARS,
PASS_BOOLS,
)
from ansible.module_utils.errors import AnsibleFallbackNotFound, AnsibleValidationErrorMultiple, UnsupportedError
from ansible.module_utils.six import (
PY2,
PY3,
b,
binary_type,
integer_types,
iteritems,
string_types,
text_type,
)
from ansible.module_utils.six.moves import map, reduce, shlex_quote
from ansible.module_utils.common.validation import (
check_missing_parameters,
safe_eval,
)
from ansible.module_utils.common._utils import get_all_subclasses as _get_all_subclasses
from ansible.module_utils.parsing.convert_bool import BOOLEANS, BOOLEANS_FALSE, BOOLEANS_TRUE, boolean
from ansible.module_utils.common.warnings import (
deprecate,
get_deprecation_messages,
get_warning_messages,
warn,
)
# Note: When getting Sequence from collections, it matches with strings. If
# this matters, make sure to check for strings before checking for sequencetype
SEQUENCETYPE = frozenset, KeysView, Sequence
PASSWORD_MATCH = re.compile(r'^(?:.+[-_\s])?pass(?:[-_\s]?(?:word|phrase|wrd|wd)?)(?:[-_\s].+)?$', re.I)
imap = map
try:
# Python 2
unicode # type: ignore[used-before-def] # pylint: disable=used-before-assignment
except NameError:
# Python 3
unicode = text_type
try:
# Python 2
basestring # type: ignore[used-before-def,has-type] # pylint: disable=used-before-assignment
except NameError:
# Python 3
basestring = string_types
# End of deprecated names
# Internal global holding passed in params. This is consulted in case
# multiple AnsibleModules are created. Otherwise each AnsibleModule would
# attempt to read from stdin. Other code should not use this directly as it
# is an internal implementation detail
_ANSIBLE_ARGS = None
FILE_COMMON_ARGUMENTS = dict(
# Options for setting metadata (mode, ownership, permissions in general) on
# created files (these are used by set_fs_attributes_if_different and included in
# load_file_common_arguments)
mode=dict(type='raw'),
owner=dict(type='str'),
group=dict(type='str'),
seuser=dict(type='str'),
serole=dict(type='str'),
selevel=dict(type='str'),
setype=dict(type='str'),
attributes=dict(type='str', aliases=['attr']),
unsafe_writes=dict(type='bool', default=False, fallback=(env_fallback, ['ANSIBLE_UNSAFE_WRITES'])), # should be available to any module using atomic_move
)
PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
# Used for parsing symbolic file perms
MODE_OPERATOR_RE = re.compile(r'[+=-]')
USERS_RE = re.compile(r'^[ugo]+$')
PERMS_RE = re.compile(r'^[rwxXstugo]*$')
#
# Deprecated functions
#
def get_platform():
'''
**Deprecated** Use :py:func:`platform.system` directly.
:returns: Name of the platform the module is running on in a native string
Returns a native string that labels the platform ("Linux", "Solaris", etc). Currently, this is
the result of calling :py:func:`platform.system`.
'''
return platform.system()
# End deprecated functions
#
# Compat shims
#
def load_platform_subclass(cls, *args, **kwargs):
"""**Deprecated**: Use ansible.module_utils.common.sys_info.get_platform_subclass instead"""
platform_cls = get_platform_subclass(cls)
return super(cls, platform_cls).__new__(platform_cls)
def get_all_subclasses(cls):
"""**Deprecated**: Use ansible.module_utils.common._utils.get_all_subclasses instead"""
return list(_get_all_subclasses(cls))
# End compat shims
def heuristic_log_sanitize(data, no_log_values=None):
''' Remove strings that look like passwords from log messages '''
# Currently filters:
# user:pass@foo/whatever and http://username:pass@wherever/foo
# This code has false positives and consumes parts of logs that are
# not passwds
# begin: start of a passwd containing string
# end: end of a passwd containing string
# sep: char between user and passwd
# prev_begin: where in the overall string to start a search for
# a passwd
# sep_search_end: where in the string to end a search for the sep
data = to_native(data)
output = []
begin = len(data)
prev_begin = begin
sep = 1
while sep:
# Find the potential end of a passwd
try:
end = data.rindex('@', 0, begin)
except ValueError:
# No passwd in the rest of the data
output.insert(0, data[0:begin])
break
# Search for the beginning of a passwd
sep = None
sep_search_end = end
while not sep:
# URL-style username+password
try:
begin = data.rindex('://', 0, sep_search_end)
except ValueError:
# No url style in the data, check for ssh style in the
# rest of the string
begin = 0
# Search for separator
try:
sep = data.index(':', begin + 3, end)
except ValueError:
# No separator; choices:
if begin == 0:
# Searched the whole string so there's no password
# here. Return the remaining data
output.insert(0, data[0:prev_begin])
break
# Search for a different beginning of the password field.
sep_search_end = begin
continue
if sep:
# Password was found; remove it.
output.insert(0, data[end:prev_begin])
output.insert(0, '********')
output.insert(0, data[begin:sep + 1])
prev_begin = begin
output = ''.join(output)
if no_log_values:
output = remove_values(output, no_log_values)
return output
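# Example sketch (editor illustration, not part of the original module):
#   heuristic_log_sanitize('https://user:[email protected]/path')
#   # -> 'https://user:********@example.com/path'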
def _load_params():
''' read the modules parameters and store them globally.
This function may be needed for certain very dynamic custom modules which
want to process the parameters that are being handed to the module. Since
this is so closely tied to the implementation of modules we cannot
guarantee API stability for it (it may change between versions) however we
will try not to break it gratuitously. It is certainly more future-proof
to call this function and consume its outputs than to implement the logic
inside it as a copy in your own code.
'''
global _ANSIBLE_ARGS
if _ANSIBLE_ARGS is not None:
buffer = _ANSIBLE_ARGS
else:
# debug overrides to read args from file or cmdline
# Avoid tracebacks when locale is non-utf8
# We control the args and we pass them as utf8
if len(sys.argv) > 1:
if os.path.isfile(sys.argv[1]):
fd = open(sys.argv[1], 'rb')
buffer = fd.read()
fd.close()
else:
buffer = sys.argv[1]
if PY3:
buffer = buffer.encode('utf-8', errors='surrogateescape')
# default case, read from stdin
else:
if PY2:
buffer = sys.stdin.read()
else:
buffer = sys.stdin.buffer.read()
_ANSIBLE_ARGS = buffer
try:
params = json.loads(buffer.decode('utf-8'))
except ValueError:
# This helper is used too early for fail_json to work.
print('\n{"msg": "Error: Module unable to decode stdin/parameters as valid JSON. Unable to parse what parameters were passed", "failed": true}')
sys.exit(1)
if PY2:
params = json_dict_unicode_to_bytes(params)
try:
return params['ANSIBLE_MODULE_ARGS']
except KeyError:
# This helper does not have access to fail_json so we have to print
# json output on our own.
print('\n{"msg": "Error: Module unable to locate ANSIBLE_MODULE_ARGS in JSON data from stdin. Unable to figure out what parameters were passed", '
'"failed": true}')
sys.exit(1)
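# Usage sketch (editor illustration, not part of the original module): modules that must
# inspect raw parameters before building an argument_spec can call this helper directly, e.g.:
#   raw = _load_params()
#   wanted_state = raw.get('state', 'present')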
def missing_required_lib(library, reason=None, url=None):
hostname = platform.node()
msg = "Failed to import the required Python library (%s) on %s's Python %s." % (library, hostname, sys.executable)
if reason:
msg += " This is required %s." % reason
if url:
msg += " See %s for more info." % url
msg += (" Please read the module documentation and install it in the appropriate location."
" If the required library is installed, but Ansible is using the wrong Python interpreter,"
" please consult the documentation on ansible_python_interpreter")
return msg
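# Typical usage sketch (editor illustration, not part of the original module):
#   try:
#       import paramiko
#   except ImportError:
#       HAS_PARAMIKO = False
#   else:
#       HAS_PARAMIKO = True
#   ...
#   if not HAS_PARAMIKO:
#       module.fail_json(msg=missing_required_lib('paramiko'))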
class AnsibleModule(object):
def __init__(self, argument_spec, bypass_checks=False, no_log=False,
mutually_exclusive=None, required_together=None,
required_one_of=None, add_file_common_args=False,
supports_check_mode=False, required_if=None, required_by=None):
'''
Common code for quickly building an ansible module in Python
(although you can write modules with anything that can return JSON).
See :ref:`developing_modules_general` for a general introduction
and :ref:`developing_program_flow_modules` for more detailed explanation.
'''
self._name = os.path.basename(__file__) # initialize name until we can parse from options
self.argument_spec = argument_spec
self.supports_check_mode = supports_check_mode
self.check_mode = False
self.bypass_checks = bypass_checks
self.no_log = no_log
self.mutually_exclusive = mutually_exclusive
self.required_together = required_together
self.required_one_of = required_one_of
self.required_if = required_if
self.required_by = required_by
self.cleanup_files = []
self._debug = False
self._diff = False
self._socket_path = None
self._shell = None
self._syslog_facility = 'LOG_USER'
self._verbosity = 0
# May be used to set modifications to the environment for any
# run_command invocation
self.run_command_environ_update = {}
self._clean = {}
self._string_conversion_action = ''
self.aliases = {}
self._legal_inputs = []
self._options_context = list()
self._tmpdir = None
if add_file_common_args:
for k, v in FILE_COMMON_ARGUMENTS.items():
if k not in self.argument_spec:
self.argument_spec[k] = v
# Save parameter values that should never be logged
self.no_log_values = set()
# check the locale as set by the current environment, and reset to
# a known valid (LANG=C) if it's an invalid/unavailable locale
self._check_locale()
self._load_params()
self._set_internal_properties()
self.validator = ModuleArgumentSpecValidator(self.argument_spec,
self.mutually_exclusive,
self.required_together,
self.required_one_of,
self.required_if,
self.required_by,
)
self.validation_result = self.validator.validate(self.params)
self.params.update(self.validation_result.validated_parameters)
self.no_log_values.update(self.validation_result._no_log_values)
self.aliases.update(self.validation_result._aliases)
try:
error = self.validation_result.errors[0]
if isinstance(error, UnsupportedError) and self._ignore_unknown_opts:
error = None
except IndexError:
error = None
# Fail for validation errors, even in check mode
if error:
msg = self.validation_result.errors.msg
if isinstance(error, UnsupportedError):
msg = "Unsupported parameters for ({name}) {kind}: {msg}".format(name=self._name, kind='module', msg=msg)
self.fail_json(msg=msg)
if self.check_mode and not self.supports_check_mode:
self.exit_json(skipped=True, msg="remote module (%s) does not support check mode" % self._name)
# This is for backwards compatibility only.
self._CHECK_ARGUMENT_TYPES_DISPATCHER = DEFAULT_TYPE_VALIDATORS
if not self.no_log:
self._log_invocation()
# selinux state caching
self._selinux_enabled = None
self._selinux_mls_enabled = None
self._selinux_initial_context = None
# finally, make sure we're in a sane working dir
self._set_cwd()
@property
def tmpdir(self):
# if _ansible_tmpdir was not set and we have a remote_tmp,
# the module needs to create it and clean it up once finished.
# otherwise we create our own module tmp dir from the system defaults
if self._tmpdir is None:
basedir = None
if self._remote_tmp is not None:
basedir = os.path.expanduser(os.path.expandvars(self._remote_tmp))
if basedir is not None and not os.path.exists(basedir):
try:
os.makedirs(basedir, mode=0o700)
except (OSError, IOError) as e:
self.warn("Unable to use %s as temporary directory, "
"failing back to system: %s" % (basedir, to_native(e)))
basedir = None
else:
self.warn("Module remote_tmp %s did not exist and was "
"created with a mode of 0700, this may cause"
" issues when running as another user. To "
"avoid this, create the remote_tmp dir with "
"the correct permissions manually" % basedir)
basefile = "ansible-moduletmp-%s-" % time.time()
try:
tmpdir = tempfile.mkdtemp(prefix=basefile, dir=basedir)
except (OSError, IOError) as e:
self.fail_json(
msg="Failed to create remote module tmp path at dir %s "
"with prefix %s: %s" % (basedir, basefile, to_native(e))
)
if not self._keep_remote_files:
atexit.register(shutil.rmtree, tmpdir)
self._tmpdir = tmpdir
return self._tmpdir
def warn(self, warning):
warn(warning)
self.log('[WARNING] %s' % warning)
def deprecate(self, msg, version=None, date=None, collection_name=None):
if version is not None and date is not None:
raise AssertionError("implementation error -- version and date must not both be set")
deprecate(msg, version=version, date=date, collection_name=collection_name)
# For compatibility, we accept that neither version nor date is set,
# and treat that the same as if version had not been set
if date is not None:
self.log('[DEPRECATION WARNING] %s %s' % (msg, date))
else:
self.log('[DEPRECATION WARNING] %s %s' % (msg, version))
def load_file_common_arguments(self, params, path=None):
'''
Many modules deal with files; this encapsulates the common
options that the file module accepts so that they are directly
available to all modules and those modules can share code.
Allows overriding the path/dest module argument by providing path.
'''
if path is None:
path = params.get('path', params.get('dest', None))
if path is None:
return {}
else:
path = os.path.expanduser(os.path.expandvars(path))
b_path = to_bytes(path, errors='surrogate_or_strict')
# if the path is a symlink, and we're following links, get
# the target of the link instead for testing
if params.get('follow', False) and os.path.islink(b_path):
b_path = os.path.realpath(b_path)
path = to_native(b_path)
mode = params.get('mode', None)
owner = params.get('owner', None)
group = params.get('group', None)
# selinux related options
seuser = params.get('seuser', None)
serole = params.get('serole', None)
setype = params.get('setype', None)
selevel = params.get('selevel', None)
secontext = [seuser, serole, setype]
if self.selinux_mls_enabled():
secontext.append(selevel)
default_secontext = self.selinux_default_context(path)
for i in range(len(default_secontext)):
if secontext[i] == '_default':
secontext[i] = default_secontext[i]
attributes = params.get('attributes', None)
return dict(
path=path, mode=mode, owner=owner, group=group,
seuser=seuser, serole=serole, setype=setype,
selevel=selevel, secontext=secontext, attributes=attributes,
)
# Detect whether using selinux that is MLS-aware.
# While this means you can set the level/range with
# selinux.lsetfilecon(), it may or may not mean that you
# will get the selevel as part of the context returned
# by selinux.lgetfilecon().
def selinux_mls_enabled(self):
if self._selinux_mls_enabled is None:
self._selinux_mls_enabled = HAVE_SELINUX and selinux.is_selinux_mls_enabled() == 1
return self._selinux_mls_enabled
def selinux_enabled(self):
if self._selinux_enabled is None:
self._selinux_enabled = HAVE_SELINUX and selinux.is_selinux_enabled() == 1
return self._selinux_enabled
# Determine whether we need a placeholder for selevel/mls
def selinux_initial_context(self):
if self._selinux_initial_context is None:
self._selinux_initial_context = [None, None, None]
if self.selinux_mls_enabled():
self._selinux_initial_context.append(None)
return self._selinux_initial_context
# If selinux fails to find a default, return an array of None
def selinux_default_context(self, path, mode=0):
context = self.selinux_initial_context()
if not self.selinux_enabled():
return context
try:
ret = selinux.matchpathcon(to_native(path, errors='surrogate_or_strict'), mode)
except OSError:
return context
if ret[0] == -1:
return context
# Limit split to 4 because the selevel, the last in the list,
# may contain ':' characters
context = ret[1].split(':', 3)
return context
def selinux_context(self, path):
context = self.selinux_initial_context()
if not self.selinux_enabled():
return context
try:
ret = selinux.lgetfilecon_raw(to_native(path, errors='surrogate_or_strict'))
except OSError as e:
if e.errno == errno.ENOENT:
self.fail_json(path=path, msg='path %s does not exist' % path)
else:
self.fail_json(path=path, msg='failed to retrieve selinux context')
if ret[0] == -1:
return context
# Limit split to 4 because the selevel, the last in the list,
# may contain ':' characters
context = ret[1].split(':', 3)
return context
def user_and_group(self, path, expand=True):
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
st = os.lstat(b_path)
uid = st.st_uid
gid = st.st_gid
return (uid, gid)
def find_mount_point(self, path):
'''
Takes a path and returns its mount point
:param path: a string type with a filesystem path
:returns: the path to the mount point as a text type
'''
b_path = os.path.realpath(to_bytes(os.path.expanduser(os.path.expandvars(path)), errors='surrogate_or_strict'))
while not os.path.ismount(b_path):
b_path = os.path.dirname(b_path)
return to_text(b_path, errors='surrogate_or_strict')
def is_special_selinux_path(self, path):
"""
Returns a tuple containing (True, selinux_context) if the given path is on an
NFS or other 'special' fs mount point, otherwise the return will be (False, None).
"""
try:
f = open('/proc/mounts', 'r')
mount_data = f.readlines()
f.close()
except Exception:
return (False, None)
path_mount_point = self.find_mount_point(path)
for line in mount_data:
(device, mount_point, fstype, options, rest) = line.split(' ', 4)
if to_bytes(path_mount_point) == to_bytes(mount_point):
for fs in self._selinux_special_fs:
if fs in fstype:
special_context = self.selinux_context(path_mount_point)
return (True, special_context)
return (False, None)
def set_default_selinux_context(self, path, changed):
if not self.selinux_enabled():
return changed
context = self.selinux_default_context(path)
return self.set_context_if_different(path, context, False)
def set_context_if_different(self, path, context, changed, diff=None):
if not self.selinux_enabled():
return changed
if self.check_file_absent_if_check_mode(path):
return True
cur_context = self.selinux_context(path)
new_context = list(cur_context)
# Iterate over the current context instead of the
# argument context, which may have selevel.
(is_special_se, sp_context) = self.is_special_selinux_path(path)
if is_special_se:
new_context = sp_context
else:
for i in range(len(cur_context)):
if len(context) > i:
if context[i] is not None and context[i] != cur_context[i]:
new_context[i] = context[i]
elif context[i] is None:
new_context[i] = cur_context[i]
if cur_context != new_context:
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['secontext'] = cur_context
if 'after' not in diff:
diff['after'] = {}
diff['after']['secontext'] = new_context
try:
if self.check_mode:
return True
rc = selinux.lsetfilecon(to_native(path), ':'.join(new_context))
except OSError as e:
self.fail_json(path=path, msg='invalid selinux context: %s' % to_native(e),
new_context=new_context, cur_context=cur_context, input_was=context)
if rc != 0:
self.fail_json(path=path, msg='set selinux context failed')
changed = True
return changed
def set_owner_if_different(self, path, owner, changed, diff=None, expand=True):
if owner is None:
return changed
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
if self.check_file_absent_if_check_mode(b_path):
return True
orig_uid, orig_gid = self.user_and_group(b_path, expand)
try:
uid = int(owner)
except ValueError:
try:
uid = pwd.getpwnam(owner).pw_uid
except KeyError:
path = to_text(b_path)
self.fail_json(path=path, msg='chown failed: failed to look up user %s' % owner)
if orig_uid != uid:
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['owner'] = orig_uid
if 'after' not in diff:
diff['after'] = {}
diff['after']['owner'] = uid
if self.check_mode:
return True
try:
os.lchown(b_path, uid, -1)
except (IOError, OSError) as e:
path = to_text(b_path)
self.fail_json(path=path, msg='chown failed: %s' % (to_text(e)))
changed = True
return changed
def set_group_if_different(self, path, group, changed, diff=None, expand=True):
if group is None:
return changed
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
if self.check_file_absent_if_check_mode(b_path):
return True
orig_uid, orig_gid = self.user_and_group(b_path, expand)
try:
gid = int(group)
except ValueError:
try:
gid = grp.getgrnam(group).gr_gid
except KeyError:
path = to_text(b_path)
self.fail_json(path=path, msg='chgrp failed: failed to look up group %s' % group)
if orig_gid != gid:
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['group'] = orig_gid
if 'after' not in diff:
diff['after'] = {}
diff['after']['group'] = gid
if self.check_mode:
return True
try:
os.lchown(b_path, -1, gid)
except OSError:
path = to_text(b_path)
self.fail_json(path=path, msg='chgrp failed')
changed = True
return changed
def set_mode_if_different(self, path, mode, changed, diff=None, expand=True):
if mode is None:
return changed
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
if self.check_file_absent_if_check_mode(b_path):
return True
path_stat = os.lstat(b_path)
if not isinstance(mode, int):
try:
mode = int(mode, 8)
except Exception:
try:
mode = self._symbolic_mode_to_octal(path_stat, mode)
except Exception as e:
path = to_text(b_path)
self.fail_json(path=path,
msg="mode must be in octal or symbolic form",
details=to_native(e))
if mode != stat.S_IMODE(mode):
# prevent mode from having extra info or being invalid long number
path = to_text(b_path)
self.fail_json(path=path, msg="Invalid mode supplied, only permission info is allowed", details=mode)
prev_mode = stat.S_IMODE(path_stat.st_mode)
if prev_mode != mode:
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['mode'] = '0%03o' % prev_mode
if 'after' not in diff:
diff['after'] = {}
diff['after']['mode'] = '0%03o' % mode
if self.check_mode:
return True
# FIXME: comparison against string above will cause this to be executed
# every time
try:
if hasattr(os, 'lchmod'):
os.lchmod(b_path, mode)
else:
if not os.path.islink(b_path):
os.chmod(b_path, mode)
else:
# Attempt to set the perms of the symlink but be
# careful not to change the perms of the underlying
# file while trying
underlying_stat = os.stat(b_path)
os.chmod(b_path, mode)
new_underlying_stat = os.stat(b_path)
if underlying_stat.st_mode != new_underlying_stat.st_mode:
os.chmod(b_path, stat.S_IMODE(underlying_stat.st_mode))
except OSError as e:
if os.path.islink(b_path) and e.errno in (
errno.EACCES, # can't access symlink in sticky directory (stat)
errno.EPERM, # can't set mode on symbolic links (chmod)
errno.EROFS, # can't set mode on read-only filesystem
):
pass
elif e.errno in (errno.ENOENT, errno.ELOOP): # Can't set mode on broken symbolic links
pass
else:
raise
except Exception as e:
path = to_text(b_path)
self.fail_json(path=path, msg='chmod failed', details=to_native(e),
exception=traceback.format_exc())
path_stat = os.lstat(b_path)
new_mode = stat.S_IMODE(path_stat.st_mode)
if new_mode != prev_mode:
changed = True
return changed
def set_attributes_if_different(self, path, attributes, changed, diff=None, expand=True):
if attributes is None:
return changed
b_path = to_bytes(path, errors='surrogate_or_strict')
if expand:
b_path = os.path.expanduser(os.path.expandvars(b_path))
if self.check_file_absent_if_check_mode(b_path):
return True
existing = self.get_file_attributes(b_path, include_version=False)
attr_mod = '='
if attributes.startswith(('-', '+')):
attr_mod = attributes[0]
attributes = attributes[1:]
if existing.get('attr_flags', '') != attributes or attr_mod == '-':
attrcmd = self.get_bin_path('chattr')
if attrcmd:
attrcmd = [attrcmd, '%s%s' % (attr_mod, attributes), b_path]
changed = True
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
diff['before']['attributes'] = existing.get('attr_flags')
if 'after' not in diff:
diff['after'] = {}
diff['after']['attributes'] = '%s%s' % (attr_mod, attributes)
if not self.check_mode:
try:
rc, out, err = self.run_command(attrcmd)
if rc != 0 or err:
raise Exception("Error while setting attributes: %s" % (out + err))
except Exception as e:
self.fail_json(path=to_text(b_path), msg='chattr failed',
details=to_native(e), exception=traceback.format_exc())
return changed
def get_file_attributes(self, path, include_version=True):
output = {}
attrcmd = self.get_bin_path('lsattr', False)
if attrcmd:
flags = '-vd' if include_version else '-d'
attrcmd = [attrcmd, flags, path]
try:
rc, out, err = self.run_command(attrcmd)
if rc == 0:
res = out.split()
attr_flags_idx = 0
if include_version:
attr_flags_idx = 1
output['version'] = res[0].strip()
output['attr_flags'] = res[attr_flags_idx].replace('-', '').strip()
output['attributes'] = format_attributes(output['attr_flags'])
except Exception:
pass
return output
@classmethod
def _symbolic_mode_to_octal(cls, path_stat, symbolic_mode):
"""
This enables symbolic chmod string parsing as stated in the chmod man-page
This includes things like: "u=rw-x+X,g=r-x+X,o=r-x+X"
"""
new_mode = stat.S_IMODE(path_stat.st_mode)
# Now parse all symbolic modes
for mode in symbolic_mode.split(','):
# Per single mode. This always contains a '+', '-' or '='
# Split it on that
permlist = MODE_OPERATOR_RE.split(mode)
# And find all the operators
opers = MODE_OPERATOR_RE.findall(mode)
            # The user(s) the mode applies to are given by the first element of
            # 'permlist'. Take that and remove it from the list.
            # An empty user or 'a' means 'all'.
users = permlist.pop(0)
use_umask = (users == '')
if users == 'a' or users == '':
users = 'ugo'
# Check if there are illegal characters in the user list
# They can end up in 'users' because they are not split
if not USERS_RE.match(users):
raise ValueError("bad symbolic permission for mode: %s" % mode)
            # Now we have two lists of equal length: one contains the requested
            # permissions and the other the corresponding operators.
for idx, perms in enumerate(permlist):
# Check if there are illegal characters in the permissions
if not PERMS_RE.match(perms):
raise ValueError("bad symbolic permission for mode: %s" % mode)
for user in users:
mode_to_apply = cls._get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask, new_mode)
new_mode = cls._apply_operation_to_mode(user, opers[idx], mode_to_apply, new_mode)
return new_mode
@staticmethod
def _apply_operation_to_mode(user, operator, mode_to_apply, current_mode):
if operator == '=':
if user == 'u':
mask = stat.S_IRWXU | stat.S_ISUID
elif user == 'g':
mask = stat.S_IRWXG | stat.S_ISGID
elif user == 'o':
mask = stat.S_IRWXO | stat.S_ISVTX
# mask out u, g, or o permissions from current_mode and apply new permissions
inverse_mask = mask ^ PERM_BITS
new_mode = (current_mode & inverse_mask) | mode_to_apply
elif operator == '+':
new_mode = current_mode | mode_to_apply
elif operator == '-':
new_mode = current_mode - (current_mode & mode_to_apply)
return new_mode
@staticmethod
def _get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask, prev_mode=None):
if prev_mode is None:
prev_mode = stat.S_IMODE(path_stat.st_mode)
is_directory = stat.S_ISDIR(path_stat.st_mode)
has_x_permissions = (prev_mode & EXEC_PERM_BITS) > 0
apply_X_permission = is_directory or has_x_permissions
# Get the umask, if the 'user' part is empty, the effect is as if (a) were
# given, but bits that are set in the umask are not affected.
# We also need the "reversed umask" for masking
umask = os.umask(0)
os.umask(umask)
rev_umask = umask ^ PERM_BITS
# Permission bits constants documented at:
# https://docs.python.org/3/library/stat.html#stat.S_ISUID
if apply_X_permission:
X_perms = {
'u': {'X': stat.S_IXUSR},
'g': {'X': stat.S_IXGRP},
'o': {'X': stat.S_IXOTH},
}
else:
X_perms = {
'u': {'X': 0},
'g': {'X': 0},
'o': {'X': 0},
}
user_perms_to_modes = {
'u': {
'r': rev_umask & stat.S_IRUSR if use_umask else stat.S_IRUSR,
'w': rev_umask & stat.S_IWUSR if use_umask else stat.S_IWUSR,
'x': rev_umask & stat.S_IXUSR if use_umask else stat.S_IXUSR,
's': stat.S_ISUID,
't': 0,
'u': prev_mode & stat.S_IRWXU,
'g': (prev_mode & stat.S_IRWXG) << 3,
'o': (prev_mode & stat.S_IRWXO) << 6},
'g': {
'r': rev_umask & stat.S_IRGRP if use_umask else stat.S_IRGRP,
'w': rev_umask & stat.S_IWGRP if use_umask else stat.S_IWGRP,
'x': rev_umask & stat.S_IXGRP if use_umask else stat.S_IXGRP,
's': stat.S_ISGID,
't': 0,
'u': (prev_mode & stat.S_IRWXU) >> 3,
'g': prev_mode & stat.S_IRWXG,
'o': (prev_mode & stat.S_IRWXO) << 3},
'o': {
'r': rev_umask & stat.S_IROTH if use_umask else stat.S_IROTH,
'w': rev_umask & stat.S_IWOTH if use_umask else stat.S_IWOTH,
'x': rev_umask & stat.S_IXOTH if use_umask else stat.S_IXOTH,
's': 0,
't': stat.S_ISVTX,
'u': (prev_mode & stat.S_IRWXU) >> 6,
'g': (prev_mode & stat.S_IRWXG) >> 3,
'o': prev_mode & stat.S_IRWXO},
}
# Insert X_perms into user_perms_to_modes
for key, value in X_perms.items():
user_perms_to_modes[key].update(value)
def or_reduce(mode, perm):
return mode | user_perms_to_modes[user][perm]
return reduce(or_reduce, perms, 0)
def set_fs_attributes_if_different(self, file_args, changed, diff=None, expand=True):
# set modes owners and context as needed
changed = self.set_context_if_different(
file_args['path'], file_args['secontext'], changed, diff
)
changed = self.set_owner_if_different(
file_args['path'], file_args['owner'], changed, diff, expand
)
changed = self.set_group_if_different(
file_args['path'], file_args['group'], changed, diff, expand
)
changed = self.set_mode_if_different(
file_args['path'], file_args['mode'], changed, diff, expand
)
changed = self.set_attributes_if_different(
file_args['path'], file_args['attributes'], changed, diff, expand
)
return changed
def check_file_absent_if_check_mode(self, file_path):
return self.check_mode and not os.path.exists(file_path)
def set_directory_attributes_if_different(self, file_args, changed, diff=None, expand=True):
return self.set_fs_attributes_if_different(file_args, changed, diff, expand)
def set_file_attributes_if_different(self, file_args, changed, diff=None, expand=True):
return self.set_fs_attributes_if_different(file_args, changed, diff, expand)
def add_path_info(self, kwargs):
'''
for results that are files, supplement the info about the file
in the return path with stats about the file path.
'''
path = kwargs.get('path', kwargs.get('dest', None))
if path is None:
return kwargs
b_path = to_bytes(path, errors='surrogate_or_strict')
if os.path.exists(b_path):
(uid, gid) = self.user_and_group(path)
kwargs['uid'] = uid
kwargs['gid'] = gid
try:
user = pwd.getpwuid(uid)[0]
except KeyError:
user = str(uid)
try:
group = grp.getgrgid(gid)[0]
except KeyError:
group = str(gid)
kwargs['owner'] = user
kwargs['group'] = group
st = os.lstat(b_path)
kwargs['mode'] = '0%03o' % stat.S_IMODE(st[stat.ST_MODE])
# secontext not yet supported
if os.path.islink(b_path):
kwargs['state'] = 'link'
elif os.path.isdir(b_path):
kwargs['state'] = 'directory'
elif os.stat(b_path).st_nlink > 1:
kwargs['state'] = 'hard'
else:
kwargs['state'] = 'file'
if self.selinux_enabled():
kwargs['secontext'] = ':'.join(self.selinux_context(path))
kwargs['size'] = st[stat.ST_SIZE]
return kwargs
def _check_locale(self):
'''
Uses the locale module to test the currently set locale
(per the LANG and LC_CTYPE environment settings)
'''
try:
# setting the locale to '' uses the default locale
# as it would be returned by locale.getdefaultlocale()
locale.setlocale(locale.LC_ALL, '')
except locale.Error:
# fallback to the 'best' locale, per the function
# final fallback is 'C', which may cause unicode issues
# but is preferable to simply failing on unknown locale
best_locale = get_best_parsable_locale(self)
# need to set several since many tools choose to ignore documented precedence and scope
locale.setlocale(locale.LC_ALL, best_locale)
os.environ['LANG'] = best_locale
os.environ['LC_ALL'] = best_locale
os.environ['LC_MESSAGES'] = best_locale
except Exception as e:
self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" %
to_native(e), exception=traceback.format_exc())
def _set_internal_properties(self, argument_spec=None, module_parameters=None):
if argument_spec is None:
argument_spec = self.argument_spec
if module_parameters is None:
module_parameters = self.params
for k in PASS_VARS:
# handle setting internal properties from internal ansible vars
param_key = '_ansible_%s' % k
if param_key in module_parameters:
if k in PASS_BOOLS:
setattr(self, PASS_VARS[k][0], self.boolean(module_parameters[param_key]))
else:
setattr(self, PASS_VARS[k][0], module_parameters[param_key])
# clean up internal top level params:
if param_key in self.params:
del self.params[param_key]
else:
# use defaults if not already set
if not hasattr(self, PASS_VARS[k][0]):
setattr(self, PASS_VARS[k][0], PASS_VARS[k][1])
def safe_eval(self, value, locals=None, include_exceptions=False):
return safe_eval(value, locals, include_exceptions)
def _load_params(self):
''' read the input and set the params attribute.
This method is for backwards compatibility. The guts of the function
were moved out in 2.1 so that custom modules could read the parameters.
'''
# debug overrides to read args from file or cmdline
self.params = _load_params()
def _log_to_syslog(self, msg):
if HAS_SYSLOG:
try:
module = 'ansible-%s' % self._name
facility = getattr(syslog, self._syslog_facility, syslog.LOG_USER)
syslog.openlog(str(module), 0, facility)
syslog.syslog(syslog.LOG_INFO, msg)
except (TypeError, ValueError) as e:
self.fail_json(
msg='Failed to log to syslog (%s). To proceed anyway, '
'disable syslog logging by setting no_target_syslog '
'to True in your Ansible config.' % to_native(e),
exception=traceback.format_exc(),
msg_to_log=msg,
)
def debug(self, msg):
if self._debug:
self.log('[debug] %s' % msg)
def log(self, msg, log_args=None):
if not self.no_log:
if log_args is None:
log_args = dict()
module = 'ansible-%s' % self._name
if isinstance(module, binary_type):
module = module.decode('utf-8', 'replace')
# 6655 - allow for accented characters
if not isinstance(msg, (binary_type, text_type)):
raise TypeError("msg should be a string (got %s)" % type(msg))
# We want journal to always take text type
# syslog takes bytes on py2, text type on py3
if isinstance(msg, binary_type):
journal_msg = msg.decode('utf-8', 'replace')
else:
# TODO: surrogateescape is a danger here on Py3
journal_msg = msg
if self._target_log_info:
journal_msg = ' '.join([self._target_log_info, journal_msg])
# ensure we clean up secrets!
journal_msg = remove_values(journal_msg, self.no_log_values)
if PY3:
syslog_msg = journal_msg
else:
syslog_msg = journal_msg.encode('utf-8', 'replace')
if has_journal:
journal_args = [("MODULE", os.path.basename(__file__))]
for arg in log_args:
name, value = (arg.upper(), str(log_args[arg]))
if name in (
'PRIORITY', 'MESSAGE', 'MESSAGE_ID',
'CODE_FILE', 'CODE_LINE', 'CODE_FUNC',
'SYSLOG_FACILITY', 'SYSLOG_IDENTIFIER',
'SYSLOG_PID',
):
name = "_%s" % name
journal_args.append((name, value))
try:
if HAS_SYSLOG:
# If syslog_facility specified, it needs to convert
# from the facility name to the facility code, and
# set it as SYSLOG_FACILITY argument of journal.send()
facility = getattr(syslog,
self._syslog_facility,
syslog.LOG_USER) >> 3
journal.send(MESSAGE=u"%s %s" % (module, journal_msg),
SYSLOG_FACILITY=facility,
**dict(journal_args))
else:
journal.send(MESSAGE=u"%s %s" % (module, journal_msg),
**dict(journal_args))
except IOError:
# fall back to syslog since logging to journal failed
self._log_to_syslog(syslog_msg)
else:
self._log_to_syslog(syslog_msg)
def _log_invocation(self):
''' log that ansible ran the module '''
# TODO: generalize a separate log function and make log_invocation use it
# Sanitize possible password argument when logging.
log_args = dict()
for param in self.params:
canon = self.aliases.get(param, param)
arg_opts = self.argument_spec.get(canon, {})
no_log = arg_opts.get('no_log', None)
# try to proactively capture password/passphrase fields
if no_log is None and PASSWORD_MATCH.search(param):
log_args[param] = 'NOT_LOGGING_PASSWORD'
self.warn('Module did not set no_log for %s' % param)
elif self.boolean(no_log):
log_args[param] = 'NOT_LOGGING_PARAMETER'
else:
param_val = self.params[param]
if not isinstance(param_val, (text_type, binary_type)):
param_val = str(param_val)
elif isinstance(param_val, text_type):
param_val = param_val.encode('utf-8')
log_args[param] = heuristic_log_sanitize(param_val, self.no_log_values)
msg = ['%s=%s' % (to_native(arg), to_native(val)) for arg, val in log_args.items()]
if msg:
msg = 'Invoked with %s' % ' '.join(msg)
else:
msg = 'Invoked'
self.log(msg, log_args=log_args)
def _set_cwd(self):
try:
cwd = os.getcwd()
if not os.access(cwd, os.F_OK | os.R_OK):
raise Exception()
return cwd
except Exception:
# we don't have access to the cwd, probably because of sudo.
# Try and move to a neutral location to prevent errors
for cwd in [self.tmpdir, os.path.expandvars('$HOME'), tempfile.gettempdir()]:
try:
if os.access(cwd, os.F_OK | os.R_OK):
os.chdir(cwd)
return cwd
except Exception:
pass
# we won't error here, as it may *not* be a problem,
# and we don't want to break modules unnecessarily
return None
def get_bin_path(self, arg, required=False, opt_dirs=None):
'''
Find system executable in PATH.
:param arg: The executable to find.
:param required: if executable is not found and required is ``True``, fail_json
:param opt_dirs: optional list of directories to search in addition to ``PATH``
:returns: if found return full path; otherwise return None
'''
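        # Hedged usage sketch (illustrative only, not part of the original source):
        #     chattr_path = module.get_bin_path('chattr', required=False, opt_dirs=['/usr/sbin'])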
bin_path = None
try:
bin_path = get_bin_path(arg=arg, opt_dirs=opt_dirs)
except ValueError as e:
if required:
self.fail_json(msg=to_text(e))
else:
return bin_path
return bin_path
def boolean(self, arg):
'''Convert the argument to a boolean'''
if arg is None:
return arg
try:
return boolean(arg)
except TypeError as e:
self.fail_json(msg=to_native(e))
def jsonify(self, data):
try:
return jsonify(data)
except UnicodeError as e:
self.fail_json(msg=to_text(e))
def from_json(self, data):
return json.loads(data)
def add_cleanup_file(self, path):
if path not in self.cleanup_files:
self.cleanup_files.append(path)
def do_cleanup_files(self):
for path in self.cleanup_files:
self.cleanup(path)
def _return_formatted(self, kwargs):
self.add_path_info(kwargs)
if 'invocation' not in kwargs:
kwargs['invocation'] = {'module_args': self.params}
if 'warnings' in kwargs:
if isinstance(kwargs['warnings'], list):
for w in kwargs['warnings']:
self.warn(w)
else:
self.warn(kwargs['warnings'])
warnings = get_warning_messages()
if warnings:
kwargs['warnings'] = warnings
if 'deprecations' in kwargs:
if isinstance(kwargs['deprecations'], list):
for d in kwargs['deprecations']:
if isinstance(d, SEQUENCETYPE) and len(d) == 2:
self.deprecate(d[0], version=d[1])
elif isinstance(d, Mapping):
self.deprecate(d['msg'], version=d.get('version'), date=d.get('date'),
collection_name=d.get('collection_name'))
else:
self.deprecate(d) # pylint: disable=ansible-deprecated-no-version
else:
self.deprecate(kwargs['deprecations']) # pylint: disable=ansible-deprecated-no-version
deprecations = get_deprecation_messages()
if deprecations:
kwargs['deprecations'] = deprecations
# preserve bools/none from no_log
        # TODO: once the python version on targets is high enough, use a dict comprehension
preserved = {}
for k, v in kwargs.items():
if v is None or isinstance(v, bool):
preserved[k] = v
# strip no_log collisions
kwargs = remove_values(kwargs, self.no_log_values)
# return preserved
kwargs.update(preserved)
print('\n%s' % self.jsonify(kwargs))
def exit_json(self, **kwargs):
''' return from the module, without error '''
self.do_cleanup_files()
self._return_formatted(kwargs)
sys.exit(0)
def fail_json(self, msg, **kwargs):
''' return from the module, with an error message '''
kwargs['failed'] = True
kwargs['msg'] = msg
# Add traceback if debug or high verbosity and it is missing
# NOTE: Badly named as exception, it really always has been a traceback
if 'exception' not in kwargs and sys.exc_info()[2] and (self._debug or self._verbosity >= 3):
if PY2:
# On Python 2 this is the last (stack frame) exception and as such may be unrelated to the failure
kwargs['exception'] = 'WARNING: The below traceback may *not* be related to the actual failure.\n' +\
''.join(traceback.format_tb(sys.exc_info()[2]))
else:
kwargs['exception'] = ''.join(traceback.format_tb(sys.exc_info()[2]))
self.do_cleanup_files()
self._return_formatted(kwargs)
sys.exit(1)
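    # Hedged usage sketches (illustrative only, not part of the original source):
    #     module.exit_json(changed=True, msg='line added', backup='/etc/resolv.conf.2410.2022-01-11@10:36:56~')
    #     module.fail_json(msg='chattr failed', path=path, details=to_native(e))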
def fail_on_missing_params(self, required_params=None):
if not required_params:
return
try:
check_missing_parameters(self.params, required_params)
except TypeError as e:
self.fail_json(msg=to_native(e))
def digest_from_file(self, filename, algorithm):
''' Return hex digest of local file for a digest_method specified by name, or None if file is not present. '''
b_filename = to_bytes(filename, errors='surrogate_or_strict')
if not os.path.exists(b_filename):
return None
if os.path.isdir(b_filename):
self.fail_json(msg="attempted to take checksum of directory: %s" % filename)
# preserve old behaviour where the third parameter was a hash algorithm object
if hasattr(algorithm, 'hexdigest'):
digest_method = algorithm
else:
try:
digest_method = AVAILABLE_HASH_ALGORITHMS[algorithm]()
except KeyError:
self.fail_json(msg="Could not hash file '%s' with algorithm '%s'. Available algorithms: %s" %
(filename, algorithm, ', '.join(AVAILABLE_HASH_ALGORITHMS)))
blocksize = 64 * 1024
infile = open(os.path.realpath(b_filename), 'rb')
block = infile.read(blocksize)
while block:
digest_method.update(block)
block = infile.read(blocksize)
infile.close()
return digest_method.hexdigest()
def md5(self, filename):
''' Return MD5 hex digest of local file using digest_from_file().
Do not use this function unless you have no other choice for:
1) Optional backwards compatibility
2) Compatibility with a third party protocol
This function will not work on systems complying with FIPS-140-2.
Most uses of this function can use the module.sha1 function instead.
'''
if 'md5' not in AVAILABLE_HASH_ALGORITHMS:
raise ValueError('MD5 not available. Possibly running in FIPS mode')
return self.digest_from_file(filename, 'md5')
def sha1(self, filename):
''' Return SHA1 hex digest of local file using digest_from_file(). '''
return self.digest_from_file(filename, 'sha1')
def sha256(self, filename):
''' Return SHA-256 hex digest of local file using digest_from_file(). '''
return self.digest_from_file(filename, 'sha256')
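    # Hedged usage sketch (illustrative only): detecting content changes via checksums
    #     before = module.sha1(path)
    #     ... modify the file ...
    #     changed = module.sha1(path) != before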
def backup_local(self, fn):
        '''Make a date-marked backup of the specified file and return the backup destination path (an empty string if the file does not exist).'''
backupdest = ''
if os.path.exists(fn):
# backups named basename.PID.YYYY-MM-DD@HH:MM:SS~
ext = time.strftime("%Y-%m-%d@%H:%M:%S~", time.localtime(time.time()))
backupdest = '%s.%s.%s' % (fn, os.getpid(), ext)
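            # e.g. /etc/resolv.conf.2410.2022-01-11@10:36:56~ -- the same naming scheme
            # that appears in the traceback of the issue reproduced below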
try:
self.preserved_copy(fn, backupdest)
except (shutil.Error, IOError) as e:
self.fail_json(msg='Could not make backup of %s to %s: %s' % (fn, backupdest, to_native(e)))
return backupdest
def cleanup(self, tmpfile):
if os.path.exists(tmpfile):
try:
os.unlink(tmpfile)
except OSError as e:
sys.stderr.write("could not cleanup %s: %s" % (tmpfile, to_native(e)))
def preserved_copy(self, src, dest):
"""Copy a file with preserved ownership, permissions and context"""
# shutil.copy2(src, dst)
# Similar to shutil.copy(), but metadata is copied as well - in fact,
# this is just shutil.copy() followed by copystat(). This is similar
# to the Unix command cp -p.
#
# shutil.copystat(src, dst)
# Copy the permission bits, last access time, last modification time,
# and flags from src to dst. The file contents, owner, and group are
# unaffected. src and dst are path names given as strings.
shutil.copy2(src, dest)
# Set the context
if self.selinux_enabled():
context = self.selinux_context(src)
self.set_context_if_different(dest, context, False)
# chown it
try:
dest_stat = os.stat(src)
tmp_stat = os.stat(dest)
if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
os.chown(dest, dest_stat.st_uid, dest_stat.st_gid)
except OSError as e:
if e.errno != errno.EPERM:
raise
# Set the attributes
current_attribs = self.get_file_attributes(src, include_version=False)
current_attribs = current_attribs.get('attr_flags', '')
self.set_attributes_if_different(dest, current_attribs, True)
def atomic_move(self, src, dest, unsafe_writes=False):
        '''Atomically move src to dest, copying attributes from dest; returns true on success.
        It uses os.rename where it can, since that is an atomic operation; the rest of the function
        works around limitations and corner cases and ensures the selinux context is saved if possible.'''
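        # Rough outline of the strategy implemented below (comment added for clarity, not in the original):
        #   1) try os.rename(src, dest) directly -- atomic on the same filesystem
        #   2) on EPERM/EXDEV/EACCES/ETXTBSY/EBUSY, copy src to a temp file next to dest
        #      and rename the temp file over dest
        #   3) only with unsafe_writes=True fall back to rewriting dest in place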
context = None
dest_stat = None
b_src = to_bytes(src, errors='surrogate_or_strict')
b_dest = to_bytes(dest, errors='surrogate_or_strict')
if os.path.exists(b_dest):
try:
dest_stat = os.stat(b_dest)
# copy mode and ownership
os.chmod(b_src, dest_stat.st_mode & PERM_BITS)
os.chown(b_src, dest_stat.st_uid, dest_stat.st_gid)
# try to copy flags if possible
if hasattr(os, 'chflags') and hasattr(dest_stat, 'st_flags'):
try:
os.chflags(b_src, dest_stat.st_flags)
except OSError as e:
for err in 'EOPNOTSUPP', 'ENOTSUP':
if hasattr(errno, err) and e.errno == getattr(errno, err):
break
else:
raise
except OSError as e:
if e.errno != errno.EPERM:
raise
if self.selinux_enabled():
context = self.selinux_context(dest)
else:
if self.selinux_enabled():
context = self.selinux_default_context(dest)
creating = not os.path.exists(b_dest)
try:
# Optimistically try a rename, solves some corner cases and can avoid useless work, throws exception if not atomic.
os.rename(b_src, b_dest)
except (IOError, OSError) as e:
if e.errno not in [errno.EPERM, errno.EXDEV, errno.EACCES, errno.ETXTBSY, errno.EBUSY]:
                # only try workarounds for errno 18 (cross device), 1 (not permitted), 13 (permission denied),
                # 16 (device or resource busy) and 26 (text file busy), which happens on vagrant synced folders and other 'exotic' non posix file systems
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, to_native(e)), exception=traceback.format_exc())
else:
# Use bytes here. In the shippable CI, this fails with
# a UnicodeError with surrogateescape'd strings for an unknown
# reason (doesn't happen in a local Ubuntu16.04 VM)
b_dest_dir = os.path.dirname(b_dest)
b_suffix = os.path.basename(b_dest)
error_msg = None
tmp_dest_name = None
try:
tmp_dest_fd, tmp_dest_name = tempfile.mkstemp(prefix=b'.ansible_tmp', dir=b_dest_dir, suffix=b_suffix)
except (OSError, IOError) as e:
error_msg = 'The destination directory (%s) is not writable by the current user. Error was: %s' % (os.path.dirname(dest), to_native(e))
finally:
if error_msg:
if unsafe_writes:
self._unsafe_writes(b_src, b_dest)
else:
self.fail_json(msg=error_msg, exception=traceback.format_exc())
if tmp_dest_name:
b_tmp_dest_name = to_bytes(tmp_dest_name, errors='surrogate_or_strict')
try:
try:
# close tmp file handle before file operations to prevent text file busy errors on vboxfs synced folders (windows host)
os.close(tmp_dest_fd)
# leaves tmp file behind when sudo and not root
try:
shutil.move(b_src, b_tmp_dest_name)
except OSError:
# cleanup will happen by 'rm' of tmpdir
# copy2 will preserve some metadata
shutil.copy2(b_src, b_tmp_dest_name)
if self.selinux_enabled():
self.set_context_if_different(
b_tmp_dest_name, context, False)
try:
tmp_stat = os.stat(b_tmp_dest_name)
if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
os.chown(b_tmp_dest_name, dest_stat.st_uid, dest_stat.st_gid)
except OSError as e:
if e.errno != errno.EPERM:
raise
try:
os.rename(b_tmp_dest_name, b_dest)
except (shutil.Error, OSError, IOError) as e:
if unsafe_writes and e.errno == errno.EBUSY:
self._unsafe_writes(b_tmp_dest_name, b_dest)
else:
                                    self.fail_json(msg='Unable to make %s into %s, failed final rename from %s: %s' %
(src, dest, b_tmp_dest_name, to_native(e)), exception=traceback.format_exc())
except (shutil.Error, OSError, IOError) as e:
if unsafe_writes:
self._unsafe_writes(b_src, b_dest)
else:
self.fail_json(msg='Failed to replace file: %s to %s: %s' % (src, dest, to_native(e)), exception=traceback.format_exc())
finally:
self.cleanup(b_tmp_dest_name)
if creating:
# make sure the file has the correct permissions
# based on the current value of umask
umask = os.umask(0)
os.umask(umask)
os.chmod(b_dest, DEFAULT_PERM & ~umask)
try:
os.chown(b_dest, os.geteuid(), os.getegid())
except OSError:
# We're okay with trying our best here. If the user is not
# root (or old Unices) they won't be able to chown.
pass
if self.selinux_enabled():
# rename might not preserve context
self.set_context_if_different(dest, context, False)
def _unsafe_writes(self, src, dest):
# sadly there are some situations where we cannot ensure atomicity, but only if
# the user insists and we get the appropriate error we update the file unsafely
try:
out_dest = in_src = None
try:
out_dest = open(dest, 'wb')
in_src = open(src, 'rb')
shutil.copyfileobj(in_src, out_dest)
finally: # assuring closed files in 2.4 compatible way
if out_dest:
out_dest.close()
if in_src:
in_src.close()
except (shutil.Error, OSError, IOError) as e:
self.fail_json(msg='Could not write data to file (%s) from (%s): %s' % (dest, src, to_native(e)),
exception=traceback.format_exc())
def _clean_args(self, args):
if not self._clean:
# create a printable version of the command for use in reporting later,
# which strips out things like passwords from the args list
to_clean_args = args
if PY2:
if isinstance(args, text_type):
to_clean_args = to_bytes(args)
else:
if isinstance(args, binary_type):
to_clean_args = to_text(args)
if isinstance(args, (text_type, binary_type)):
to_clean_args = shlex.split(to_clean_args)
clean_args = []
is_passwd = False
for arg in (to_native(a) for a in to_clean_args):
if is_passwd:
is_passwd = False
clean_args.append('********')
continue
if PASSWD_ARG_RE.match(arg):
sep_idx = arg.find('=')
if sep_idx > -1:
clean_args.append('%s=********' % arg[:sep_idx])
continue
else:
is_passwd = True
arg = heuristic_log_sanitize(arg, self.no_log_values)
clean_args.append(arg)
self._clean = ' '.join(shlex_quote(arg) for arg in clean_args)
return self._clean
def _restore_signal_handlers(self):
# Reset SIGPIPE to SIG_DFL, otherwise in Python2.7 it gets ignored in subprocesses.
if PY2 and sys.platform != 'win32':
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None,
use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None, encoding='utf-8', errors='surrogate_or_strict',
expand_user_and_vars=True, pass_fds=None, before_communicate_callback=None, ignore_invalid_cwd=True, handle_exceptions=True):
'''
Execute a command, returns rc, stdout, and stderr.
The mechanism of this method for reading stdout and stderr differs from
that of CPython subprocess.Popen.communicate, in that this method will
stop reading once the spawned command has exited and stdout and stderr
have been consumed, as opposed to waiting until stdout/stderr are
        closed. This can be an important distinction, considering that a forked
        or backgrounded process may hold stdout or stderr open for longer than
        the spawned command itself.
:arg args: is the command to run
* If args is a list, the command will be run with shell=False.
* If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False
* If args is a string and use_unsafe_shell=True it runs with shell=True.
        :kw check_rc: Whether to call fail_json in case of a non-zero RC.
Default False
:kw close_fds: See documentation for subprocess.Popen(). Default True
:kw executable: See documentation for subprocess.Popen(). Default None
:kw data: If given, information to write to the stdin of the command
:kw binary_data: If False, append a newline to the data. Default False
:kw path_prefix: If given, additional path to find the command in.
This adds to the PATH environment variable so helper commands in
the same directory can also be found
:kw cwd: If given, working directory to run the command inside
:kw use_unsafe_shell: See `args` parameter. Default False
:kw prompt_regex: Regex string (not a compiled regex) which can be
used to detect prompts in the stdout which would otherwise cause
the execution to hang (especially if no input data is specified)
:kw environ_update: dictionary to *update* environ variables with
:kw umask: Umask to be used when running the command. Default None
:kw encoding: Since we return native strings, on python3 we need to
know the encoding to use to transform from bytes to text. If you
want to always get bytes back, use encoding=None. The default is
"utf-8". This does not affect transformation of strings given as
args.
:kw errors: Since we return native strings, on python3 we need to
transform stdout and stderr from bytes to text. If the bytes are
undecodable in the ``encoding`` specified, then use this error
handler to deal with them. The default is ``surrogate_or_strict``
which means that the bytes will be decoded using the
surrogateescape error handler if available (available on all
python3 versions we support) otherwise a UnicodeError traceback
will be raised. This does not affect transformations of strings
given as args.
:kw expand_user_and_vars: When ``use_unsafe_shell=False`` this argument
dictates whether ``~`` is expanded in paths and environment variables
are expanded before running the command. When ``True`` a string such as
``$SHELL`` will be expanded regardless of escaping. When ``False`` and
``use_unsafe_shell=False`` no path or variable expansion will be done.
:kw pass_fds: When running on Python 3 this argument
dictates which file descriptors should be passed
to an underlying ``Popen`` constructor. On Python 2, this will
set ``close_fds`` to False.
        :kw before_communicate_callback: This function will be called
            after the ``Popen`` object has been created
            but before communicating with the process.
            (The ``Popen`` object will be passed to the callback as its first argument)
:kw ignore_invalid_cwd: This flag indicates whether an invalid ``cwd``
(non-existent or not a directory) should be ignored or should raise
an exception.
:kw handle_exceptions: This flag indicates whether an exception will
be handled inline and issue a failed_json or if the caller should
handle it.
:returns: A 3-tuple of return code (integer), stdout (native string),
and stderr (native string). On python2, stdout and stderr are both
byte strings. On python3, stdout and stderr are text strings converted
according to the encoding and errors parameters. If you want byte
strings on python3, use encoding=None to turn decoding to text off.
'''
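        # Hedged caller-side usage sketches (illustrative only, not part of the original source):
        #     rc, out, err = module.run_command(['/usr/bin/lsattr', '-d', path])
        #     rc, out, err = module.run_command('echo $HOME', use_unsafe_shell=True)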
# used by clean args later on
self._clean = None
if not isinstance(args, (list, binary_type, text_type)):
msg = "Argument 'args' to run_command must be list or string"
self.fail_json(rc=257, cmd=args, msg=msg)
shell = False
if use_unsafe_shell:
# stringify args for unsafe/direct shell usage
if isinstance(args, list):
args = b" ".join([to_bytes(shlex_quote(x), errors='surrogate_or_strict') for x in args])
else:
args = to_bytes(args, errors='surrogate_or_strict')
# not set explicitly, check if set by controller
if executable:
executable = to_bytes(executable, errors='surrogate_or_strict')
args = [executable, b'-c', args]
elif self._shell not in (None, '/bin/sh'):
args = [to_bytes(self._shell, errors='surrogate_or_strict'), b'-c', args]
else:
shell = True
else:
# ensure args are a list
if isinstance(args, (binary_type, text_type)):
# On python2.6 and below, shlex has problems with text type
# On python3, shlex needs a text type.
if PY2:
args = to_bytes(args, errors='surrogate_or_strict')
elif PY3:
args = to_text(args, errors='surrogateescape')
args = shlex.split(args)
# expand ``~`` in paths, and all environment vars
if expand_user_and_vars:
args = [to_bytes(os.path.expanduser(os.path.expandvars(x)), errors='surrogate_or_strict') for x in args if x is not None]
else:
args = [to_bytes(x, errors='surrogate_or_strict') for x in args if x is not None]
prompt_re = None
if prompt_regex:
if isinstance(prompt_regex, text_type):
if PY3:
prompt_regex = to_bytes(prompt_regex, errors='surrogateescape')
elif PY2:
prompt_regex = to_bytes(prompt_regex, errors='surrogate_or_strict')
try:
prompt_re = re.compile(prompt_regex, re.MULTILINE)
except re.error:
self.fail_json(msg="invalid prompt regular expression given to run_command")
rc = 0
msg = None
st_in = None
env = os.environ.copy()
# We can set this from both an attribute and per call
env.update(self.run_command_environ_update or {})
env.update(environ_update or {})
if path_prefix:
path = env.get('PATH', '')
if path:
env['PATH'] = "%s:%s" % (path_prefix, path)
else:
env['PATH'] = path_prefix
# If using test-module.py and explode, the remote lib path will resemble:
# /tmp/test_module_scratch/debug_dir/ansible/module_utils/basic.py
# If using ansible or ansible-playbook with a remote system:
# /tmp/ansible_vmweLQ/ansible_modlib.zip/ansible/module_utils/basic.py
# Clean out python paths set by ansiballz
if 'PYTHONPATH' in env:
pypaths = [x for x in env['PYTHONPATH'].split(':')
if x and
not x.endswith('/ansible_modlib.zip') and
not x.endswith('/debug_dir')]
if pypaths and any(pypaths):
env['PYTHONPATH'] = ':'.join(pypaths)
if data:
st_in = subprocess.PIPE
def preexec():
self._restore_signal_handlers()
if umask:
os.umask(umask)
kwargs = dict(
executable=executable,
shell=shell,
close_fds=close_fds,
stdin=st_in,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
preexec_fn=preexec,
env=env,
)
if PY3 and pass_fds:
kwargs["pass_fds"] = pass_fds
elif PY2 and pass_fds:
kwargs['close_fds'] = False
# make sure we're in the right working directory
if cwd:
cwd = to_bytes(os.path.abspath(os.path.expanduser(cwd)), errors='surrogate_or_strict')
if os.path.isdir(cwd):
kwargs['cwd'] = cwd
elif not ignore_invalid_cwd:
self.fail_json(msg="Provided cwd is not a valid directory: %s" % cwd)
try:
if self._debug:
self.log('Executing: ' + self._clean_args(args))
cmd = subprocess.Popen(args, **kwargs)
if before_communicate_callback:
before_communicate_callback(cmd)
stdout = b''
stderr = b''
# Mirror the CPython subprocess logic and preference for the selector to use.
# poll/select have the advantage of not requiring any extra file
# descriptor, contrarily to epoll/kqueue (also, they require a single
# syscall).
if hasattr(selectors, 'PollSelector'):
selector = selectors.PollSelector()
else:
selector = selectors.SelectSelector()
if data:
if not binary_data:
data += '\n'
if isinstance(data, text_type):
data = to_bytes(data)
selector.register(cmd.stdout, selectors.EVENT_READ)
selector.register(cmd.stderr, selectors.EVENT_READ)
if os.name == 'posix':
fcntl.fcntl(cmd.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(cmd.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
fcntl.fcntl(cmd.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(cmd.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
if data:
cmd.stdin.write(data)
cmd.stdin.close()
while True:
# A timeout of 1 is both a little short and a little long.
# With None we could deadlock, with a lower value we would
# waste cycles. As it is, this is a mild inconvenience if
# we need to exit, and likely doesn't waste too many cycles
events = selector.select(1)
stdout_changed = False
for key, event in events:
b_chunk = key.fileobj.read(32768)
if not b_chunk:
selector.unregister(key.fileobj)
elif key.fileobj == cmd.stdout:
stdout += b_chunk
stdout_changed = True
elif key.fileobj == cmd.stderr:
stderr += b_chunk
# if we're checking for prompts, do it now, but only if stdout
# actually changed since the last loop
if prompt_re and stdout_changed and prompt_re.search(stdout) and not data:
if encoding:
stdout = to_native(stdout, encoding=encoding, errors=errors)
return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
# break out if no pipes are left to read or the pipes are completely read
# and the process is terminated
if (not events or not selector.get_map()) and cmd.poll() is not None:
break
# No pipes are left to read but process is not yet terminated
# Only then it is safe to wait for the process to be finished
# NOTE: Actually cmd.poll() is always None here if no selectors are left
elif not selector.get_map() and cmd.poll() is None:
cmd.wait()
# The process is terminated. Since no pipes to read from are
# left, there is no need to call select() again.
break
cmd.stdout.close()
cmd.stderr.close()
selector.close()
rc = cmd.returncode
except (OSError, IOError) as e:
self.log("Error Executing CMD:%s Exception:%s" % (self._clean_args(args), to_native(e)))
if handle_exceptions:
self.fail_json(rc=e.errno, stdout=b'', stderr=b'', msg=to_native(e), cmd=self._clean_args(args))
else:
raise e
except Exception as e:
self.log("Error Executing CMD:%s Exception:%s" % (self._clean_args(args), to_native(traceback.format_exc())))
if handle_exceptions:
self.fail_json(rc=257, stdout=b'', stderr=b'', msg=to_native(e), exception=traceback.format_exc(), cmd=self._clean_args(args))
else:
raise e
if rc != 0 and check_rc:
msg = heuristic_log_sanitize(stderr.rstrip(), self.no_log_values)
self.fail_json(cmd=self._clean_args(args), rc=rc, stdout=stdout, stderr=stderr, msg=msg)
if encoding is not None:
return (rc, to_native(stdout, encoding=encoding, errors=errors),
to_native(stderr, encoding=encoding, errors=errors))
return (rc, stdout, stderr)
def append_to_file(self, filename, str):
filename = os.path.expandvars(os.path.expanduser(filename))
fh = open(filename, 'a')
fh.write(str)
fh.close()
def bytes_to_human(self, size):
return bytes_to_human(size)
# for backwards compatibility
pretty_bytes = bytes_to_human
def human_to_bytes(self, number, isbits=False):
return human_to_bytes(number, isbits)
#
# Backwards compat
#
# In 2.0, moved from inside the module to the toplevel
is_executable = is_executable
@staticmethod
def get_buffer_size(fd):
try:
            # 1032 == F_GETPIPE_SZ
buffer_size = fcntl.fcntl(fd, 1032)
except Exception:
try:
# not as exact as above, but should be good enough for most platforms that fail the previous call
buffer_size = select.PIPE_BUF
except Exception:
buffer_size = 9000 # use sane default JIC
return buffer_size
def get_module_path():
return os.path.dirname(os.path.realpath(__file__))
def __getattr__(importable_name):
"""Inject import-time deprecation warnings.
Specifically, for ``literal_eval()``, ``_literal_eval()``
and ``get_exception()``.
"""
if importable_name == 'get_exception':
deprecate(
msg=f'The `ansible.module_utils.basic.'
f'{importable_name}` function is deprecated.',
version='2.19',
)
from ansible.module_utils.pycompat24 import get_exception
return get_exception
if importable_name in {'literal_eval', '_literal_eval'}:
deprecate(
msg=f'The `ansible.module_utils.basic.'
f'{importable_name}` function is deprecated.',
version='2.19',
)
from ast import literal_eval
return literal_eval
raise AttributeError(
f'cannot import name {importable_name !r} '
f'has no attribute ({__file__ !s})',
)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 76,727 |
lineinfile: Fails when creating a backup of Ubuntu 21.10's /etc/resolv.conf (symlink to "../run/systemd/resolve/stub-resolv.conf")
|
### Summary
https://github.com/ansible/ansible/issues/31982 is back.
### Issue Type
Bug Report
### Component Name
lineinfile
### Ansible Version
```console
$ ansible --version
ansible 2.10.8
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3/dist-packages/ansible
executable location = /usr/bin/ansible
python version = 3.9.7 (default, Sep 10 2021, 14:59:43) [GCC 11.2.0]
```
### Configuration
```console
$ ansible-config dump --only-changed
$
```
### OS / Environment
Ubuntu 21.10 Server in a VirtualBox, on server and clients, nothing special
### Steps to Reproduce
```
- name: "Check DNS configuration: Remove search list"
ansible.builtin.lineinfile:
backup: "yes"
path: "/etc/resolv.conf"
line: "search x.tld"
state: "absent"
- name: "Check DNS configuration: Cache servers present"
ansible.builtin.lineinfile:
backup: "yes"
line: "{{ item }}"
path: "/etc/resolv.conf"
state: "present"
with_items:
- "nameserver x.x.x.x"
- "nameserver y.y.y.y"
```
### Expected Results
The file is modified, a backup is created.
### Actual Results
```console
<x.x.x.x> (1, b'\r\n{"path": "/etc/resolv.conf.2410.2022-01-11@10:36:56~", "details": "Error while setting attributes: /etc/resolv.conf.2410.2022-01-11@10:36:56~: Operation not supported\\n", "exception": "Traceback (most recent call last):\\n File \\"/tmp/ansible_ansible.builtin.lineinfile_payload_pvwbc704/ansible_ansible.builtin.lineinfile_payload.zip/ansible/module_utils/basic.py\\", line 1256, in set_attributes_if_different\\n raise Exception(\\"Error while setting attributes: %s\\" % (out + err))\\nException: Error while setting attributes: /etc/resolv.conf.2410.2022-01-11@10:36:56~: Operation not supported\\n\\n", "failed": true, "msg": "chattr failed", "uid": 101, "gid": 103, "owner": "systemd-resolve", "group": "systemd-resolve", "mode": "0644", "state": "file", "size": 920, "invocation": {"module_args": {"backup": true, "line": "nameserver x.x.x.x", "path": "/etc/resolv.conf", "state": "present", "backrefs": false, "create": false, "firstmatch": false, "unsafe_writes": false, "regexp": null, "insertafter": null, "insertbefore": null, "validate": null, "mode": null, "owner": null, "group": null, "seuser": null, "serole": null, "selevel": null, "setype": null, "attributes": null}}}\r\n', b'Shared connection to x.x.x.x closed.\r\n')
<x.x.x.x> Failed to connect to the host via ssh: Shared connection to x.x.x.x closed.
<x.x.x.x> ESTABLISH SSH CONNECTION FOR USER: root
<x.x.x.x> SSH: EXEC ssh -C -o ControlMaster=auto -o ControlPersist=60s -o KbdInteractiveAuthentication=no -o PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey -o PasswordAuthentication=no -o 'User="root"' -o ConnectTimeout=10 -o ControlPath=/root/.ansible/cp/4c20d84404 x.x.x.x '/bin/sh -c '"'"'rm -f -r /root/.ansible/tmp/ansible-tmp-1641897416.2652106-3817-66846454678877/ > /dev/null 2>&1 && sleep 0'"'"''
The full traceback is:
Traceback (most recent call last):
File "/tmp/ansible_ansible.builtin.lineinfile_payload_bccsus14/ansible_ansible.builtin.lineinfile_payload.zip/ansible/module_utils/basic.py", line 1256, in set_attributes_if_different
raise Exception("Error while setting attributes: %s" % (out + err))
Exception: Error while setting attributes: /etc/resolv.conf.2577.2022-01-11@10:36:56~: Operation not supported
failed: [x.x.x.x] (item=nameserver x.x.x.x) => {
"ansible_loop_var": "item",
"changed": false,
"details": "Error while setting attributes: /etc/resolv.conf.2577.2022-01-11@10:36:56~: Operation not supported\n",
"gid": 103,
"group": "systemd-resolve",
"invocation": {
"module_args": {
"attributes": null,
"backrefs": false,
"backup": true,
"create": false,
"firstmatch": false,
"group": null,
"insertafter": null,
"insertbefore": null,
"line": "nameserver x.x.x.x",
"mode": null,
"owner": null,
"path": "/etc/resolv.conf",
"regexp": null,
"selevel": null,
"serole": null,
"setype": null,
"seuser": null,
"state": "present",
"unsafe_writes": false,
"validate": null
}
},
"item": "nameserver x.x.x.x",
"mode": "0644",
"msg": "chattr failed",
"owner": "systemd-resolve",
"path": "/etc/resolv.conf.2577.2022-01-11@10:36:56~",
"size": 946,
"state": "file",
"uid": 101
}
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/76727
|
https://github.com/ansible/ansible/pull/78707
|
dd0138ba2127eac0e809d68e00ae117df56db77e
|
9b002d2e63ec0d39c7c3025dfd4be7489c82016b
| 2022-01-11T11:07:14Z |
python
| 2023-12-06T17:02:51Z |
test/integration/targets/lineinfile/tasks/main.yml
|
# test code for the lineinfile module
# (c) 2014, James Cammarata <[email protected]>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
- name: deploy the test file for lineinfile
copy:
src: test.txt
dest: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert that the test file was deployed
assert:
that:
- result is changed
- "result.checksum == '5feac65e442c91f557fc90069ce6efc4d346ab51'"
- "result.state == 'file'"
- name: "create a file that does not yet exist with `create: yes` and produce diff"
lineinfile:
dest: "{{ remote_tmp_dir }}/a/a.txt"
state: present
line: "First line"
create: yes
diff: yes
register: result1
- name: assert that a diff was returned
assert:
that:
- result1.diff | length > 0
- name: stat the new file
stat:
path: "{{ remote_tmp_dir }}/a/a.txt"
register: result
- name: assert that the file exists
assert:
that:
- result.stat.exists
- block:
- name: "EXPECTED FAILURE - test source file does not exist w/o `create: yes`"
lineinfile:
path: "/some/where/that/doesnotexist.txt"
state: present
line: "Doesn't matter"
- fail:
msg: "Should not get here"
rescue:
- name: Validate failure
assert:
that:
- "'Destination /some/where/that/doesnotexist.txt does not exist !' in ansible_failed_result.msg"
- block:
- name: EXPECTED FAILURE - test invalid `validate` value
lineinfile:
path: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "Doesn't matter"
validate: '/some/path'
- fail:
msg: "Should not get here"
rescue:
- name: Validate failure
assert:
that:
- "'validate must contain %s: /some/path' in ansible_failed_result.msg"
- name: insert a line at the beginning of the file, and back it up
lineinfile:
dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "New line at the beginning"
insertbefore: "BOF"
backup: yes
register: result1
- name: insert a line at the beginning of the file again
lineinfile:
dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "New line at the beginning"
insertbefore: "BOF"
register: result2
- name: assert that the line was inserted at the head of the file
assert:
that:
- result1 is changed
- result2 is not changed
- result1.msg == 'line added'
- result1.backup != ''
- name: stat the backup file
stat:
path: "{{ result1.backup }}"
register: result
- name: assert the backup file matches the previous hash
assert:
that:
- "result.stat.checksum == '5feac65e442c91f557fc90069ce6efc4d346ab51'"
- name: stat the test after the insert at the head
stat:
path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test hash is what we expect for the file with the insert at the head
assert:
that:
- "result.stat.checksum == '7eade4042b23b800958fe807b5bfc29f8541ec09'"
- name: insert a line at the end of the file
lineinfile:
dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "New line at the end"
insertafter: "EOF"
register: result
- name: assert that the line was inserted at the end of the file
assert:
that:
- result is changed
- "result.msg == 'line added'"
- name: stat the test after the insert at the end
stat:
path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after the insert at the end
assert:
that:
- "result.stat.checksum == 'fb57af7dc10a1006061b000f1f04c38e4bef50a9'"
- name: insert a line after the first line
lineinfile:
dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "New line after line 1"
insertafter: "^This is line 1$"
register: result
- name: assert that the line was inserted after the first line
assert:
that:
- result is changed
- "result.msg == 'line added'"
- name: stat the test after insert after the first line
stat:
path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after the insert after the first line
assert:
that:
- "result.stat.checksum == '5348da605b1bc93dbadf3a16474cdf22ef975bec'"
- name: insert a line before the last line
lineinfile:
dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "New line before line 5"
insertbefore: "^This is line 5$"
register: result
- name: assert that the line was inserted before the last line
assert:
that:
- result is changed
- "result.msg == 'line added'"
- name: stat the test after the insert before the last line
stat:
path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after the insert before the last line
assert:
that:
- "result.stat.checksum == '2e9e460ff68929e4453eb765761fd99814f6e286'"
- name: Replace a line with backrefs
lineinfile:
dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "This is line 3"
backrefs: yes
regexp: "^(REF) .* \\1$"
register: backrefs_result1
- name: Replace a line with backrefs again
lineinfile:
dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "This is line 3"
backrefs: yes
regexp: "^(REF) .* \\1$"
register: backrefs_result2
- command: cat {{ remote_tmp_dir }}/test.txt
- name: assert that the line with backrefs was changed
assert:
that:
- backrefs_result1 is changed
- backrefs_result2 is not changed
- "backrefs_result1.msg == 'line replaced'"
- name: stat the test after the backref line was replaced
stat:
path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after backref line was replaced
assert:
that:
- "result.stat.checksum == '72f60239a735ae06e769d823f5c2b4232c634d9c'"
- name: remove the middle line
lineinfile:
dest: "{{ remote_tmp_dir }}/test.txt"
state: absent
regexp: "^This is line 3$"
register: result
- name: assert that the line was removed
assert:
that:
- result is changed
- "result.msg == '1 line(s) removed'"
- name: stat the test after the middle line was removed
stat:
path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after the middle line was removed
assert:
that:
- "result.stat.checksum == 'd4eeb07bdebab2d1cdb3ec4a3635afa2618ad4ea'"
- name: try to remove the middle line again
lineinfile:
dest: "{{ remote_tmp_dir }}/test.txt"
state: absent
regexp: "^This is line 3$"
register: result
- name: assert no change was made
assert:
that: result is not changed
- name: use stat to verify no change was made
stat:
path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after the middle line was removed
assert:
that:
- "result.stat.checksum == 'd4eeb07bdebab2d1cdb3ec4a3635afa2618ad4ea'"
- name: run a validation script that succeeds
lineinfile:
dest: "{{ remote_tmp_dir }}/test.txt"
state: absent
regexp: "^This is line 5$"
validate: "true %s"
register: result
- name: assert that the file validated after removing a line
assert:
that:
- result is changed
- "result.msg == '1 line(s) removed'"
- name: stat the test after the validation succeeded
stat:
path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after the validation succeeded
assert:
that:
- "result.stat.checksum == 'ab56c210ea82839a54487464800fed4878cb2608'"
- name: run a validation script that fails
lineinfile:
dest: "{{ remote_tmp_dir }}/test.txt"
state: absent
regexp: "^This is line 1$"
validate: "/bin/false %s"
register: result
ignore_errors: yes
- name: assert that the validate failed
assert:
that:
- "result.failed == true"
- name: stat the test after the validation failed
stat:
path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches the previous after the validation failed
assert:
that:
- "result.stat.checksum == 'ab56c210ea82839a54487464800fed4878cb2608'"
- import_tasks: test_string01.yml
- name: use create=yes
lineinfile:
dest: "{{ remote_tmp_dir }}/new_test.txt"
create: yes
insertbefore: BOF
state: present
line: "This is a new file"
register: result
- name: assert that the new file was created
assert:
that:
- result is changed
- "result.msg == 'line added'"
- name: validate that the newly created file exists
stat:
path: "{{ remote_tmp_dir }}/new_test.txt"
register: result
ignore_errors: yes
- name: assert the newly created test checksum matches
assert:
that:
- "result.stat.checksum == '038f10f9e31202451b093163e81e06fbac0c6f3a'"
- name: Create a file without a path
lineinfile:
dest: file.txt
create: yes
line: Test line
register: create_no_path_test
- name: Stat the file
stat:
path: file.txt
register: create_no_path_file
- name: Ensure file was created
assert:
that:
- create_no_path_test is changed
- create_no_path_file.stat.exists
# Test EOF in cases where file has no newline at EOF
- name: testnoeof deploy the file for lineinfile
copy:
src: testnoeof.txt
dest: "{{ remote_tmp_dir }}/testnoeof.txt"
register: result
- name: testnoeof insert a line at the end of the file
lineinfile:
dest: "{{ remote_tmp_dir }}/testnoeof.txt"
state: present
line: "New line at the end"
insertafter: "EOF"
register: result
- name: testnoeof assert that the line was inserted at the end of the file
assert:
that:
- result is changed
- "result.msg == 'line added'"
- name: insert multiple lines at the end of the file
lineinfile:
dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "This is a line\nwith \\n character"
insertafter: "EOF"
register: result
- name: assert that the multiple lines were inserted
assert:
that:
- result is changed
- "result.msg == 'line added'"
- name: testnoeof stat the no newline EOF test after the insert at the end
stat:
path: "{{ remote_tmp_dir }}/testnoeof.txt"
register: result
- name: testnoeof assert test checksum matches after the insert at the end
assert:
that:
- "result.stat.checksum == 'f9af7008e3cb67575ce653d094c79cabebf6e523'"
# Test EOF with empty file to make sure no unnecessary newline is added
- name: testempty deploy the testempty file for lineinfile
copy:
src: testempty.txt
dest: "{{ remote_tmp_dir }}/testempty.txt"
register: result
- name: testempty insert a line at the end of the file
lineinfile:
dest: "{{ remote_tmp_dir }}/testempty.txt"
state: present
line: "New line at the end"
insertafter: "EOF"
register: result
- name: testempty assert that the line was inserted at the end of the file
assert:
that:
- result is changed
- "result.msg == 'line added'"
- name: testempty stat the test after the insert at the end
stat:
path: "{{ remote_tmp_dir }}/testempty.txt"
register: result
- name: testempty assert test checksum matches after the insert at the end
assert:
that:
- "result.stat.checksum == 'f440dc65ea9cec3fd496c1479ddf937e1b949412'"
- stat:
path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after inserting multiple lines
assert:
that:
- "result.stat.checksum == 'fde683229429a4f05d670e6c10afc875e1d5c489'"
- name: replace a line with backrefs included in the line
lineinfile:
dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "New \\1 created with the backref"
backrefs: yes
regexp: "^This is (line 4)$"
register: result
- name: assert that the line with backrefs was changed
assert:
that:
- result is changed
- "result.msg == 'line replaced'"
- name: stat the test after the backref line was replaced
stat:
path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after backref line was replaced
assert:
that:
- "result.stat.checksum == '981ad35c4b30b03bc3a1beedce0d1e72c491898e'"
###################################################################
# issue 8535
- name: create a new file for testing quoting issues
file:
dest: "{{ remote_tmp_dir }}/test_quoting.txt"
state: touch
register: result
- name: assert the new file was created
assert:
that:
- result is changed
- name: use with_items to add code-like strings to the quoting txt file
lineinfile:
dest: "{{ remote_tmp_dir }}/test_quoting.txt"
line: "{{ item }}"
insertbefore: BOF
with_items:
- "'foo'"
- "dotenv.load();"
- "var dotenv = require('dotenv');"
register: result
- name: assert the quote test file was modified correctly
assert:
that:
- result.results|length == 3
- result.results[0] is changed
- result.results[0].item == "'foo'"
- result.results[1] is changed
- result.results[1].item == "dotenv.load();"
- result.results[2] is changed
- result.results[2].item == "var dotenv = require('dotenv');"
- name: stat the quote test file
stat:
path: "{{ remote_tmp_dir }}/test_quoting.txt"
register: result
- name: assert test checksum matches after the code-like strings were inserted
assert:
that:
- "result.stat.checksum == '7dc3cb033c3971e73af0eaed6623d4e71e5743f1'"
- name: insert a line into the quoted file with a single quote
lineinfile:
dest: "{{ remote_tmp_dir }}/test_quoting.txt"
line: "import g'"
register: result
- name: assert that the quoted file was changed
assert:
that:
- result is changed
- name: stat the quote test file
stat:
path: "{{ remote_tmp_dir }}/test_quoting.txt"
register: result
- name: assert test checksum matches after the single-quote line was inserted
assert:
that:
- "result.stat.checksum == '73b271c2cc1cef5663713bc0f00444b4bf9f4543'"
- name: insert a line into the quoted file with many double quotation strings
lineinfile:
dest: "{{ remote_tmp_dir }}/test_quoting.txt"
line: "\"quote\" and \"unquote\""
register: result
- name: assert that the quoted file was changed
assert:
that:
- result is changed
- name: stat the quote test file
stat:
path: "{{ remote_tmp_dir }}/test_quoting.txt"
register: result
- name: assert test checksum matches after the double-quoted line was inserted
assert:
that:
- "result.stat.checksum == 'b10ab2a3c3b6492680c8d0b1d6f35aa6b8f9e731'"
###################################################################
# Issue 28721
- name: Deploy the testmultiple file
copy:
src: testmultiple.txt
dest: "{{ remote_tmp_dir }}/testmultiple.txt"
register: result
- name: Assert that the testmultiple file was deployed
assert:
that:
- result is changed
- result.checksum == '3e0090a34fb641f3c01e9011546ff586260ea0ea'
- result.state == 'file'
# Test insertafter
- name: Write the same line to a file inserted after different lines
lineinfile:
path: "{{ remote_tmp_dir }}/testmultiple.txt"
insertafter: "{{ item.regex }}"
line: "{{ item.replace }}"
register: _multitest_1
with_items: "{{ test_regexp }}"
- name: Assert that the line is added once only
assert:
that:
- _multitest_1.results.0 is changed
- _multitest_1.results.1 is not changed
- _multitest_1.results.2 is not changed
- _multitest_1.results.3 is not changed
- name: Do the same thing again to check for changes
lineinfile:
path: "{{ remote_tmp_dir }}/testmultiple.txt"
insertafter: "{{ item.regex }}"
line: "{{ item.replace }}"
register: _multitest_2
with_items: "{{ test_regexp }}"
- name: Assert that the line is not added anymore
assert:
that:
- _multitest_2.results.0 is not changed
- _multitest_2.results.1 is not changed
- _multitest_2.results.2 is not changed
- _multitest_2.results.3 is not changed
- name: Stat the insertafter file
stat:
path: "{{ remote_tmp_dir }}/testmultiple.txt"
register: result
- name: Assert that the insertafter file matches expected checksum
assert:
that:
- result.stat.checksum == 'c6733b6c53ddd0e11e6ba39daa556ef8f4840761'
# Test insertbefore
- name: Deploy the testmultiple file
copy:
src: testmultiple.txt
dest: "{{ remote_tmp_dir }}/testmultiple.txt"
register: result
- name: Assert that the testmultiple file was deployed
assert:
that:
- result is changed
- result.checksum == '3e0090a34fb641f3c01e9011546ff586260ea0ea'
- result.state == 'file'
- name: Write the same line to a file inserted before different lines
lineinfile:
path: "{{ remote_tmp_dir }}/testmultiple.txt"
insertbefore: "{{ item.regex }}"
line: "{{ item.replace }}"
register: _multitest_3
with_items: "{{ test_regexp }}"
- name: Assert that the line is added once only
assert:
that:
- _multitest_3.results.0 is changed
- _multitest_3.results.1 is not changed
- _multitest_3.results.2 is not changed
- _multitest_3.results.3 is not changed
- name: Do the same thing again to check for changes
lineinfile:
path: "{{ remote_tmp_dir }}/testmultiple.txt"
insertbefore: "{{ item.regex }}"
line: "{{ item.replace }}"
register: _multitest_4
with_items: "{{ test_regexp }}"
- name: Assert that the line is not added anymore
assert:
that:
- _multitest_4.results.0 is not changed
- _multitest_4.results.1 is not changed
- _multitest_4.results.2 is not changed
- _multitest_4.results.3 is not changed
- name: Stat the insertbefore file
stat:
path: "{{ remote_tmp_dir }}/testmultiple.txt"
register: result
- name: Assert that the insertbefore file matches expected checksum
assert:
that:
- result.stat.checksum == '5d298651fbc377b45257da10308a9dc2fe1f8be5'
###################################################################
# Issue 36156
# Test insertbefore and insertafter with regexp
- name: Deploy the test.conf file
copy:
src: test.conf
dest: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the test.conf file was deployed
assert:
that:
- result is changed
- result.checksum == '6037f13e419b132eb3fd20a89e60c6c87a6add38'
- result.state == 'file'
# Test insertafter
- name: Insert lines after with regexp
lineinfile:
path: "{{ remote_tmp_dir }}/test.conf"
regexp: "{{ item.regexp }}"
line: "{{ item.line }}"
insertafter: "{{ item.after }}"
with_items: "{{ test_befaf_regexp }}"
register: _multitest_5
- name: Do the same thing again and check for changes
lineinfile:
path: "{{ remote_tmp_dir }}/test.conf"
regexp: "{{ item.regexp }}"
line: "{{ item.line }}"
insertafter: "{{ item.after }}"
with_items: "{{ test_befaf_regexp }}"
register: _multitest_6
- name: Assert that the file was changed the first time but not the second time
assert:
that:
- item.0 is changed
- item.1 is not changed
with_together:
- "{{ _multitest_5.results }}"
- "{{ _multitest_6.results }}"
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the file contents match what is expected
assert:
that:
- result.stat.checksum == '06e2c456e5028dd7bcd0b117b5927a1139458c82'
- name: Do the same thing a third time without regexp and check for changes
lineinfile:
path: "{{ remote_tmp_dir }}/test.conf"
line: "{{ item.line }}"
insertafter: "{{ item.after }}"
with_items: "{{ test_befaf_regexp }}"
register: _multitest_7
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the file was not changed when no regexp was provided
assert:
that:
- item is not changed
with_items: "{{ _multitest_7.results }}"
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the file contents match what is expected
assert:
that:
- result.stat.checksum == '06e2c456e5028dd7bcd0b117b5927a1139458c82'
# Test insertbefore
- name: Deploy the test.conf file
copy:
src: test.conf
dest: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the test.conf file was deployed
assert:
that:
- result is changed
- result.checksum == '6037f13e419b132eb3fd20a89e60c6c87a6add38'
- result.state == 'file'
- name: Insert lines before with regexp
lineinfile:
path: "{{ remote_tmp_dir }}/test.conf"
regexp: "{{ item.regexp }}"
line: "{{ item.line }}"
insertbefore: "{{ item.before }}"
with_items: "{{ test_befaf_regexp }}"
register: _multitest_8
- name: Do the same thing again and check for changes
lineinfile:
path: "{{ remote_tmp_dir }}/test.conf"
regexp: "{{ item.regexp }}"
line: "{{ item.line }}"
insertbefore: "{{ item.before }}"
with_items: "{{ test_befaf_regexp }}"
register: _multitest_9
- name: Assert that the file was changed the first time but not the second time
assert:
that:
- item.0 is changed
- item.1 is not changed
with_together:
- "{{ _multitest_8.results }}"
- "{{ _multitest_9.results }}"
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the file contents match what is expected
assert:
that:
- result.stat.checksum == 'c3be9438a07c44d4c256cebfcdbca15a15b1db91'
- name: Do the same thing a third time without regexp and check for changes
lineinfile:
path: "{{ remote_tmp_dir }}/test.conf"
line: "{{ item.line }}"
insertbefore: "{{ item.before }}"
with_items: "{{ test_befaf_regexp }}"
register: _multitest_10
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the file was not changed when no regexp was provided
assert:
that:
- item is not changed
with_items: "{{ _multitest_10.results }}"
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the file contents match what is expected
assert:
that:
- result.stat.checksum == 'c3be9438a07c44d4c256cebfcdbca15a15b1db91'
- name: Copy empty file to test with insertbefore
copy:
src: testempty.txt
dest: "{{ remote_tmp_dir }}/testempty.txt"
- name: Add a line to empty file with insertbefore
lineinfile:
path: "{{ remote_tmp_dir }}/testempty.txt"
line: top
insertbefore: '^not in the file$'
register: oneline_insbefore_test1
- name: Add a line to file with only one line using insertbefore
lineinfile:
path: "{{ remote_tmp_dir }}/testempty.txt"
line: top
insertbefore: '^not in the file$'
register: oneline_insbefore_test2
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/testempty.txt"
register: oneline_insbefore_file
- name: Assert that insertbefore worked properly with a one line file
assert:
that:
- oneline_insbefore_test1 is changed
- oneline_insbefore_test2 is not changed
- oneline_insbefore_file.stat.checksum == '4dca56d05a21f0d018cd311f43e134e4501cf6d9'
- import_tasks: test_string02.yml
# Issue 29443
# When using an empty regexp, replace the last line (since it matches every line)
# but also provide a warning.
- name: Deploy the test file for lineinfile
copy:
src: test.txt
dest: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: Assert that the test file was deployed
assert:
that:
- result is changed
- result.checksum == '5feac65e442c91f557fc90069ce6efc4d346ab51'
- result.state == 'file'
- name: Insert a line in the file using an empty string as a regular expression
lineinfile:
path: "{{ remote_tmp_dir }}/test.txt"
regexp: ''
line: This is line 6
register: insert_empty_regexp
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: Assert that the file contents match what is expected and a warning was displayed
assert:
that:
- insert_empty_regexp is changed
- warning_message in insert_empty_regexp.warnings
- result.stat.checksum == '23555a98ceaa88756b4c7c7bba49d9f86eed868f'
vars:
warning_message: >-
The regular expression is an empty string, which will match every line in the file.
This may have unintended consequences, such as replacing the last line in the file rather than appending.
If this is desired, use '^' to match every line in the file and avoid this warning.
###################################################################
# When using an empty search string, replace the last line (since it matches every line)
# but also provide a warning.
- name: Deploy the test file for lineinfile
copy:
src: teststring.txt
dest: "{{ remote_tmp_dir }}/teststring.txt"
register: result
- name: Assert that the test file was deployed
assert:
that:
- result is changed
- result.checksum == '481c2b73fe062390afdd294063a4f8285d69ac85'
- result.state == 'file'
- name: Insert a line in the file using an empty string as a search string
lineinfile:
path: "{{ remote_tmp_dir }}/teststring.txt"
search_string: ''
line: This is line 6
register: insert_empty_literal
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/teststring.txt"
register: result
- name: Assert that the file contents match what is expected and a warning was displayed
assert:
that:
- insert_empty_literal is changed
- warning_message in insert_empty_literal.warnings
- result.stat.checksum == 'eaa79f878557d4bd8d96787a850526a0facab342'
vars:
warning_message: >-
The search string is an empty string, which will match every line in the file.
This may have unintended consequences, such as replacing the last line in the file rather than appending.
- name: meta
meta: end_play
###################################################################
## Issue #58923
## Using firstmatch with insertafter and ensure multiple lines are not inserted
- name: Deploy the firstmatch test file
copy:
src: firstmatch.txt
dest: "{{ remote_tmp_dir }}/firstmatch.txt"
register: result
- name: Assert that the test file was deployed
assert:
that:
- result is changed
- result.checksum == '1d644e5e2e51c67f1bd12d7bbe2686017f39923d'
- result.state == 'file'
- name: Insert a line after an existing line using firstmatch
lineinfile:
path: "{{ remote_tmp_dir }}/firstmatch.txt"
line: INSERT
insertafter: line1
firstmatch: yes
register: insertafter1
- name: Insert a line after an existing line using firstmatch again
lineinfile:
path: "{{ remote_tmp_dir }}/firstmatch.txt"
line: INSERT
insertafter: line1
firstmatch: yes
register: insertafter2
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/firstmatch.txt"
register: result
- name: Assert that the file was modified appropriately
assert:
that:
- insertafter1 is changed
- insertafter2 is not changed
- result.stat.checksum == '114aae024073a3ee8ec8db0ada03c5483326dd86'
########################################################################################
# Tests of fixing the same issue as above (#58923) by @Andersson007 <[email protected]>
# and @samdoran <[email protected]>:
# Test insertafter with regexp
- name: Deploy the test file
copy:
src: test_58923.txt
dest: "{{ remote_tmp_dir }}/test_58923.txt"
register: initial_file
- name: Assert that the test file was deployed
assert:
that:
- initial_file is changed
- initial_file.checksum == 'b6379ba43261c451a62102acb2c7f438a177c66e'
- initial_file.state == 'file'
# Regarding the documentation:
# If regular expressions are passed to both regexp and
# insertafter, insertafter is only honored if no match for regexp is found.
# Therefore,
# when regular expressions are passed to both regexp and insertafter, then:
# 1. regexp was found -> ignore insertafter, replace the matched line
# 2. regexp was not found -> insert the line after the 'insertafter' line
# Regexp is not present in the file, so the line must be inserted after ^#!/bin/sh
- name: Add the line using firstmatch, regexp, and insertafter
lineinfile:
path: "{{ remote_tmp_dir }}/test_58923.txt"
insertafter: '^#!/bin/sh'
regexp: ^export FISHEYE_OPTS
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
register: insertafter_test1
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertafter_test1_file
- name: Add the line using firstmatch, regexp, and insertafter again
lineinfile:
path: "{{ remote_tmp_dir }}/test_58923.txt"
insertafter: '^#!/bin/sh'
regexp: ^export FISHEYE_OPTS
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
register: insertafter_test2
# Check of the prev step.
# We tried to add the same line with the same playbook,
# so nothing has been added:
- name: Stat the file again
stat:
path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertafter_test2_file
- name: Assert insertafter tests gave the expected results
assert:
that:
- insertafter_test1 is changed
- insertafter_test1_file.stat.checksum == '9232aed6fe88714964d9e29d13e42cd782070b08'
- insertafter_test2 is not changed
- insertafter_test2_file.stat.checksum == '9232aed6fe88714964d9e29d13e42cd782070b08'
# Test insertafter without regexp
- name: Deploy the test file
copy:
src: test_58923.txt
dest: "{{ remote_tmp_dir }}/test_58923.txt"
register: initial_file
- name: Assert that the test file was deployed
assert:
that:
- initial_file is changed
- initial_file.checksum == 'b6379ba43261c451a62102acb2c7f438a177c66e'
- initial_file.state == 'file'
- name: Insert the line using firstmatch and insertafter without regexp
lineinfile:
path: "{{ remote_tmp_dir }}/test_58923.txt"
insertafter: '^#!/bin/sh'
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
register: insertafter_test3
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertafter_test3_file
- name: Insert the line using firstmatch and insertafter without regexp again
lineinfile:
path: "{{ remote_tmp_dir }}/test_58923.txt"
insertafter: '^#!/bin/sh'
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
register: insertafter_test4
- name: Stat the file again
stat:
path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertafter_test4_file
- name: Assert insertafter without regexp tests gave the expected results
assert:
that:
- insertafter_test3 is changed
- insertafter_test3_file.stat.checksum == '9232aed6fe88714964d9e29d13e42cd782070b08'
- insertafter_test4 is not changed
- insertafter_test4_file.stat.checksum == '9232aed6fe88714964d9e29d13e42cd782070b08'
# Test insertbefore with regexp
- name: Deploy the test file
copy:
src: test_58923.txt
dest: "{{ remote_tmp_dir }}/test_58923.txt"
register: initial_file
- name: Assert that the test file was deployed
assert:
that:
- initial_file is changed
- initial_file.checksum == 'b6379ba43261c451a62102acb2c7f438a177c66e'
- initial_file.state == 'file'
- name: Add the line using regexp, firstmatch, and insertbefore
lineinfile:
path: "{{ remote_tmp_dir }}/test_58923.txt"
insertbefore: '^#!/bin/sh'
regexp: ^export FISHEYE_OPTS
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
register: insertbefore_test1
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertbefore_test1_file
- name: Add the line using regexp, firstmatch, and insertbefore again
lineinfile:
path: "{{ remote_tmp_dir }}/test_58923.txt"
insertbefore: '^#!/bin/sh'
regexp: ^export FISHEYE_OPTS
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
register: insertbefore_test2
- name: Stat the file again
stat:
path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertbefore_test2_file
- name: Assert insertbefore with regexp tests gave the expected results
assert:
that:
- insertbefore_test1 is changed
- insertbefore_test1_file.stat.checksum == '3c6630b9d44f561ea9ad999be56a7504cadc12f7'
- insertbefore_test2 is not changed
- insertbefore_test2_file.stat.checksum == '3c6630b9d44f561ea9ad999be56a7504cadc12f7'
# Test insertbefore without regexp
- name: Deploy the test file
copy:
src: test_58923.txt
dest: "{{ remote_tmp_dir }}/test_58923.txt"
register: initial_file
- name: Assert that the test file was deployed
assert:
that:
- initial_file is changed
- initial_file.checksum == 'b6379ba43261c451a62102acb2c7f438a177c66e'
- initial_file.state == 'file'
- name: Add the line using insertbefore and firstmatch
lineinfile:
path: "{{ remote_tmp_dir }}/test_58923.txt"
insertbefore: '^#!/bin/sh'
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
register: insertbefore_test3
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertbefore_test3_file
- name: Add the line using insertbefore and firstmatch again
lineinfile:
path: "{{ remote_tmp_dir }}/test_58923.txt"
insertbefore: '^#!/bin/sh'
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
register: insertbefore_test4
- name: Stat the file again
stat:
path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertbefore_test4_file
# Test when the line is already present in the file but
# not in the before/after spot, and it does match the regexp:
- name: >
Add the line using insertbefore and firstmatch when the regexp line
is present but not close to the insertbefore spot
lineinfile:
path: "{{ remote_tmp_dir }}/test_58923.txt"
insertbefore: ' Darwin\*\) if \[ -z \"\$JAVA_HOME\" \] ; then'
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
register: insertbefore_test5
- name: Stat the file again
stat:
path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertbefore_test5_file
- name: Assert insertbefore without regexp tests gave the expected results
assert:
that:
- insertbefore_test3 is changed
- insertbefore_test3_file.stat.checksum == '3c6630b9d44f561ea9ad999be56a7504cadc12f7'
- insertbefore_test4 is not changed
- insertbefore_test4_file.stat.checksum == '3c6630b9d44f561ea9ad999be56a7504cadc12f7'
- insertbefore_test5 is not changed
- insertbefore_test5_file.stat.checksum == '3c6630b9d44f561ea9ad999be56a7504cadc12f7'
########################################################################################
# Same tests for literal
# Test insertafter with literal
- name: Deploy the test file
copy:
src: teststring_58923.txt
dest: "{{ remote_tmp_dir }}/teststring_58923.txt"
register: initial_file
- name: Assert that the test file was deployed
assert:
that:
- initial_file is changed
- initial_file.checksum == 'b6379ba43261c451a62102acb2c7f438a177c66e'
- initial_file.state == 'file'
# Regarding the documentation:
# If the search string is passed to both search_string and
# insertafter, insertafter is only honored if no match for search_string is found.
# Therefore,
# when a search string is passed to both search_string and insertafter, then:
# 1. search_string was found -> ignore insertafter, replace the matched line
# 2. search_string was not found -> insert the line after the 'insertafter' line
# literal is not present in the file, so the line must be inserted after ^#!/bin/sh
- name: Add the line using firstmatch, literal, and insertafter
lineinfile:
path: "{{ remote_tmp_dir }}/teststring_58923.txt"
insertafter: '^#!/bin/sh'
search_string: export FISHEYE_OPTS
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
register: insertafter_test1
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/teststring_58923.txt"
register: insertafter_test1_file
- name: Add the line using firstmatch, literal, and insertafter again
lineinfile:
path: "{{ remote_tmp_dir }}/teststring_58923.txt"
insertafter: '^#!/bin/sh'
search_string: export FISHEYE_OPTS
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
register: insertafter_test2
# Check of the prev step.
# We tried to add the same line with the same playbook,
# so nothing has been added:
- name: Stat the file again
stat:
path: "{{ remote_tmp_dir }}/teststring_58923.txt"
register: insertafter_test2_file
- name: Assert insertafter tests gave the expected results
assert:
that:
- insertafter_test1 is changed
- insertafter_test1_file.stat.checksum == '9232aed6fe88714964d9e29d13e42cd782070b08'
- insertafter_test2 is not changed
- insertafter_test2_file.stat.checksum == '9232aed6fe88714964d9e29d13e42cd782070b08'
# Test insertbefore with literal
- name: Deploy the test file
copy:
src: teststring_58923.txt
dest: "{{ remote_tmp_dir }}/teststring_58923.txt"
register: initial_file
- name: Assert that the test file was deployed
assert:
that:
- initial_file is changed
- initial_file.checksum == 'b6379ba43261c451a62102acb2c7f438a177c66e'
- initial_file.state == 'file'
- name: Add the line using literal, firstmatch, and insertbefore
lineinfile:
path: "{{ remote_tmp_dir }}/teststring_58923.txt"
insertbefore: '^#!/bin/sh'
search_string: export FISHEYE_OPTS
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
register: insertbefore_test1
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/teststring_58923.txt"
register: insertbefore_test1_file
- name: Add the line using literal, firstmatch, and insertbefore again
lineinfile:
path: "{{ remote_tmp_dir }}/teststring_58923.txt"
insertbefore: '^#!/bin/sh'
search_string: export FISHEYE_OPTS
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
register: insertbefore_test2
- name: Stat the file again
stat:
path: "{{ remote_tmp_dir }}/teststring_58923.txt"
register: insertbefore_test2_file
- name: Assert insertbefore with literal tests gave the expected results
assert:
that:
- insertbefore_test1 is changed
- insertbefore_test1_file.stat.checksum == '3c6630b9d44f561ea9ad999be56a7504cadc12f7'
- insertbefore_test2 is not changed
- insertbefore_test2_file.stat.checksum == '3c6630b9d44f561ea9ad999be56a7504cadc12f7'
# Test inserting a line at the end of the file using regexp with insertafter
# https://github.com/ansible/ansible/issues/63684
- name: Create a file by inserting a line
lineinfile:
path: "{{ remote_tmp_dir }}/testend.txt"
create: yes
line: testline
register: testend1
- name: Insert a line at the end of the file
lineinfile:
path: "{{ remote_tmp_dir }}/testend.txt"
insertafter: testline
regexp: line at the end
line: line at the end
register: testend2
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/testend.txt"
register: testend_file
- name: Assert inserting at the end gave the expected results.
assert:
that:
- testend1 is changed
- testend2 is changed
- testend_file.stat.checksum == 'ef36116966836ce04f6b249fd1837706acae4e19'
# Test inserting a line at the end of the file using search_string with insertafter
- name: Create a file by inserting a line
lineinfile:
path: "{{ remote_tmp_dir }}/testendliteral.txt"
create: yes
line: testline
register: testend1
- name: Insert a line at the end of the file
lineinfile:
path: "{{ remote_tmp_dir }}/testendliteral.txt"
insertafter: testline
search_string: line at the end
line: line at the end
register: testend2
- name: Stat the file
stat:
path: "{{ remote_tmp_dir }}/testendliteral.txt"
register: testend_file
- name: Assert inserting at the end gave the expected results.
assert:
that:
- testend1 is changed
- testend2 is changed
- testend_file.stat.checksum == 'ef36116966836ce04f6b249fd1837706acae4e19'
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,359 |
Assemble module doesn't pass `content` arg to `_get_diff_data`
|
### Summary
When using the `ansible.builtin.assemble` module with `--diff`, the task fails with the following error:
> Unexpected failure during module execution: ActionBase._get_diff_data() missing 1 required positional argument: 'content'
`ansible-playbook -vvvv` says the relevant function call happens in [ansible/plugins/action/assemble.py, line 143](https://github.com/ansible/ansible/blob/6ebefaceb6cd0d4961776a94d63a71fc1fc28bc0/lib/ansible/plugins/action/assemble.py#L143).
Note: on my local machine that's currently line 144; 143 is on the current devel branch.
The last known working version is 8.6.1
### Issue Type
Bug Report
### Component Name
ansible.builtin.assemble
### Ansible Version
```console
$ ansible --version
ansible [core 2.16.0]
config file = None
configured module search path = ['/Users/albalitz/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible
ansible collection location = /Users/albalitz/.ansible/collections:/usr/share/ansible/collections
executable location = /opt/homebrew/bin/ansible
python version = 3.12.0 (main, Oct 3 2023, 16:20:33) [Clang 14.0.3 (clang-1403.0.22.14.1)] (/opt/homebrew/Cellar/ansible/9.0.1/libexec/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
EDITOR(env: EDITOR) = /usr/bin/vim
PAGER(env: PAGER) = less
```
### OS / Environment
This happens on my local machine running MacOS Sonoma 14.1.2 (Ansible installed via homebrew) as well as our CI system running in a `python:alpine`-based Docker environment with the same Ansible version as above (Ansible is installed via pip there and updated semi-automatically using renovatebot).
### Steps to Reproduce
This step fails with the error described above:
```yaml
- name: create concatenated file
local_action:
module: assemble
remote_src: false
src: files/some_files/
dest: /tmp/concatenated_file
no_log: true
changed_when: false
check_mode: no
become: no
run_once: true
```
The step works when `--diff` is removed from the `ansible-playbook` command.
### Expected Results
I expected the `assemble` step to run successfully and produce a concatenated file with `--diff` enabled but without printing the diff (due to `no_log: true` - I set that to `false` for debugging purposes to see the error message).
### Actual Results
```console
<localhost> ESTABLISH LOCAL CONNECTION FOR USER: albalitz
<localhost> EXEC /bin/sh -c 'echo ~albalitz && sleep 0'
<localhost> EXEC /bin/sh -c '( umask 77 && mkdir -p "` echo /Users/albalitz/.ansible/tmp `"&& mkdir "` echo /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079 `" && echo ansible-tmp-1701782995.5552058-23181-40564253413079="` echo /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079 `" ) && sleep 0'
Using module file /opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible/modules/stat.py
<localhost> PUT /Users/albalitz/.ansible/tmp/ansible-local-23119muc1g04o/tmp8owc8yoz TO /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/AnsiballZ_stat.py
<localhost> EXEC /bin/sh -c 'chmod u+x /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/ /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/AnsiballZ_stat.py && sleep 0'
<localhost> EXEC /bin/sh -c '/opt/homebrew/Cellar/ansible/9.0.1/libexec/bin/python /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/AnsiballZ_stat.py && sleep 0'
<localhost> EXEC /bin/sh -c 'rm -f -r /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/ > /dev/null 2>&1 && sleep 0'
The full traceback is:
Traceback (most recent call last):
File "/opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible/executor/task_executor.py", line 165, in run
res = self._execute()
^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible/executor/task_executor.py", line 641, in _execute
result = self._handler.run(task_vars=vars_copy)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible/plugins/action/assemble.py", line 144, in run
diff = self._get_diff_data(dest, path, task_vars)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
TypeError: ActionBase._get_diff_data() missing 1 required positional argument: 'content'
fatal: [shorewall-0 -> localhost]: FAILED! => {}
MSG:
Unexpected failure during module execution: ActionBase._get_diff_data() missing 1 required positional argument: 'content'
```
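To make the failure mode concrete, here is a small standalone illustration (plain Python with simplified names; this is not the Ansible source and not necessarily how the linked PR fixes it): a helper gains a new required positional parameter, and an existing three-argument call written against the old signature raises exactly this kind of `TypeError`.
```python
# Illustrative sketch only -- the function names and signatures below are
# simplified assumptions, not the real ActionBase API.

def get_diff_data_old(destination, source, task_vars):
    # old-style helper: three positional parameters (shown for comparison, unused below)
    return {"before_header": source, "after_header": destination}


def get_diff_data_new(destination, source, task_vars, content, source_file=True):
    # 'content' is now a required positional parameter, so callers written
    # against the old signature fail at call time
    return {"before_header": source, "after_header": destination, "content": content}


try:
    get_diff_data_new("/tmp/dest", "/tmp/src", {})  # old-style call site
except TypeError as exc:
    print(exc)  # ... missing 1 required positional argument: 'content'

# Either the call site passes the new argument explicitly, or the parameter
# gains a default value so existing callers keep working:
print(get_diff_data_new("/tmp/dest", "/tmp/src", {}, content=None))
```
Either kind of adjustment would make the traceback above go away; which one is actually used is determined by the linked pull request.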
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82359
|
https://github.com/ansible/ansible/pull/82360
|
a9919dd7f62c9efe17b8acaebf7c627606ae9f66
|
7f2ad7eea673233223948e0d2a9fc5ee683040ce
| 2023-12-05T13:53:38Z |
python
| 2023-12-12T16:22:23Z |
changelogs/fragments/82359_assemble_diff.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,359 |
Assemble module doesn't pass `content` arg to `_get_diff_data`
|
### Summary
When using the `ansible.builtin.assemble` module with `--diff`, the task fails with the following error:
> Unexpected failure during module execution: ActionBase._get_diff_data() missing 1 required positional argument: 'content'
`ansible-playbook -vvvv` says the relevant function call happens in [ansible/plugins/action/assemble.py, line 143](https://github.com/ansible/ansible/blob/6ebefaceb6cd0d4961776a94d63a71fc1fc28bc0/lib/ansible/plugins/action/assemble.py#L143).
Note: on my local machine that's currently line 144; 143 is on the current devel branch.
The last known working version is 8.6.1
### Issue Type
Bug Report
### Component Name
ansible.builtin.assemble
### Ansible Version
```console
$ ansible --version
ansible [core 2.16.0]
config file = None
configured module search path = ['/Users/albalitz/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible
ansible collection location = /Users/albalitz/.ansible/collections:/usr/share/ansible/collections
executable location = /opt/homebrew/bin/ansible
python version = 3.12.0 (main, Oct 3 2023, 16:20:33) [Clang 14.0.3 (clang-1403.0.22.14.1)] (/opt/homebrew/Cellar/ansible/9.0.1/libexec/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
EDITOR(env: EDITOR) = /usr/bin/vim
PAGER(env: PAGER) = less
```
### OS / Environment
This happens on my local machine running MacOS Sonoma 14.1.2 (Ansible installed via homebrew) as well as our CI system running in a `python:alpine`-based Docker environment with the same Ansible version as above (Ansible is installed via pip there and updated semi-automatically using renovatebot).
### Steps to Reproduce
This step fails with the error described above:
```yaml
- name: create concatenated file
local_action:
module: assemble
remote_src: false
src: files/some_files/
dest: /tmp/concatenated_file
no_log: true
changed_when: false
check_mode: no
become: no
run_once: true
```
The step works when `--diff` is removed from the `ansible-playbook` command.
### Expected Results
I expected the `assemble` step to run successfully and produce a concatenated file with `--diff` enabled but without printing the diff (due to `no_log: true` - I set that to `false` for debugging purposes to see the error message).
### Actual Results
```console
<localhost> ESTABLISH LOCAL CONNECTION FOR USER: albalitz
<localhost> EXEC /bin/sh -c 'echo ~albalitz && sleep 0'
<localhost> EXEC /bin/sh -c '( umask 77 && mkdir -p "` echo /Users/albalitz/.ansible/tmp `"&& mkdir "` echo /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079 `" && echo ansible-tmp-1701782995.5552058-23181-40564253413079="` echo /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079 `" ) && sleep 0'
Using module file /opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible/modules/stat.py
<localhost> PUT /Users/albalitz/.ansible/tmp/ansible-local-23119muc1g04o/tmp8owc8yoz TO /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/AnsiballZ_stat.py
<localhost> EXEC /bin/sh -c 'chmod u+x /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/ /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/AnsiballZ_stat.py && sleep 0'
<localhost> EXEC /bin/sh -c '/opt/homebrew/Cellar/ansible/9.0.1/libexec/bin/python /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/AnsiballZ_stat.py && sleep 0'
<localhost> EXEC /bin/sh -c 'rm -f -r /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/ > /dev/null 2>&1 && sleep 0'
The full traceback is:
Traceback (most recent call last):
File "/opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible/executor/task_executor.py", line 165, in run
res = self._execute()
^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible/executor/task_executor.py", line 641, in _execute
result = self._handler.run(task_vars=vars_copy)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible/plugins/action/assemble.py", line 144, in run
diff = self._get_diff_data(dest, path, task_vars)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
TypeError: ActionBase._get_diff_data() missing 1 required positional argument: 'content'
fatal: [shorewall-0 -> localhost]: FAILED! => {}
MSG:
Unexpected failure during module execution: ActionBase._get_diff_data() missing 1 required positional argument: 'content'
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82359
|
https://github.com/ansible/ansible/pull/82360
|
a9919dd7f62c9efe17b8acaebf7c627606ae9f66
|
7f2ad7eea673233223948e0d2a9fc5ee683040ce
| 2023-12-05T13:53:38Z |
python
| 2023-12-12T16:22:23Z |
lib/ansible/plugins/action/__init__.py
|
# coding: utf-8
# Copyright: (c) 2012-2014, Michael DeHaan <[email protected]>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import base64
import json
import os
import random
import re
import shlex
import stat
import tempfile
from abc import ABC, abstractmethod
from collections.abc import Sequence
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleActionSkip, AnsibleActionFail, AnsibleAuthenticationFailure
from ansible.executor.module_common import modify_module
from ansible.executor.interpreter_discovery import discover_interpreter, InterpreterDiscoveryRequiredError
from ansible.module_utils.common.arg_spec import ArgumentSpecValidator
from ansible.module_utils.errors import UnsupportedError
from ansible.module_utils.json_utils import _filter_non_json_lines
from ansible.module_utils.six import binary_type, string_types, text_type
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.parsing.utils.jsonify import jsonify
from ansible.release import __version__
from ansible.utils.collection_loader import resource_from_fqcr
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import wrap_var, AnsibleUnsafeText
from ansible.vars.clean import remove_internal_keys
from ansible.utils.plugin_docs import get_versioned_doclink
display = Display()
def _validate_utf8_json(d):
if isinstance(d, text_type):
# Purposefully not using to_bytes here for performance reasons
d.encode(encoding='utf-8', errors='strict')
elif isinstance(d, dict):
for o in d.items():
_validate_utf8_json(o)
elif isinstance(d, (list, tuple)):
for o in d:
_validate_utf8_json(o)
class ActionBase(ABC):
'''
This class is the base class for all action plugins, and defines
code common to all actions. The base class handles the connection
by putting/getting files and executing commands based on the current
action in use.
'''
# A set of valid arguments
_VALID_ARGS = frozenset([]) # type: frozenset[str]
# behavioral attributes
BYPASS_HOST_LOOP = False
TRANSFERS_FILES = False
_requires_connection = True
_supports_check_mode = True
_supports_async = False
def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj):
self._task = task
self._connection = connection
self._play_context = play_context
self._loader = loader
self._templar = templar
self._shared_loader_obj = shared_loader_obj
self._cleanup_remote_tmp = False
# interpreter discovery state
self._discovered_interpreter_key = None
self._discovered_interpreter = False
self._discovery_deprecation_warnings = []
self._discovery_warnings = []
self._used_interpreter = None
# Backwards compat: self._display isn't really needed, just import the global display and use that.
self._display = display
@abstractmethod
def run(self, tmp=None, task_vars=None):
""" Action Plugins should implement this method to perform their
tasks. Everything else in this base class is a helper method for the
action plugin to do that.
:kwarg tmp: Deprecated parameter. This is no longer used. An action plugin that calls
another one and wants to use the same remote tmp for both should set
self._connection._shell.tmpdir rather than this parameter.
:kwarg task_vars: The variables (host vars, group vars, config vars,
etc) associated with this task.
:returns: dictionary of results from the module
Implementers of action modules may find the following variables especially useful:
* Module parameters. These are stored in self._task.args
"""
# does not default to {'changed': False, 'failed': False}, as it breaks async
result = {}
if tmp is not None:
result['warning'] = ['ActionModule.run() no longer honors the tmp parameter. Action'
' plugins should set self._connection._shell.tmpdir to share'
' the tmpdir']
del tmp
if self._task.async_val and not self._supports_async:
raise AnsibleActionFail('async is not supported for this task.')
elif self._task.check_mode and not self._supports_check_mode:
raise AnsibleActionSkip('check mode is not supported for this task.')
elif self._task.async_val and self._task.check_mode:
raise AnsibleActionFail('check mode and async cannot be used on same task.')
# Error if invalid argument is passed
if self._VALID_ARGS:
task_opts = frozenset(self._task.args.keys())
bad_opts = task_opts.difference(self._VALID_ARGS)
if bad_opts:
raise AnsibleActionFail('Invalid options for %s: %s' % (self._task.action, ','.join(list(bad_opts))))
if self._connection._shell.tmpdir is None and self._early_needs_tmp_path():
self._make_tmp_path()
return result
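# Usage sketch (simplified assumption): a minimal action plugin built on this
# base class typically calls super().run() first and then executes its module:
#
#     class ActionModule(ActionBase):
#         def run(self, tmp=None, task_vars=None):
#             result = super(ActionModule, self).run(tmp, task_vars)
#             result.update(self._execute_module(task_vars=task_vars))
#             return result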
def validate_argument_spec(self, argument_spec=None,
mutually_exclusive=None,
required_together=None,
required_one_of=None,
required_if=None,
required_by=None,
):
"""Validate an argument spec against the task args
This will return a tuple of (ValidationResult, dict) where the dict
is the validated, coerced, and normalized task args.
Be cautious when passing ``new_module_args`` directly to a
module invocation, as it will contain the defaults, and not only
the args supplied from the task. If you do this, the module
should not define ``mutually_exclusive`` or similar.
This code is roughly copied from the ``validate_argument_spec``
action plugin for use by other action plugins.
"""
new_module_args = self._task.args.copy()
validator = ArgumentSpecValidator(
argument_spec,
mutually_exclusive=mutually_exclusive,
required_together=required_together,
required_one_of=required_one_of,
required_if=required_if,
required_by=required_by,
)
validation_result = validator.validate(new_module_args)
new_module_args.update(validation_result.validated_parameters)
try:
error = validation_result.errors[0]
except IndexError:
error = None
# Fail for validation errors, even in check mode
if error:
msg = validation_result.errors.msg
if isinstance(error, UnsupportedError):
msg = f"Unsupported parameters for ({self._load_name}) module: {msg}"
raise AnsibleActionFail(msg)
return validation_result, new_module_args
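# Example sketch (hypothetical spec and option names): an action plugin might
# validate its task args before doing any other work, e.g.:
#
#     validation_result, new_module_args = self.validate_argument_spec(
#         argument_spec={
#             'path': {'type': 'str', 'required': True},
#             'state': {'type': 'str', 'choices': ['present', 'absent'], 'default': 'present'},
#         },
#     )
#     path = new_module_args['path']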
def cleanup(self, force=False):
"""Method to perform a clean up at the end of an action plugin execution
By default this is designed to clean up the shell tmpdir, and is toggled based on whether
async is in use
Action plugins may override this if they deem necessary, but should still call this method
via super
"""
if force or not self._task.async_val:
self._remove_tmp_path(self._connection._shell.tmpdir)
def get_plugin_option(self, plugin, option, default=None):
"""Helper to get an option from a plugin without having to use
the try/except dance everywhere to set a default
"""
try:
return plugin.get_option(option)
except (AttributeError, KeyError):
return default
def get_become_option(self, option, default=None):
return self.get_plugin_option(self._connection.become, option, default=default)
def get_connection_option(self, option, default=None):
return self.get_plugin_option(self._connection, option, default=default)
def get_shell_option(self, option, default=None):
return self.get_plugin_option(self._connection._shell, option, default=default)
def _remote_file_exists(self, path):
cmd = self._connection._shell.exists(path)
result = self._low_level_execute_command(cmd=cmd, sudoable=True)
if result['rc'] == 0:
return True
return False
def _configure_module(self, module_name, module_args, task_vars):
'''
Handles the loading and templating of the module code through the
modify_module() function.
'''
if self._task.delegate_to:
use_vars = task_vars.get('ansible_delegated_vars')[self._task.delegate_to]
else:
use_vars = task_vars
split_module_name = module_name.split('.')
collection_name = '.'.join(split_module_name[0:2]) if len(split_module_name) > 2 else ''
leaf_module_name = resource_from_fqcr(module_name)
# Search module path(s) for named module.
for mod_type in self._connection.module_implementation_preferences:
# Check to determine if PowerShell modules are supported, and apply
# some fixes (hacks) to module name + args.
if mod_type == '.ps1':
# FIXME: This should be temporary and moved to an exec subsystem plugin where we can define the mapping
# for each subsystem.
win_collection = 'ansible.windows'
rewrite_collection_names = ['ansible.builtin', 'ansible.legacy', '']
# async_status, win_stat, win_file, win_copy, and win_ping are not just like their
# python counterparts but they are compatible enough for our
# internal usage
# NB: we only rewrite the module if it's not being called by the user (eg, an action calling something else)
# and if it's unqualified or FQ to a builtin
if leaf_module_name in ('stat', 'file', 'copy', 'ping') and \
collection_name in rewrite_collection_names and self._task.action != module_name:
module_name = '%s.win_%s' % (win_collection, leaf_module_name)
elif leaf_module_name == 'async_status' and collection_name in rewrite_collection_names:
module_name = '%s.%s' % (win_collection, leaf_module_name)
# TODO: move this tweak down to the modules, not extensible here
# Remove extra quotes surrounding path parameters before sending to module.
if leaf_module_name in ['win_stat', 'win_file', 'win_copy', 'slurp'] and module_args and \
hasattr(self._connection._shell, '_unquote'):
for key in ('src', 'dest', 'path'):
if key in module_args:
module_args[key] = self._connection._shell._unquote(module_args[key])
result = self._shared_loader_obj.module_loader.find_plugin_with_context(module_name, mod_type, collection_list=self._task.collections)
if not result.resolved:
if result.redirect_list and len(result.redirect_list) > 1:
# take the last one in the redirect list, we may have successfully jumped through N other redirects
target_module_name = result.redirect_list[-1]
raise AnsibleError("The module {0} was redirected to {1}, which could not be loaded.".format(module_name, target_module_name))
module_path = result.plugin_resolved_path
if module_path:
break
else: # This is a for-else: http://bit.ly/1ElPkyg
raise AnsibleError("The module %s was not found in configured module paths" % (module_name))
# insert shared code and arguments into the module
final_environment = dict()
self._compute_environment_string(final_environment)
become_kwargs = {}
if self._connection.become:
become_kwargs['become'] = True
become_kwargs['become_method'] = self._connection.become.name
become_kwargs['become_user'] = self._connection.become.get_option('become_user',
playcontext=self._play_context)
become_kwargs['become_password'] = self._connection.become.get_option('become_pass',
playcontext=self._play_context)
become_kwargs['become_flags'] = self._connection.become.get_option('become_flags',
playcontext=self._play_context)
# modify_module will exit early if interpreter discovery is required; re-run after if necessary
for dummy in (1, 2):
try:
(module_data, module_style, module_shebang) = modify_module(module_name, module_path, module_args, self._templar,
task_vars=use_vars,
module_compression=C.config.get_config_value('DEFAULT_MODULE_COMPRESSION',
variables=task_vars),
async_timeout=self._task.async_val,
environment=final_environment,
remote_is_local=bool(getattr(self._connection, '_remote_is_local', False)),
**become_kwargs)
break
except InterpreterDiscoveryRequiredError as idre:
self._discovered_interpreter = AnsibleUnsafeText(discover_interpreter(
action=self,
interpreter_name=idre.interpreter_name,
discovery_mode=idre.discovery_mode,
task_vars=use_vars))
# update the local task_vars with the discovered interpreter (which might be None);
# we'll propagate back to the controller in the task result
discovered_key = 'discovered_interpreter_%s' % idre.interpreter_name
# update the local vars copy for the retry
use_vars['ansible_facts'][discovered_key] = self._discovered_interpreter
# TODO: this condition prevents 'wrong host' from being updated
# but in future we would want to be able to update 'delegated host facts'
# irrespective of task settings
if not self._task.delegate_to or self._task.delegate_facts:
# store in local task_vars facts collection for the retry and any other usages in this worker
task_vars['ansible_facts'][discovered_key] = self._discovered_interpreter
# preserve this so _execute_module can propagate back to controller as a fact
self._discovered_interpreter_key = discovered_key
else:
task_vars['ansible_delegated_vars'][self._task.delegate_to]['ansible_facts'][discovered_key] = self._discovered_interpreter
return (module_style, module_shebang, module_data, module_path)
def _compute_environment_string(self, raw_environment_out=None):
'''
Builds the environment string to be used when executing the remote task.
'''
final_environment = dict()
if self._task.environment is not None:
environments = self._task.environment
if not isinstance(environments, list):
environments = [environments]
# The order of environments matters to make sure we merge
# in the parent's values first so those in the block then
# task 'win' in precedence
for environment in environments:
if environment is None or len(environment) == 0:
continue
temp_environment = self._templar.template(environment)
if not isinstance(temp_environment, dict):
raise AnsibleError("environment must be a dictionary, received %s (%s)" % (temp_environment, type(temp_environment)))
# very deliberately using update here instead of combine_vars, as
# these environment settings should not need to merge sub-dicts
final_environment.update(temp_environment)
if len(final_environment) > 0:
final_environment = self._templar.template(final_environment)
if isinstance(raw_environment_out, dict):
raw_environment_out.clear()
raw_environment_out.update(final_environment)
return self._connection._shell.env_prefix(**final_environment)
def _early_needs_tmp_path(self):
'''
Determines if a tmp path should be created before the action is executed.
'''
return getattr(self, 'TRANSFERS_FILES', False)
def _is_pipelining_enabled(self, module_style, wrap_async=False):
'''
Determines if we are required and can do pipelining
'''
try:
is_enabled = self._connection.get_option('pipelining')
except (KeyError, AttributeError, ValueError):
is_enabled = self._play_context.pipelining
# winrm supports async pipeline
# TODO: make other class property 'has_async_pipelining' to separate cases
always_pipeline = self._connection.always_pipeline_modules
# su does not work with pipelining
# TODO: add has_pipelining class prop to become plugins
become_exception = (self._connection.become.name if self._connection.become else '') != 'su'
# any of these require a true
conditions = [
self._connection.has_pipelining, # connection class supports it
is_enabled or always_pipeline, # enabled via config or forced via connection (eg winrm)
module_style == "new", # old style modules do not support pipelining
not C.DEFAULT_KEEP_REMOTE_FILES, # keeping remote files on disk is incompatible with pipelining
not wrap_async or always_pipeline, # async does not normally support pipelining unless it does (eg winrm)
become_exception,
]
return all(conditions)
def _get_admin_users(self):
'''
Returns a list of admin users that are configured for the current shell
plugin
'''
return self.get_shell_option('admin_users', ['root'])
def _get_remote_addr(self, tvars):
''' consistently get the 'remote_address' for the action plugin '''
remote_addr = tvars.get('delegated_vars', {}).get('ansible_host', tvars.get('ansible_host', tvars.get('inventory_hostname', None)))
for variation in ('remote_addr', 'host'):
try:
remote_addr = self._connection.get_option(variation)
except KeyError:
continue
break
else:
# plugin does not have, fallback to play_context
remote_addr = self._play_context.remote_addr
return remote_addr
def _get_remote_user(self):
''' consistently get the 'remote_user' for the action plugin '''
# TODO: use 'current user running ansible' as fallback when moving away from play_context
# pwd.getpwuid(os.getuid()).pw_name
remote_user = None
try:
remote_user = self._connection.get_option('remote_user')
except KeyError:
# plugin does not have remote_user option, fall back to default and/or play_context
remote_user = getattr(self._connection, 'default_user', None) or self._play_context.remote_user
except AttributeError:
# plugin does not use config system, fallback to old play_context
remote_user = self._play_context.remote_user
return remote_user
def _is_become_unprivileged(self):
'''
The user is not the same as the connection user and is not part of the
shell configured admin users
'''
# if we don't use become then we know we aren't switching to a
# different unprivileged user
if not self._connection.become:
return False
# if we use become and the user is not an admin (or same user) then
# we need to return become_unprivileged as True
admin_users = self._get_admin_users()
remote_user = self._get_remote_user()
become_user = self.get_become_option('become_user')
return bool(become_user and become_user not in admin_users + [remote_user])
def _make_tmp_path(self, remote_user=None):
'''
Create and return a temporary path on a remote box.
'''
# Network connection plugins (network_cli, netconf, etc.) execute on the controller, rather than the remote host.
# As such, we want to avoid using remote_user for paths as remote_user may not line up with the local user
# This is a hack and should be solved by more intelligent handling of remote_tmp in 2.7
if getattr(self._connection, '_remote_is_local', False):
tmpdir = C.DEFAULT_LOCAL_TMP
else:
# NOTE: shell plugins should populate this setting anyway, but they don't do remote expansion, which
# we need for 'non posix' systems like cloud-init and solaris
tmpdir = self._remote_expand_user(self.get_shell_option('remote_tmp', default='~/.ansible/tmp'), sudoable=False)
become_unprivileged = self._is_become_unprivileged()
basefile = self._connection._shell._generate_temp_dir_name()
cmd = self._connection._shell.mkdtemp(basefile=basefile, system=become_unprivileged, tmpdir=tmpdir)
result = self._low_level_execute_command(cmd, sudoable=False)
# error handling on this seems a little aggressive?
if result['rc'] != 0:
if result['rc'] == 5:
output = 'Authentication failure.'
elif result['rc'] == 255 and self._connection.transport in ('ssh',):
if display.verbosity > 3:
output = u'SSH encountered an unknown error. The output was:\n%s%s' % (result['stdout'], result['stderr'])
else:
output = (u'SSH encountered an unknown error during the connection. '
'We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue')
elif u'No space left on device' in result['stderr']:
output = result['stderr']
else:
output = ('Failed to create temporary directory. '
'In some cases, you may have been able to authenticate and did not have permissions on the target directory. '
'Consider changing the remote tmp path in ansible.cfg to a path rooted in "/tmp", for more error information use -vvv. '
'Failed command was: %s, exited with result %d' % (cmd, result['rc']))
if 'stdout' in result and result['stdout'] != u'':
output = output + u", stdout output: %s" % result['stdout']
if display.verbosity > 3 and 'stderr' in result and result['stderr'] != u'':
output += u", stderr output: %s" % result['stderr']
raise AnsibleConnectionFailure(output)
else:
self._cleanup_remote_tmp = True
try:
stdout_parts = result['stdout'].strip().split('%s=' % basefile, 1)
rc = self._connection._shell.join_path(stdout_parts[-1], u'').splitlines()[-1]
except IndexError:
# stdout was empty or just space, set to / to trigger error in next if
rc = '/'
# Catch failure conditions, files should never be
# written to locations in /.
if rc == '/':
raise AnsibleError('failed to resolve remote temporary directory from %s: `%s` returned empty string' % (basefile, cmd))
self._connection._shell.tmpdir = rc
return rc
def _should_remove_tmp_path(self, tmp_path):
'''Determine if temporary path should be deleted or kept by user request/config'''
return tmp_path and self._cleanup_remote_tmp and not C.DEFAULT_KEEP_REMOTE_FILES and "-tmp-" in tmp_path
def _remove_tmp_path(self, tmp_path, force=False):
'''Remove a temporary path we created. '''
if tmp_path is None and self._connection._shell.tmpdir:
tmp_path = self._connection._shell.tmpdir
if force or self._should_remove_tmp_path(tmp_path):
cmd = self._connection._shell.remove(tmp_path, recurse=True)
# If we have gotten here we have a working connection configuration.
# If the connection breaks we could leave tmp directories out on the remote system.
tmp_rm_res = self._low_level_execute_command(cmd, sudoable=False)
if tmp_rm_res.get('rc', 0) != 0:
display.warning('Error deleting remote temporary files (rc: %s, stderr: %s)'
% (tmp_rm_res.get('rc'), tmp_rm_res.get('stderr', 'No error string available.')))
else:
self._connection._shell.tmpdir = None
def _transfer_file(self, local_path, remote_path):
"""
Copy a file from the controller to a remote path
:arg local_path: Path on controller to transfer
:arg remote_path: Path on the remote system to transfer into
.. warning::
* When you use this function you likely want to use fixup_perms2() on the
remote_path to make sure that the remote file is readable when the user becomes
a non-privileged user.
* If you use fixup_perms2() on the file and copy or move the file into place, you will
need to then remove filesystem acls on the file once it has been copied into place by
the module. See how the copy module implements this for help.
"""
self._connection.put_file(local_path, remote_path)
return remote_path
def _transfer_data(self, remote_path, data):
'''
Copies the module data out to the temporary module path.
'''
if isinstance(data, dict):
data = jsonify(data)
afd, afile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
afo = os.fdopen(afd, 'wb')
try:
data = to_bytes(data, errors='surrogate_or_strict')
afo.write(data)
except Exception as e:
raise AnsibleError("failure writing module data to temporary file for transfer: %s" % to_native(e))
afo.flush()
afo.close()
try:
self._transfer_file(afile, remote_path)
finally:
os.unlink(afile)
return remote_path
def _fixup_perms2(self, remote_paths, remote_user=None, execute=True):
"""
We need the files we upload to be readable (and sometimes executable)
by the user being sudo'd to but we want to limit other people's access
(because the files could contain passwords or other private
information). We achieve this in one of these ways:
* If no sudo is performed or the remote_user is sudo'ing to
themselves, we don't have to change permissions.
* If the remote_user sudo's to a privileged user (for instance, root),
we don't have to change permissions
* If the remote_user sudo's to an unprivileged user then we attempt to
grant the unprivileged user access via file system acls.
* If granting file system acls fails we try to change the owner of the
file with chown which only works in case the remote_user is
privileged or the remote systems allows chown calls by unprivileged
users (e.g. HP-UX)
* If the above fails, we next try 'chmod +a' which is a macOS way of
setting ACLs on files.
* If the above fails, we check if ansible_common_remote_group is set.
If it is, we attempt to chgrp the file to its value. This is useful
if the remote_user has a group in common with the become_user. As the
remote_user, we can chgrp the file to that group and allow the
become_user to read it.
* If (the chown fails AND ansible_common_remote_group is not set) OR
(ansible_common_remote_group is set AND the chgrp (or following chmod)
returned non-zero), we can set the file to be world readable so that
the second unprivileged user can read the file.
Since this could allow other users to get access to private
information we only do this if ansible is configured with
"allow_world_readable_tmpfiles" in the ansible.cfg. Also note that
when ansible_common_remote_group is set this final fallback is very
unlikely to ever be triggered, so long as chgrp was successful. But
just because the chgrp was successful, does not mean Ansible can
necessarily access the files (if, for example, the variable was set
to a group that remote_user is in, and can chgrp to, but does not have
in common with become_user).
"""
if remote_user is None:
remote_user = self._get_remote_user()
# Step 1: Are we on windows?
if getattr(self._connection._shell, "_IS_WINDOWS", False):
# This won't work on Powershell as-is, so we'll just completely
# skip until we have a need for it, at which point we'll have to do
# something different.
return remote_paths
# Step 2: If we're not becoming an unprivileged user, we are roughly
# done. Make the files +x if we're asked to, and return.
if not self._is_become_unprivileged():
if execute:
# Can't depend on the file being transferred with execute permissions.
# Only need user perms because no become was used here
res = self._remote_chmod(remote_paths, 'u+x')
if res['rc'] != 0:
raise AnsibleError(
'Failed to set execute bit on remote files '
'(rc: {0}, err: {1})'.format(
res['rc'],
to_native(res['stderr'])))
return remote_paths
# If we're still here, we have an unprivileged user that's different
# than the ssh user.
become_user = self.get_become_option('become_user')
# Try to use file system acls to make the files readable for sudo'd
# user
if execute:
chmod_mode = 'rx'
setfacl_mode = 'r-x'
# Apple patches their "file_cmds" chmod with ACL support
chmod_acl_mode = '{0} allow read,execute'.format(become_user)
# POSIX-draft ACL specification. Solaris, maybe others.
# See chmod(1) on something Solaris-based for syntax details.
posix_acl_mode = 'A+user:{0}:rx:allow'.format(become_user)
else:
chmod_mode = 'rX'
# TODO: this form fails silently on freebsd. We currently
# never call _fixup_perms2() with execute=False but if we
# start to we'll have to fix this.
setfacl_mode = 'r-X'
# Apple
chmod_acl_mode = '{0} allow read'.format(become_user)
# POSIX-draft
posix_acl_mode = 'A+user:{0}:r:allow'.format(become_user)
# Step 3a: Are we able to use setfacl to add user ACLs to the file?
res = self._remote_set_user_facl(
remote_paths,
become_user,
setfacl_mode)
if res['rc'] == 0:
return remote_paths
# Step 3b: Set execute if we need to. We do this before anything else
# because some of the methods below might work but not let us set +x
# as part of them.
if execute:
res = self._remote_chmod(remote_paths, 'u+x')
if res['rc'] != 0:
raise AnsibleError(
'Failed to set file mode or acl on remote temporary files '
'(rc: {0}, err: {1})'.format(
res['rc'],
to_native(res['stderr'])))
# Step 3c: File system ACLs failed above; try falling back to chown.
res = self._remote_chown(remote_paths, become_user)
if res['rc'] == 0:
return remote_paths
# Check if we are an admin/root user. If we are and got here, it means
# we failed to chown as root and something weird has happened.
if remote_user in self._get_admin_users():
raise AnsibleError(
'Failed to change ownership of the temporary files Ansible '
'(via chmod nor setfacl) needs to create despite connecting as a '
'privileged user. Unprivileged become user would be unable to read'
' the file.')
# Step 3d: Try macOS's special chmod + ACL
# macOS chmod's +a flag takes its own argument. As a slight hack, we
# pass that argument as the first element of remote_paths. So we end
# up running `chmod +a [that argument] [file 1] [file 2] ...`
try:
res = self._remote_chmod([chmod_acl_mode] + list(remote_paths), '+a')
except AnsibleAuthenticationFailure as e:
# Solaris-based chmod will return 5 when it sees an invalid mode,
# and +a is invalid there. Because it returns 5, which is the same
# thing sshpass returns on auth failure, our sshpass code will
# assume that auth failed. If we don't handle that case here, none
# of the other logic below will get run. This is fairly hacky and a
# corner case, but probably one that shows up pretty often in
# Solaris-based environments (and possibly others).
pass
else:
if res['rc'] == 0:
return remote_paths
# Step 3e: Try Solaris/OpenSolaris/OpenIndiana-sans-setfacl chmod
# Similar to macOS above, Solaris 11.4 drops setfacl and takes file ACLs
# via chmod instead. OpenSolaris and illumos-based distros allow for
# using either setfacl or chmod, and compatibility depends on filesystem.
# It should be possible to debug this branch by installing OpenIndiana
# (use ZFS) and going unpriv -> unpriv.
res = self._remote_chmod(remote_paths, posix_acl_mode)
if res['rc'] == 0:
return remote_paths
# we'll need this down here
become_link = get_versioned_doclink('playbook_guide/playbooks_privilege_escalation.html')
# Step 3f: Common group
# Otherwise, we're a normal user. We failed to chown the paths to the
# unprivileged user, but if we have a common group with them, we should
# be able to chown it to that.
#
# Note that we have no way of knowing if this will actually work... just
# because chgrp exits successfully does not mean that Ansible will work.
# We could check if the become user is in the group, but this would
# create an extra round trip.
#
# Also note that due to the above, this can prevent the
# world_readable_temp logic below from ever getting called. We
# leave this up to the user to rectify if they have both of these
# features enabled.
group = self.get_shell_option('common_remote_group')
if group is not None:
res = self._remote_chgrp(remote_paths, group)
if res['rc'] == 0:
# warn user that something might go weirdly here.
if self.get_shell_option('world_readable_temp'):
display.warning(
'Both common_remote_group and '
'allow_world_readable_tmpfiles are set. chgrp was '
'successful, but there is no guarantee that Ansible '
'will be able to read the files after this operation, '
'particularly if common_remote_group was set to a '
'group of which the unprivileged become user is not a '
'member. In this situation, '
'allow_world_readable_tmpfiles is a no-op. See this '
'URL for more details: %s'
'#risks-of-becoming-an-unprivileged-user' % become_link)
if execute:
group_mode = 'g+rwx'
else:
group_mode = 'g+rw'
res = self._remote_chmod(remote_paths, group_mode)
if res['rc'] == 0:
return remote_paths
# Step 4: World-readable temp directory
if self.get_shell_option('world_readable_temp'):
# chown and fs acls failed -- do things this insecure way only if
# the user opted in in the config file
display.warning(
'Using world-readable permissions for temporary files Ansible '
'needs to create when becoming an unprivileged user. This may '
'be insecure. For information on securing this, see %s'
'#risks-of-becoming-an-unprivileged-user' % become_link)
res = self._remote_chmod(remote_paths, 'a+%s' % chmod_mode)
if res['rc'] == 0:
return remote_paths
raise AnsibleError(
'Failed to set file mode on remote files '
'(rc: {0}, err: {1})'.format(
res['rc'],
to_native(res['stderr'])))
raise AnsibleError(
'Failed to set permissions on the temporary files Ansible needs '
'to create when becoming an unprivileged user '
'(rc: %s, err: %s). For information on working around this, see %s'
'#risks-of-becoming-an-unprivileged-user' % (
res['rc'],
to_native(res['stderr']), become_link))
def _remote_chmod(self, paths, mode, sudoable=False):
'''
Issue a remote chmod command
'''
cmd = self._connection._shell.chmod(paths, mode)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _remote_chown(self, paths, user, sudoable=False):
'''
Issue a remote chown command
'''
cmd = self._connection._shell.chown(paths, user)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _remote_chgrp(self, paths, group, sudoable=False):
'''
Issue a remote chgrp command
'''
cmd = self._connection._shell.chgrp(paths, group)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _remote_set_user_facl(self, paths, user, mode, sudoable=False):
'''
Issue a remote call to setfacl
'''
cmd = self._connection._shell.set_user_facl(paths, user, mode)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _execute_remote_stat(self, path, all_vars, follow, tmp=None, checksum=True):
'''
Get information from remote file.
'''
if tmp is not None:
display.warning('_execute_remote_stat no longer honors the tmp parameter. Action'
' plugins should set self._connection._shell.tmpdir to share'
' the tmpdir')
del tmp # No longer used
module_args = dict(
path=path,
follow=follow,
get_checksum=checksum,
get_size=False, # ansible.windows.win_stat added this in 1.11.0
checksum_algorithm='sha1',
)
# Unknown opts are ignored as module_args could be specific for the
# module that is being executed.
mystat = self._execute_module(module_name='ansible.legacy.stat', module_args=module_args, task_vars=all_vars,
wrap_async=False, ignore_unknown_opts=True)
if mystat.get('failed'):
msg = mystat.get('module_stderr')
if not msg:
msg = mystat.get('module_stdout')
if not msg:
msg = mystat.get('msg')
raise AnsibleError('Failed to get information on remote file (%s): %s' % (path, msg))
if not mystat['stat']['exists']:
# empty might be matched, 1 should never match, also backwards compatible
mystat['stat']['checksum'] = '1'
# happens sometimes when it is a dir and not on bsd
if 'checksum' not in mystat['stat']:
mystat['stat']['checksum'] = ''
elif not isinstance(mystat['stat']['checksum'], string_types):
raise AnsibleError("Invalid checksum returned by stat: expected a string type but got %s" % type(mystat['stat']['checksum']))
return mystat['stat']
def _remote_expand_user(self, path, sudoable=True, pathsep=None):
''' takes a remote path and performs tilde/$HOME expansion on the remote host '''
# We only expand ~/path and ~username/path
if not path.startswith('~'):
return path
# Per Jborean, we don't have to worry about Windows as we don't have a notion of user's home
# dir there.
split_path = path.split(os.path.sep, 1)
expand_path = split_path[0]
if expand_path == '~':
# Network connection plugins (network_cli, netconf, etc.) execute on the controller, rather than the remote host.
# As such, we want to avoid using remote_user for paths as remote_user may not line up with the local user
# This is a hack and should be solved by more intelligent handling of remote_tmp in 2.7
become_user = self.get_become_option('become_user')
if getattr(self._connection, '_remote_is_local', False):
pass
elif sudoable and self._connection.become and become_user:
expand_path = '~%s' % become_user
else:
# use remote user instead, if none set default to current user
expand_path = '~%s' % (self._get_remote_user() or '')
# use shell to construct appropriate command and execute
cmd = self._connection._shell.expand_user(expand_path)
data = self._low_level_execute_command(cmd, sudoable=False)
try:
initial_fragment = data['stdout'].strip().splitlines()[-1]
except IndexError:
initial_fragment = None
if not initial_fragment:
# Something went wrong trying to expand the path remotely. Try using pwd, if not, return
# the original string
cmd = self._connection._shell.pwd()
pwd = self._low_level_execute_command(cmd, sudoable=False).get('stdout', '').strip()
if pwd:
expanded = pwd
else:
expanded = path
elif len(split_path) > 1:
expanded = self._connection._shell.join_path(initial_fragment, *split_path[1:])
else:
expanded = initial_fragment
if '..' in os.path.dirname(expanded).split('/'):
raise AnsibleError("'%s' returned an invalid relative home directory path containing '..'" % self._get_remote_addr({}))
return expanded
def _strip_success_message(self, data):
'''
Removes the BECOME-SUCCESS message from the data.
'''
if data.strip().startswith('BECOME-SUCCESS-'):
data = re.sub(r'^((\r)?\n)?BECOME-SUCCESS.*(\r)?\n', '', data)
return data
def _update_module_args(self, module_name, module_args, task_vars, ignore_unknown_opts: bool = False):
# set check mode in the module arguments, if required
if self._task.check_mode:
if not self._supports_check_mode:
raise AnsibleError("check mode is not supported for this operation")
module_args['_ansible_check_mode'] = True
else:
module_args['_ansible_check_mode'] = False
# set no log in the module arguments, if required
no_target_syslog = C.config.get_config_value('DEFAULT_NO_TARGET_SYSLOG', variables=task_vars)
module_args['_ansible_no_log'] = self._task.no_log or no_target_syslog
# set debug in the module arguments, if required
module_args['_ansible_debug'] = C.DEFAULT_DEBUG
# let module know we are in diff mode
module_args['_ansible_diff'] = self._task.diff
# let module know our verbosity
module_args['_ansible_verbosity'] = display.verbosity
# give the module information about the ansible version
module_args['_ansible_version'] = __version__
# give the module information about its name
module_args['_ansible_module_name'] = module_name
# set the syslog facility to be used in the module
module_args['_ansible_syslog_facility'] = task_vars.get('ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY)
# let module know about filesystems that selinux treats specially
module_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS
# what to do when parameter values are converted to strings
module_args['_ansible_string_conversion_action'] = C.STRING_CONVERSION_ACTION
# give the module the socket for persistent connections
module_args['_ansible_socket'] = getattr(self._connection, 'socket_path')
if not module_args['_ansible_socket']:
module_args['_ansible_socket'] = task_vars.get('ansible_socket')
# make sure all commands use the designated shell executable
module_args['_ansible_shell_executable'] = self._play_context.executable
# make sure modules are aware if they need to keep the remote files
module_args['_ansible_keep_remote_files'] = C.DEFAULT_KEEP_REMOTE_FILES
# make sure all commands use the designated temporary directory if created
if self._is_become_unprivileged(): # force fallback on remote_tmp as user cannot normally write to dir
module_args['_ansible_tmpdir'] = None
else:
module_args['_ansible_tmpdir'] = self._connection._shell.tmpdir
# make sure the remote_tmp value is sent through in case modules need to create their own
module_args['_ansible_remote_tmp'] = self.get_shell_option('remote_tmp', default='~/.ansible/tmp')
# tells the module to ignore options that are not in its argspec.
module_args['_ansible_ignore_unknown_opts'] = ignore_unknown_opts
# allow user to insert string to add context to remote logging
module_args['_ansible_target_log_info'] = C.config.get_config_value('TARGET_LOG_INFO', variables=task_vars)
def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=None, wrap_async=False,
ignore_unknown_opts: bool = False):
'''
Transfer and run a module along with its arguments.
'''
if tmp is not None:
display.warning('_execute_module no longer honors the tmp parameter. Action plugins'
' should set self._connection._shell.tmpdir to share the tmpdir')
del tmp # No longer used
if delete_remote_tmp is not None:
display.warning('_execute_module no longer honors the delete_remote_tmp parameter.'
' Action plugins should check self._connection._shell.tmpdir to'
' see if a tmpdir existed before they were called to determine'
' if they are responsible for removing it.')
del delete_remote_tmp # No longer used
tmpdir = self._connection._shell.tmpdir
# We set the module_style to new here so the remote_tmp is created
# before the module args are built if remote_tmp is needed (async).
# If the module_style turns out to not be new and we didn't create the
# remote tmp here, it will still be created. This must be done before
# calling self._update_module_args() so the module wrapper has the
# correct remote_tmp value set
if not self._is_pipelining_enabled("new", wrap_async) and tmpdir is None:
self._make_tmp_path()
tmpdir = self._connection._shell.tmpdir
if task_vars is None:
task_vars = dict()
# if a module name was not specified for this execution, use the action from the task
if module_name is None:
module_name = self._task.action
if module_args is None:
module_args = self._task.args
self._update_module_args(module_name, module_args, task_vars, ignore_unknown_opts=ignore_unknown_opts)
remove_async_dir = None
if wrap_async or self._task.async_val:
async_dir = self.get_shell_option('async_dir', default="~/.ansible_async")
remove_async_dir = len(self._task.environment)
self._task.environment.append({"ANSIBLE_ASYNC_DIR": async_dir})
# FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality
(module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
display.vvv("Using module file %s" % module_path)
if not shebang and module_style != 'binary':
raise AnsibleError("module (%s) is missing interpreter line" % module_name)
self._used_interpreter = shebang
remote_module_path = None
if not self._is_pipelining_enabled(module_style, wrap_async):
# we might need remote tmp dir
if tmpdir is None:
self._make_tmp_path()
tmpdir = self._connection._shell.tmpdir
remote_module_filename = self._connection._shell.get_remote_filename(module_path)
remote_module_path = self._connection._shell.join_path(tmpdir, 'AnsiballZ_%s' % remote_module_filename)
args_file_path = None
if module_style in ('old', 'non_native_want_json', 'binary'):
# we'll also need a tmp file to hold our module arguments
args_file_path = self._connection._shell.join_path(tmpdir, 'args')
if remote_module_path or module_style != 'new':
display.debug("transferring module to remote %s" % remote_module_path)
if module_style == 'binary':
self._transfer_file(module_path, remote_module_path)
else:
self._transfer_data(remote_module_path, module_data)
if module_style == 'old':
# we need to dump the module args to a k=v string in a file on
# the remote system, which can be read and parsed by the module
args_data = ""
for k, v in module_args.items():
args_data += '%s=%s ' % (k, shlex.quote(text_type(v)))
self._transfer_data(args_file_path, args_data)
elif module_style in ('non_native_want_json', 'binary'):
self._transfer_data(args_file_path, json.dumps(module_args))
display.debug("done transferring module to remote")
environment_string = self._compute_environment_string()
# remove the ANSIBLE_ASYNC_DIR env entry if we added a temporary one for
# the async_wrapper task.
if remove_async_dir is not None:
del self._task.environment[remove_async_dir]
remote_files = []
if tmpdir and remote_module_path:
remote_files = [tmpdir, remote_module_path]
if args_file_path:
remote_files.append(args_file_path)
sudoable = True
in_data = None
cmd = ""
if wrap_async and not self._connection.always_pipeline_modules:
# configure, upload, and chmod the async_wrapper module
(async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(
module_name='ansible.legacy.async_wrapper', module_args=dict(), task_vars=task_vars)
async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
remote_async_module_path = self._connection._shell.join_path(tmpdir, async_module_remote_filename)
self._transfer_data(remote_async_module_path, async_module_data)
remote_files.append(remote_async_module_path)
async_limit = self._task.async_val
async_jid = f'j{random.randint(0, 999999999999)}'
# call the interpreter for async_wrapper directly
# this permits use of a script for an interpreter on non-Linux platforms
interpreter = shebang.replace('#!', '').strip()
async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path]
if environment_string:
async_cmd.insert(0, environment_string)
if args_file_path:
async_cmd.append(args_file_path)
else:
# maintain a fixed number of positional parameters for async_wrapper
async_cmd.append('_')
if not self._should_remove_tmp_path(tmpdir):
async_cmd.append("-preserve_tmp")
cmd = " ".join(to_text(x) for x in async_cmd)
else:
if self._is_pipelining_enabled(module_style):
in_data = module_data
display.vvv("Pipelining is enabled.")
else:
cmd = remote_module_path
cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path).strip()
# Fix permissions of the tmpdir path and tmpdir files. This should be called after all
# files have been transferred.
if remote_files:
# remove none/empty
remote_files = [x for x in remote_files if x]
self._fixup_perms2(remote_files, self._get_remote_user())
# actually execute
res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data)
# parse the main result
data = self._parse_returned_data(res)
# NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
# get internal info before cleaning
if data.pop("_ansible_suppress_tmpdir_delete", False):
self._cleanup_remote_tmp = False
# NOTE: yum returns results .. but that made it 'compatible' with squashing, so we allow mappings, for now
if 'results' in data and (not isinstance(data['results'], Sequence) or isinstance(data['results'], string_types)):
data['ansible_module_results'] = data['results']
del data['results']
display.warning("Found internal 'results' key in module return, renamed to 'ansible_module_results'.")
# remove internal keys
remove_internal_keys(data)
if wrap_async:
# async_wrapper will clean up its tmpdir on its own so we want the controller side to
# forget about it now
self._connection._shell.tmpdir = None
# FIXME: for backwards compat, figure out if still makes sense
data['changed'] = True
# pre-split stdout/stderr into lines if needed
if 'stdout' in data and 'stdout_lines' not in data:
# if the value is 'False', a default won't catch it.
txt = data.get('stdout', None) or u''
data['stdout_lines'] = txt.splitlines()
if 'stderr' in data and 'stderr_lines' not in data:
# if the value is 'False', a default won't catch it.
txt = data.get('stderr', None) or u''
data['stderr_lines'] = txt.splitlines()
# propagate interpreter discovery results back to the controller
if self._discovered_interpreter_key:
if data.get('ansible_facts') is None:
data['ansible_facts'] = {}
data['ansible_facts'][self._discovered_interpreter_key] = self._discovered_interpreter
if self._discovery_warnings:
if data.get('warnings') is None:
data['warnings'] = []
data['warnings'].extend(self._discovery_warnings)
if self._discovery_deprecation_warnings:
if data.get('deprecations') is None:
data['deprecations'] = []
data['deprecations'].extend(self._discovery_deprecation_warnings)
# mark the entire module results untrusted as a template right here, since the current action could
# possibly template one of these values.
data = wrap_var(data)
display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
return data
def _parse_returned_data(self, res):
try:
filtered_output, warnings = _filter_non_json_lines(res.get('stdout', u''), objects_only=True)
for w in warnings:
display.warning(w)
data = json.loads(filtered_output)
if C.MODULE_STRICT_UTF8_RESPONSE and not data.pop('_ansible_trusted_utf8', None):
try:
_validate_utf8_json(data)
except UnicodeEncodeError:
# When removing this, also remove the loop and latin-1 from ansible.module_utils.common.text.converters.jsonify
display.deprecated(
f'Module "{self._task.resolved_action or self._task.action}" returned non UTF-8 data in '
'the JSON response. This will become an error in the future',
version='2.18',
)
data['_ansible_parsed'] = True
except ValueError:
# not valid json, lets try to capture error
data = dict(failed=True, _ansible_parsed=False)
data['module_stdout'] = res.get('stdout', u'')
if 'stderr' in res:
data['module_stderr'] = res['stderr']
if res['stderr'].startswith(u'Traceback'):
data['exception'] = res['stderr']
# in some cases a traceback will arrive on stdout instead of stderr, such as when using ssh with -tt
if 'exception' not in data and data['module_stdout'].startswith(u'Traceback'):
data['exception'] = data['module_stdout']
# The default
data['msg'] = "MODULE FAILURE"
# try to figure out if we are missing interpreter
if self._used_interpreter is not None:
interpreter = re.escape(self._used_interpreter.lstrip('!#'))
match = re.compile('%s: (?:No such file or directory|not found)' % interpreter)
if match.search(data['module_stderr']) or match.search(data['module_stdout']):
data['msg'] = "The module failed to execute correctly, you probably need to set the interpreter."
# always append hint
data['msg'] += '\nSee stdout/stderr for the exact error'
if 'rc' in res:
data['rc'] = res['rc']
return data
# FIXME: move to connection base
def _low_level_execute_command(self, cmd, sudoable=True, in_data=None, executable=None, encoding_errors='surrogate_then_replace', chdir=None):
'''
This is the function which executes the low level shell command, which
may be commands to create/remove directories for temporary files, or to
run the module code or python directly when pipelining.
:kwarg encoding_errors: If the value returned by the command isn't
utf-8 then we have to figure out how to transform it to unicode.
If the value is just going to be displayed to the user (or
discarded) then the default of 'replace' is fine. If the data is
used as a key or is going to be written back out to a file
verbatim, then this won't work. May have to use some sort of
replacement strategy (python3 could use surrogateescape)
:kwarg chdir: cd into this directory before executing the command.
'''
display.debug("_low_level_execute_command(): starting")
# if not cmd:
# # this can happen with powershell modules when there is no analog to a Windows command (like chmod)
# display.debug("_low_level_execute_command(): no command, exiting")
# return dict(stdout='', stderr='', rc=254)
if chdir:
display.debug("_low_level_execute_command(): changing cwd to %s for this command" % chdir)
cmd = self._connection._shell.append_command('cd %s' % chdir, cmd)
# https://github.com/ansible/ansible/issues/68054
if executable:
self._connection._shell.executable = executable
ruser = self._get_remote_user()
buser = self.get_become_option('become_user')
if (sudoable and self._connection.become and # if sudoable and have become
resource_from_fqcr(self._connection.transport) != 'network_cli' and # if not using network_cli
(C.BECOME_ALLOW_SAME_USER or (buser != ruser or not any((ruser, buser))))): # if we allow same user PE or users are different and either is set
display.debug("_low_level_execute_command(): using become for this command")
cmd = self._connection.become.build_become_command(cmd, self._connection._shell)
if self._connection.allow_executable:
if executable is None:
executable = self._play_context.executable
# mitigation for SSH race which can drop stdout (https://github.com/ansible/ansible/issues/13876)
# only applied for the default executable to avoid interfering with the raw action
cmd = self._connection._shell.append_command(cmd, 'sleep 0')
if executable:
cmd = executable + ' -c ' + shlex.quote(cmd)
display.debug("_low_level_execute_command(): executing: %s" % (cmd,))
# Change directory to basedir of task for command execution when connection is local
if self._connection.transport == 'local':
self._connection.cwd = to_bytes(self._loader.get_basedir(), errors='surrogate_or_strict')
rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable)
# stdout and stderr may be either a file-like or a bytes object.
# Convert either one to a text type
if isinstance(stdout, binary_type):
out = to_text(stdout, errors=encoding_errors)
elif not isinstance(stdout, text_type):
out = to_text(b''.join(stdout.readlines()), errors=encoding_errors)
else:
out = stdout
if isinstance(stderr, binary_type):
err = to_text(stderr, errors=encoding_errors)
elif not isinstance(stderr, text_type):
err = to_text(b''.join(stderr.readlines()), errors=encoding_errors)
else:
err = stderr
if rc is None:
rc = 0
# be sure to remove the BECOME-SUCCESS message now
out = self._strip_success_message(out)
display.debug(u"_low_level_execute_command() done: rc=%d, stdout=%s, stderr=%s" % (rc, out, err))
return dict(rc=rc, stdout=out, stdout_lines=out.splitlines(), stderr=err, stderr_lines=err.splitlines())
def _get_diff_data(self, destination, source, task_vars, content, source_file=True):
# Note: Since we do not diff the source and destination before we transform from bytes into
# text the diff between source and destination may not be accurate. To fix this, we'd need
# to move the diffing from the callback plugins into here.
#
# Example of data which would cause trouble is src_content == b'\xff' and dest_content ==
# b'\xfe'. Neither of those are valid utf-8 so both get turned into the replacement
# character: diff['before'] = u'�' ; diff['after'] = u'�' When the callback plugin later
# diffs before and after it shows an empty diff.
diff = {}
display.debug("Going to peek to see if file has changed permissions")
peek_result = self._execute_module(
module_name='ansible.legacy.file', module_args=dict(path=destination, _diff_peek=True),
task_vars=task_vars, persist_files=True)
if peek_result.get('failed', False):
display.warning(u"Failed to get diff between '%s' and '%s': %s" % (os.path.basename(source), destination, to_text(peek_result.get(u'msg', u''))))
return diff
if peek_result.get('rc', 0) == 0:
if peek_result.get('state') in (None, 'absent'):
diff['before'] = u''
elif peek_result.get('appears_binary'):
diff['dst_binary'] = 1
elif peek_result.get('size') and C.MAX_FILE_SIZE_FOR_DIFF > 0 and peek_result['size'] > C.MAX_FILE_SIZE_FOR_DIFF:
diff['dst_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
else:
display.debug(u"Slurping the file %s" % source)
dest_result = self._execute_module(
module_name='ansible.legacy.slurp', module_args=dict(path=destination),
task_vars=task_vars, persist_files=True)
if 'content' in dest_result:
dest_contents = dest_result['content']
if dest_result['encoding'] == u'base64':
dest_contents = base64.b64decode(dest_contents)
else:
raise AnsibleError("unknown encoding in content option, failed: %s" % to_native(dest_result))
diff['before_header'] = destination
diff['before'] = to_text(dest_contents)
if source_file:
st = os.stat(source)
if C.MAX_FILE_SIZE_FOR_DIFF > 0 and st[stat.ST_SIZE] > C.MAX_FILE_SIZE_FOR_DIFF:
diff['src_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
else:
display.debug("Reading local copy of the file %s" % source)
try:
with open(source, 'rb') as src:
src_contents = src.read()
except Exception as e:
raise AnsibleError("Unexpected error while reading source (%s) for diff: %s " % (source, to_native(e)))
if b"\x00" in src_contents:
diff['src_binary'] = 1
else:
if content:
diff['after_header'] = destination
else:
diff['after_header'] = source
diff['after'] = to_text(src_contents)
else:
display.debug(u"source of file passed in")
diff['after_header'] = u'dynamically generated'
diff['after'] = source
if self._task.no_log:
if 'before' in diff:
diff["before"] = u""
if 'after' in diff:
diff["after"] = u" [[ Diff output has been hidden because 'no_log: true' was specified for this result ]]\n"
return diff
def _find_needle(self, dirname, needle):
'''
find a needle in haystack of paths, optionally using 'dirname' as a subdir.
This will build the ordered list of paths to search and pass them to dwim
to get back the first existing file found.
'''
# dwim already deals with playbook basedirs
path_stack = self._task.get_search_path()
# if missing it will return a file not found exception
return self._loader.path_dwim_relative_stack(path_stack, dirname, needle)
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,359 |
Assemble module doesn't pass `content` arg to `_get_diff_data`
|
### Summary
When using the `ansible.builtin.assemble` module with `--diff`, the task fails with the following error:
> Unexpected failure during module execution: ActionBase._get_diff_data() missing 1 required positional argument: 'content'
`ansible-playbook -vvvv` says the relevant function call happens in [ansible/plugins/action/assemble.py, line 143](https://github.com/ansible/ansible/blob/6ebefaceb6cd0d4961776a94d63a71fc1fc28bc0/lib/ansible/plugins/action/assemble.py#L143).
Note: on my local machine that's currently line 144; 143 is on the current devel branch.
The last known working version is 8.6.1.
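For reference, the base action plugin's helper is defined as `_get_diff_data(self, destination, source, task_vars, content, source_file=True)`, while the assemble action still calls it with only three arguments. Below is a minimal, self-contained sketch of the failure mode and two possible remedies (hypothetical names; an illustration only, not necessarily the change merged in the linked PR):
```python
# Hypothetical stand-in for ActionBase._get_diff_data(): a helper gained a new
# required positional parameter, but an old call site was never updated.
def get_diff_data(destination, source, task_vars, content, source_file=True):
    return {"before_header": destination, "after_header": source if source_file else "dynamically generated"}

# Old-style call, as in assemble.py around line 143:
# get_diff_data("/tmp/concatenated_file", "/tmp/fragments", {})
# -> TypeError: get_diff_data() missing 1 required positional argument: 'content'

# Fix option 1: update the caller to pass the new argument explicitly.
diff = get_diff_data("/tmp/concatenated_file", "/tmp/fragments", {}, content=None)

# Fix option 2: give the new parameter a default so existing callers keep working.
def get_diff_data_compat(destination, source, task_vars, content=None, source_file=True):
    return {"before_header": destination, "after_header": source if source_file else "dynamically generated"}
```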
### Issue Type
Bug Report
### Component Name
ansible.builtin.assemble
### Ansible Version
```console
$ ansible --version
ansible [core 2.16.0]
config file = None
configured module search path = ['/Users/albalitz/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible
ansible collection location = /Users/albalitz/.ansible/collections:/usr/share/ansible/collections
executable location = /opt/homebrew/bin/ansible
python version = 3.12.0 (main, Oct 3 2023, 16:20:33) [Clang 14.0.3 (clang-1403.0.22.14.1)] (/opt/homebrew/Cellar/ansible/9.0.1/libexec/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
EDITOR(env: EDITOR) = /usr/bin/vim
PAGER(env: PAGER) = less
```
### OS / Environment
This happens on my local machine running MacOS Sonoma 14.1.2 (Ansible installed via homebrew) as well as our CI system running in a `python:alpine`-based Docker environment with the same Ansible version as above (Ansible is installed via pip there and updated semi-automatically using renovatebot).
### Steps to Reproduce
This step fails with the error described above:
```yaml
- name: create concatenated file
local_action:
module: assemble
remote_src: false
src: files/some_files/
dest: /tmp/concatenated_file
no_log: true
changed_when: false
check_mode: no
become: no
run_once: true
```
The step works when `--diff` is removed from the `ansible-playbook` command.
### Expected Results
I expected the `assemble` step to run successfully and produce a concatenated file with `--diff` enabled but without printing the diff (due to `no_log: true` - I set that to `false` for debugging purposes to see the error message).
### Actual Results
```console
<localhost> ESTABLISH LOCAL CONNECTION FOR USER: albalitz
<localhost> EXEC /bin/sh -c 'echo ~albalitz && sleep 0'
<localhost> EXEC /bin/sh -c '( umask 77 && mkdir -p "` echo /Users/albalitz/.ansible/tmp `"&& mkdir "` echo /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079 `" && echo ansible-tmp-1701782995.5552058-23181-40564253413079="` echo /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079 `" ) && sleep 0'
Using module file /opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible/modules/stat.py
<localhost> PUT /Users/albalitz/.ansible/tmp/ansible-local-23119muc1g04o/tmp8owc8yoz TO /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/AnsiballZ_stat.py
<localhost> EXEC /bin/sh -c 'chmod u+x /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/ /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/AnsiballZ_stat.py && sleep 0'
<localhost> EXEC /bin/sh -c '/opt/homebrew/Cellar/ansible/9.0.1/libexec/bin/python /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/AnsiballZ_stat.py && sleep 0'
<localhost> EXEC /bin/sh -c 'rm -f -r /Users/albalitz/.ansible/tmp/ansible-tmp-1701782995.5552058-23181-40564253413079/ > /dev/null 2>&1 && sleep 0'
The full traceback is:
Traceback (most recent call last):
File "/opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible/executor/task_executor.py", line 165, in run
res = self._execute()
^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible/executor/task_executor.py", line 641, in _execute
result = self._handler.run(task_vars=vars_copy)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/ansible/9.0.1/libexec/lib/python3.12/site-packages/ansible/plugins/action/assemble.py", line 144, in run
diff = self._get_diff_data(dest, path, task_vars)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
TypeError: ActionBase._get_diff_data() missing 1 required positional argument: 'content'
fatal: [shorewall-0 -> localhost]: FAILED! => {}
MSG:
Unexpected failure during module execution: ActionBase._get_diff_data() missing 1 required positional argument: 'content'
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82359
|
https://github.com/ansible/ansible/pull/82360
|
a9919dd7f62c9efe17b8acaebf7c627606ae9f66
|
7f2ad7eea673233223948e0d2a9fc5ee683040ce
| 2023-12-05T13:53:38Z |
python
| 2023-12-12T16:22:23Z |
test/integration/targets/assemble/tasks/main.yml
|
# test code for the assemble module
# (c) 2014, James Cammarata <[email protected]>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
- name: copy the files to a new directory
copy: src="./" dest="{{remote_tmp_dir}}/src"
register: result
- name: create unicode file for test
shell: echo "π" > {{ remote_tmp_dir }}/src/ßΩ.txt
register: result
- name: assert that the new file was created
assert:
that:
- "result.changed == true"
- name: test assemble with all fragments
assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled1"
register: result
- name: assert the fragments were assembled
assert:
that:
- "result.state == 'file'"
- "result.changed == True"
- "result.checksum == '74152e9224f774191bc0bedf460d35de86ad90e6'"
- name: test assemble with all fragments
assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled1"
register: result
- name: assert that the same assemble made no changes
assert:
that:
- "result.state == 'file'"
- "result.changed == False"
- "result.checksum == '74152e9224f774191bc0bedf460d35de86ad90e6'"
- name: test assemble with all fragments and decrypt=True
assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled2" decrypt=yes
register: result
- name: assert the fragments were assembled with decrypt=True
assert:
that:
- "result.state == 'file'"
- "result.changed == True"
- "result.checksum == '74152e9224f774191bc0bedf460d35de86ad90e6'"
- name: test assemble with all fragments and decrypt=True
assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled2" decrypt=yes
register: result
- name: assert that the same assemble made no changes with decrypt=True
assert:
that:
- "result.state == 'file'"
- "result.changed == False"
- "result.checksum == '74152e9224f774191bc0bedf460d35de86ad90e6'"
- name: test assemble with fragments matching a regex
assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled3" regexp="^fragment[1-3]$"
register: result
- name: assert the fragments were assembled with a regex
assert:
that:
- "result.state == 'file'"
- "result.checksum == 'edfe2d7487ef8f5ebc0f1c4dc57ba7b70a7b8e2b'"
- name: test assemble with fragments matching a regex and decrypt=True
assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled4" regexp="^fragment[1-3]$" decrypt=yes
register: result
- name: assert the fragments were assembled with a regex and decrypt=True
assert:
that:
- "result.state == 'file'"
- "result.checksum == 'edfe2d7487ef8f5ebc0f1c4dc57ba7b70a7b8e2b'"
- name: test assemble with a delimiter
assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled5" delimiter="#--- delimiter ---#"
register: result
- name: assert the fragments were assembled with a delimiter
assert:
that:
- "result.state == 'file'"
- "result.checksum == 'd986cefb82e34e4cf14d33a3cda132ff45aa2980'"
- name: test assemble with a delimiter and decrypt=True
assemble: src="{{remote_tmp_dir}}/src" dest="{{remote_tmp_dir}}/assembled6" delimiter="#--- delimiter ---#" decrypt=yes
register: result
- name: assert the fragments were assembled with a delimiter and decrypt=True
assert:
that:
- "result.state == 'file'"
- "result.checksum == 'd986cefb82e34e4cf14d33a3cda132ff45aa2980'"
- name: test assemble with remote_src=False
assemble: src="./" dest="{{remote_tmp_dir}}/assembled7" remote_src=no
register: result
- name: assert the fragments were assembled without remote
assert:
that:
- "result.state == 'file'"
- "result.checksum == '048a1bd1951aa5ccc427eeb4ca19aee45e9c68b3'"
- name: test assemble with remote_src=False and decrypt=True
assemble: src="./" dest="{{remote_tmp_dir}}/assembled8" remote_src=no decrypt=yes
register: result
- name: assert the fragments were assembled without remote and decrypt=True
assert:
that:
- "result.state == 'file'"
- "result.checksum == '048a1bd1951aa5ccc427eeb4ca19aee45e9c68b3'"
- name: test assemble with remote_src=False and a delimiter
assemble: src="./" dest="{{remote_tmp_dir}}/assembled9" remote_src=no delimiter="#--- delimiter ---#"
register: result
- name: assert the fragments were assembled without remote
assert:
that:
- "result.state == 'file'"
- "result.checksum == '505359f48c65b3904127cf62b912991d4da7ed6d'"
- name: test assemble with remote_src=False and a delimiter and decrypt=True
assemble: src="./" dest="{{remote_tmp_dir}}/assembled10" remote_src=no delimiter="#--- delimiter ---#" decrypt=yes
register: result
- name: assert the fragments were assembled without remote
assert:
that:
- "result.state == 'file'"
- "result.checksum == '505359f48c65b3904127cf62b912991d4da7ed6d'"
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,175 |
ansible-galaxy role import always exits 0 even if import failed
|
### Summary
When a role import fails, the galaxy cli code does not alter the exit code for the command accordingly. It is always zero.
https://github.com/ansible/ansible/blob/devel/lib/ansible/cli/galaxy.py#L1818C1-L1833C17
```
if context.CLIARGS['check_status'] or context.CLIARGS['wait']:
# Get the status of the import
msg_list = []
finished = False
while not finished:
task = self.api.get_import_task(task_id=task[0]['id'])
for msg in task[0]['summary_fields']['task_messages']:
if msg['id'] not in msg_list:
display.display(msg['message_text'], color=colors[msg['message_type']])
msg_list.append(msg['id'])
if task[0]['state'] in ['SUCCESS', 'FAILED']:
finished = True
else:
time.sleep(10)
return 0
```
The code knows the task is "finished" if state is either SUCCESS or FAILED, but the FAILED state does not affect the return value.
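A minimal sketch of one way to close that gap (assumed names; not necessarily the merged fix) is to derive the return value from the terminal task state instead of unconditionally returning 0:
```python
import time


def wait_for_import(get_import_task, task_id, poll_interval=10):
    """Poll a Galaxy import task until it reaches a terminal state and map that state to an exit code."""
    while True:
        task = get_import_task(task_id=task_id)
        state = task[0]['state']
        if state in ('SUCCESS', 'FAILED'):
            # 0 on success, non-zero on failure so shells and CI pipelines can detect broken imports
            return 0 if state == 'SUCCESS' else 1
        time.sleep(poll_interval)
```
Alternatively, the CLI could raise an `AnsibleError` when the final state is `FAILED`, which the ansible-galaxy entry point already converts into a non-zero exit code.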
### Issue Type
Bug Report
### Component Name
ansible-galaxy
### Ansible Version
```console
(venv) [jtanner@p1 galaxy_ng.role_exception_logging]$ ansible-galaxy --version
[WARNING]: You are running the development version of Ansible. You should only run Ansible from "devel" if you are modifying the Ansible engine, or trying
out features under development. This is a rapidly changing source of code and can become unstable at any point.
ansible-galaxy [core 2.17.0.dev0] (devel fd009a073a) last updated 2023/11/08 12:01:58 (GMT -400)
config file = /home/jtanner/workspace/github/jctanner.redhat/galaxy_ng.role_exception_logging/ansible.cfg
configured module search path = ['/home/jtanner/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/jtanner/workspace/github/jctanner.redhat/galaxy_ng.role_exception_logging/ansible.core/lib/ansible
ansible collection location = /home/jtanner/.ansible/collections:/usr/share/ansible/collections
executable location = /home/jtanner/workspace/github/jctanner.redhat/galaxy_ng.role_exception_logging/ansible.core/bin/ansible-galaxy
python version = 3.11.6 (main, Oct 3 2023, 00:00:00) [GCC 13.2.1 20230728 (Red Hat 13.2.1-1)] (/home/jtanner/workspace/github/jctanner.redhat/galaxy_ng.role_exception_logging/venv/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
N/A
```
### OS / Environment
Fedora 38
### Steps to Reproduce
1. Setup a galaxy server
2. ansible-galaxy role import -vvvv nephelaiio ansible-role-packetbeat
### Expected Results
The ansible-galaxy command should exit non-zero if the task is in a FAILED state.
### Actual Results
```console
exit 0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82175
|
https://github.com/ansible/ansible/pull/82193
|
7f2ad7eea673233223948e0d2a9fc5ee683040ce
|
fe81164fe548d79fbcd0024836d5f7474403c95d
| 2023-11-08T17:14:35Z |
python
| 2023-12-12T18:59:19Z |
changelogs/fragments/82175-fix-ansible-galaxy-role-import-rc.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,175 |
ansible-galaxy role import always exits 0 even if import failed
|
### Summary
When a role import fails, the galaxy cli code does not alter the exit code for the command accordingly. It is always zero.
https://github.com/ansible/ansible/blob/devel/lib/ansible/cli/galaxy.py#L1818C1-L1833C17
```
if context.CLIARGS['check_status'] or context.CLIARGS['wait']:
# Get the status of the import
msg_list = []
finished = False
while not finished:
task = self.api.get_import_task(task_id=task[0]['id'])
for msg in task[0]['summary_fields']['task_messages']:
if msg['id'] not in msg_list:
display.display(msg['message_text'], color=colors[msg['message_type']])
msg_list.append(msg['id'])
if task[0]['state'] in ['SUCCESS', 'FAILED']:
finished = True
else:
time.sleep(10)
return 0
```
The code knows the task is "finished" if state is either SUCCESS or FAILED, but the FAILED state does not affect the return value.
### Issue Type
Bug Report
### Component Name
ansible-galaxy
### Ansible Version
```console
(venv) [jtanner@p1 galaxy_ng.role_exception_logging]$ ansible-galaxy --version
[WARNING]: You are running the development version of Ansible. You should only run Ansible from "devel" if you are modifying the Ansible engine, or trying
out features under development. This is a rapidly changing source of code and can become unstable at any point.
ansible-galaxy [core 2.17.0.dev0] (devel fd009a073a) last updated 2023/11/08 12:01:58 (GMT -400)
config file = /home/jtanner/workspace/github/jctanner.redhat/galaxy_ng.role_exception_logging/ansible.cfg
configured module search path = ['/home/jtanner/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/jtanner/workspace/github/jctanner.redhat/galaxy_ng.role_exception_logging/ansible.core/lib/ansible
ansible collection location = /home/jtanner/.ansible/collections:/usr/share/ansible/collections
executable location = /home/jtanner/workspace/github/jctanner.redhat/galaxy_ng.role_exception_logging/ansible.core/bin/ansible-galaxy
python version = 3.11.6 (main, Oct 3 2023, 00:00:00) [GCC 13.2.1 20230728 (Red Hat 13.2.1-1)] (/home/jtanner/workspace/github/jctanner.redhat/galaxy_ng.role_exception_logging/venv/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
N/A
```
### OS / Environment
Fedora 38
### Steps to Reproduce
1. Setup a galaxy server
2. ansible-galaxy role import -vvvv nephelaiio ansible-role-packetbeat
### Expected Results
The ansible-galaxy command should exit non-zero if the task is in a FAILED state.
### Actual Results
```console
exit 0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82175
|
https://github.com/ansible/ansible/pull/82193
|
7f2ad7eea673233223948e0d2a9fc5ee683040ce
|
fe81164fe548d79fbcd0024836d5f7474403c95d
| 2023-11-08T17:14:35Z |
python
| 2023-12-12T18:59:19Z |
lib/ansible/cli/galaxy.py
|
#!/usr/bin/env python
# Copyright: (c) 2013, James Cammarata <[email protected]>
# Copyright: (c) 2018-2021, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# PYTHON_ARGCOMPLETE_OK
from __future__ import annotations
# ansible.cli needs to be imported first, to ensure the source bin/* scripts run that code first
from ansible.cli import CLI
import argparse
import functools
import json
import os.path
import pathlib
import re
import shutil
import sys
import textwrap
import time
import typing as t
from dataclasses import dataclass
from yaml.error import YAMLError
import ansible.constants as C
from ansible import context
from ansible.cli.arguments import option_helpers as opt_help
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.galaxy import Galaxy, get_collections_galaxy_meta_info
from ansible.galaxy.api import GalaxyAPI, GalaxyError
from ansible.galaxy.collection import (
build_collection,
download_collections,
find_existing_collections,
install_collections,
publish_collection,
validate_collection_name,
validate_collection_path,
verify_collections,
SIGNATURE_COUNT_RE,
)
from ansible.galaxy.collection.concrete_artifact_manager import (
ConcreteArtifactsManager,
)
from ansible.galaxy.collection.gpg import GPG_ERROR_MAP
from ansible.galaxy.dependency_resolution.dataclasses import Requirement
from ansible.galaxy.role import GalaxyRole
from ansible.galaxy.token import BasicAuthToken, GalaxyToken, KeycloakToken, NoTokenSentinel
from ansible.module_utils.ansible_release import __version__ as ansible_version
from ansible.module_utils.common.collections import is_iterable
from ansible.module_utils.common.yaml import yaml_dump, yaml_load
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils import six
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.playbook.role.requirement import RoleRequirement
from ansible.template import Templar
from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.display import Display
from ansible.utils.plugin_docs import get_versioned_doclink
display = Display()
urlparse = six.moves.urllib.parse.urlparse
# config definition by position: name, required, type
SERVER_DEF = [
('url', True, 'str'),
('username', False, 'str'),
('password', False, 'str'),
('token', False, 'str'),
('auth_url', False, 'str'),
('api_version', False, 'int'),
('validate_certs', False, 'bool'),
('client_id', False, 'str'),
('timeout', False, 'int'),
]
# config definition fields
SERVER_ADDITIONAL = {
'api_version': {'default': None, 'choices': [2, 3]},
'validate_certs': {'cli': [{'name': 'validate_certs'}]},
'timeout': {'default': C.GALAXY_SERVER_TIMEOUT, 'cli': [{'name': 'timeout'}]},
'token': {'default': None},
}
def with_collection_artifacts_manager(wrapped_method):
"""Inject an artifacts manager if not passed explicitly.
This decorator constructs a ConcreteArtifactsManager and maintains
the related temporary directory auto-cleanup around the target
method invocation.
"""
@functools.wraps(wrapped_method)
def method_wrapper(*args, **kwargs):
if 'artifacts_manager' in kwargs:
return wrapped_method(*args, **kwargs)
# FIXME: use validate_certs context from Galaxy servers when downloading collections
# .get used here for when this is used in a non-CLI context
artifacts_manager_kwargs = {'validate_certs': context.CLIARGS.get('resolved_validate_certs', True)}
keyring = context.CLIARGS.get('keyring', None)
if keyring is not None:
artifacts_manager_kwargs.update({
'keyring': GalaxyCLI._resolve_path(keyring),
'required_signature_count': context.CLIARGS.get('required_valid_signature_count', None),
'ignore_signature_errors': context.CLIARGS.get('ignore_gpg_errors', None),
})
with ConcreteArtifactsManager.under_tmpdir(
C.DEFAULT_LOCAL_TMP,
**artifacts_manager_kwargs
) as concrete_artifact_cm:
kwargs['artifacts_manager'] = concrete_artifact_cm
return wrapped_method(*args, **kwargs)
return method_wrapper
def _display_header(path, h1, h2, w1=10, w2=7):
display.display('\n# {0}\n{1:{cwidth}} {2:{vwidth}}\n{3} {4}\n'.format(
path,
h1,
h2,
'-' * max([len(h1), w1]), # Make sure that the number of dashes is at least the width of the header
'-' * max([len(h2), w2]),
cwidth=w1,
vwidth=w2,
))
def _display_role(gr):
install_info = gr.install_info
version = None
if install_info:
version = install_info.get("version", None)
if not version:
version = "(unknown version)"
display.display("- %s, %s" % (gr.name, version))
def _display_collection(collection, cwidth=10, vwidth=7, min_cwidth=10, min_vwidth=7):
display.display('{fqcn:{cwidth}} {version:{vwidth}}'.format(
fqcn=to_text(collection.fqcn),
version=collection.ver,
cwidth=max(cwidth, min_cwidth), # Make sure the width isn't smaller than the header
vwidth=max(vwidth, min_vwidth)
))
def _get_collection_widths(collections):
if not is_iterable(collections):
collections = (collections, )
fqcn_set = {to_text(c.fqcn) for c in collections}
version_set = {to_text(c.ver) for c in collections}
fqcn_length = len(max(fqcn_set or [''], key=len))
version_length = len(max(version_set or [''], key=len))
return fqcn_length, version_length
def validate_signature_count(value):
match = re.match(SIGNATURE_COUNT_RE, value)
if match is None:
raise ValueError(f"{value} is not a valid signature count value")
return value
@dataclass
class RoleDistributionServer:
_api: t.Union[GalaxyAPI, None]
api_servers: list[GalaxyAPI]
@property
def api(self):
if self._api:
return self._api
for server in self.api_servers:
try:
if u'v1' in server.available_api_versions:
self._api = server
break
except Exception:
continue
if not self._api:
self._api = self.api_servers[0]
return self._api
class GalaxyCLI(CLI):
'''Command to manage Ansible roles and collections.
None of the CLI tools are designed to run concurrently with themselves.
Use an external scheduler and/or locking to ensure there are no clashing operations.
'''
name = 'ansible-galaxy'
SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url")
def __init__(self, args):
self._raw_args = args
self._implicit_role = False
if len(args) > 1:
# Inject role into sys.argv[1] as a backwards compatibility step
if args[1] not in ['-h', '--help', '--version'] and 'role' not in args and 'collection' not in args:
# TODO: Should we add a warning here and eventually deprecate the implicit role subcommand choice
args.insert(1, 'role')
self._implicit_role = True
# since argparse doesn't allow hidden subparsers, handle dead login arg from raw args after "role" normalization
if args[1:3] == ['role', 'login']:
display.error(
"The login command was removed in late 2020. An API key is now required to publish roles or collections "
"to Galaxy. The key can be found at https://galaxy.ansible.com/me/preferences, and passed to the "
"ansible-galaxy CLI via a file at {0} or (insecurely) via the `--token` "
"command-line argument.".format(to_text(C.GALAXY_TOKEN_PATH)))
sys.exit(1)
self.api_servers = []
self.galaxy = None
self.lazy_role_api = None
super(GalaxyCLI, self).__init__(args)
def init_parser(self):
''' create an options parser for bin/ansible '''
super(GalaxyCLI, self).init_parser(
desc="Perform various Role and Collection related operations.",
)
# Common arguments that apply to more than 1 action
common = opt_help.ArgumentParser(add_help=False)
common.add_argument('-s', '--server', dest='api_server', help='The Galaxy API server URL')
common.add_argument('--api-version', type=int, choices=[2, 3], help=argparse.SUPPRESS) # Hidden argument that should only be used in our tests
common.add_argument('--token', '--api-key', dest='api_key',
help='The Ansible Galaxy API key which can be found at '
'https://galaxy.ansible.com/me/preferences.')
common.add_argument('-c', '--ignore-certs', action='store_true', dest='ignore_certs', help='Ignore SSL certificate validation errors.', default=None)
# --timeout uses the default None to handle two different scenarios.
# * --timeout > C.GALAXY_SERVER_TIMEOUT for non-configured servers
# * --timeout > server-specific timeout > C.GALAXY_SERVER_TIMEOUT for configured servers.
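# For example: `--timeout 120` on the CLI wins everywhere; with no CLI value, a configured
# server's own `timeout` (see SERVER_ADDITIONAL) is used before the global default.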
common.add_argument('--timeout', dest='timeout', type=int,
help="The time to wait for operations against the galaxy server, defaults to 60s.")
opt_help.add_verbosity_options(common)
force = opt_help.ArgumentParser(add_help=False)
force.add_argument('-f', '--force', dest='force', action='store_true', default=False,
help='Force overwriting an existing role or collection')
github = opt_help.ArgumentParser(add_help=False)
github.add_argument('github_user', help='GitHub username')
github.add_argument('github_repo', help='GitHub repository')
offline = opt_help.ArgumentParser(add_help=False)
offline.add_argument('--offline', dest='offline', default=False, action='store_true',
help="Don't query the galaxy API when creating roles")
default_roles_path = C.config.get_configuration_definition('DEFAULT_ROLES_PATH').get('default', '')
roles_path = opt_help.ArgumentParser(add_help=False)
roles_path.add_argument('-p', '--roles-path', dest='roles_path', type=opt_help.unfrack_path(pathsep=True),
default=C.DEFAULT_ROLES_PATH, action=opt_help.PrependListAction,
help='The path to the directory containing your roles. The default is the first '
'writable one configured via DEFAULT_ROLES_PATH: %s ' % default_roles_path)
collections_path = opt_help.ArgumentParser(add_help=False)
collections_path.add_argument('-p', '--collections-path', dest='collections_path', type=opt_help.unfrack_path(pathsep=True),
action=opt_help.PrependListAction,
help="One or more directories to search for collections in addition "
"to the default COLLECTIONS_PATHS. Separate multiple paths "
"with '{0}'.".format(os.path.pathsep))
cache_options = opt_help.ArgumentParser(add_help=False)
cache_options.add_argument('--clear-response-cache', dest='clear_response_cache', action='store_true',
default=False, help='Clear the existing server response cache.')
cache_options.add_argument('--no-cache', dest='no_cache', action='store_true', default=False,
help='Do not use the server response cache.')
# Add sub parser for the Galaxy role type (role or collection)
type_parser = self.parser.add_subparsers(metavar='TYPE', dest='type')
type_parser.required = True
# Add sub parser for the Galaxy collection actions
collection = type_parser.add_parser('collection', help='Manage an Ansible Galaxy collection.')
collection.set_defaults(func=self.execute_collection) # to satisfy doc build
collection_parser = collection.add_subparsers(metavar='COLLECTION_ACTION', dest='action')
collection_parser.required = True
self.add_download_options(collection_parser, parents=[common, cache_options])
self.add_init_options(collection_parser, parents=[common, force])
self.add_build_options(collection_parser, parents=[common, force])
self.add_publish_options(collection_parser, parents=[common])
self.add_install_options(collection_parser, parents=[common, force, cache_options])
self.add_list_options(collection_parser, parents=[common, collections_path])
self.add_verify_options(collection_parser, parents=[common, collections_path])
# Add sub parser for the Galaxy role actions
role = type_parser.add_parser('role', help='Manage an Ansible Galaxy role.')
role.set_defaults(func=self.execute_role) # to satisfy doc build
role_parser = role.add_subparsers(metavar='ROLE_ACTION', dest='action')
role_parser.required = True
self.add_init_options(role_parser, parents=[common, force, offline])
self.add_remove_options(role_parser, parents=[common, roles_path])
self.add_delete_options(role_parser, parents=[common, github])
self.add_list_options(role_parser, parents=[common, roles_path])
self.add_search_options(role_parser, parents=[common])
self.add_import_options(role_parser, parents=[common, github])
self.add_setup_options(role_parser, parents=[common, roles_path])
self.add_info_options(role_parser, parents=[common, roles_path, offline])
self.add_install_options(role_parser, parents=[common, force, roles_path])
def add_download_options(self, parser, parents=None):
download_parser = parser.add_parser('download', parents=parents,
help='Download collections and their dependencies as a tarball for an '
'offline install.')
download_parser.set_defaults(func=self.execute_download)
download_parser.add_argument('args', help='Collection(s)', metavar='collection', nargs='*')
download_parser.add_argument('-n', '--no-deps', dest='no_deps', action='store_true', default=False,
help="Don't download collection(s) listed as dependencies.")
download_parser.add_argument('-p', '--download-path', dest='download_path',
default='./collections',
help='The directory to download the collections to.')
download_parser.add_argument('-r', '--requirements-file', dest='requirements',
help='A file containing a list of collections to be downloaded.')
download_parser.add_argument('--pre', dest='allow_pre_release', action='store_true',
help='Include pre-release versions. Semantic versioning pre-releases are ignored by default')
def add_init_options(self, parser, parents=None):
galaxy_type = 'collection' if parser.metavar == 'COLLECTION_ACTION' else 'role'
init_parser = parser.add_parser('init', parents=parents,
help='Initialize new {0} with the base structure of a '
'{0}.'.format(galaxy_type))
init_parser.set_defaults(func=self.execute_init)
init_parser.add_argument('--init-path', dest='init_path', default='./',
help='The path in which the skeleton {0} will be created. The default is the '
'current working directory.'.format(galaxy_type))
init_parser.add_argument('--{0}-skeleton'.format(galaxy_type), dest='{0}_skeleton'.format(galaxy_type),
default=C.GALAXY_COLLECTION_SKELETON if galaxy_type == 'collection' else C.GALAXY_ROLE_SKELETON,
help='The path to a {0} skeleton that the new {0} should be based '
'upon.'.format(galaxy_type))
obj_name_kwargs = {}
if galaxy_type == 'collection':
obj_name_kwargs['type'] = validate_collection_name
init_parser.add_argument('{0}_name'.format(galaxy_type), help='{0} name'.format(galaxy_type.capitalize()),
**obj_name_kwargs)
if galaxy_type == 'role':
init_parser.add_argument('--type', dest='role_type', action='store', default='default',
help="Initialize using an alternate role type. Valid types include: 'container', "
"'apb' and 'network'.")
def add_remove_options(self, parser, parents=None):
remove_parser = parser.add_parser('remove', parents=parents, help='Delete roles from roles_path.')
remove_parser.set_defaults(func=self.execute_remove)
remove_parser.add_argument('args', help='Role(s)', metavar='role', nargs='+')
def add_delete_options(self, parser, parents=None):
delete_parser = parser.add_parser('delete', parents=parents,
help='Removes the role from Galaxy. It does not remove or alter the actual '
'GitHub repository.')
delete_parser.set_defaults(func=self.execute_delete)
def add_list_options(self, parser, parents=None):
galaxy_type = 'role'
if parser.metavar == 'COLLECTION_ACTION':
galaxy_type = 'collection'
list_parser = parser.add_parser('list', parents=parents,
help='Show the name and version of each {0} installed in the {0}s_path.'.format(galaxy_type))
list_parser.set_defaults(func=self.execute_list)
list_parser.add_argument(galaxy_type, help=galaxy_type.capitalize(), nargs='?', metavar=galaxy_type)
if galaxy_type == 'collection':
list_parser.add_argument('--format', dest='output_format', choices=('human', 'yaml', 'json'), default='human',
help="Format to display the list of collections in.")
def add_search_options(self, parser, parents=None):
search_parser = parser.add_parser('search', parents=parents,
help='Search the Galaxy database by tags, platforms, author and multiple '
'keywords.')
search_parser.set_defaults(func=self.execute_search)
search_parser.add_argument('--platforms', dest='platforms', help='list of OS platforms to filter by')
search_parser.add_argument('--galaxy-tags', dest='galaxy_tags', help='list of galaxy tags to filter by')
search_parser.add_argument('--author', dest='author', help='GitHub username')
search_parser.add_argument('args', help='Search terms', metavar='searchterm', nargs='*')
def add_import_options(self, parser, parents=None):
import_parser = parser.add_parser('import', parents=parents, help='Import a role into a galaxy server')
import_parser.set_defaults(func=self.execute_import)
import_parser.add_argument('--no-wait', dest='wait', action='store_false', default=True,
help="Don't wait for import results.")
import_parser.add_argument('--branch', dest='reference',
help='The name of a branch to import. Defaults to the repository\'s default branch '
'(usually master)')
import_parser.add_argument('--role-name', dest='role_name',
help='The name the role should have, if different than the repo name')
import_parser.add_argument('--status', dest='check_status', action='store_true', default=False,
help='Check the status of the most recent import request for given github_'
'user/github_repo.')
def add_setup_options(self, parser, parents=None):
setup_parser = parser.add_parser('setup', parents=parents,
help='Manage the integration between Galaxy and the given source.')
setup_parser.set_defaults(func=self.execute_setup)
setup_parser.add_argument('--remove', dest='remove_id', default=None,
help='Remove the integration matching the provided ID value. Use --list to see '
'ID values.')
setup_parser.add_argument('--list', dest="setup_list", action='store_true', default=False,
help='List all of your integrations.')
setup_parser.add_argument('source', help='Source')
setup_parser.add_argument('github_user', help='GitHub username')
setup_parser.add_argument('github_repo', help='GitHub repository')
setup_parser.add_argument('secret', help='Secret')
def add_info_options(self, parser, parents=None):
info_parser = parser.add_parser('info', parents=parents, help='View more details about a specific role.')
info_parser.set_defaults(func=self.execute_info)
info_parser.add_argument('args', nargs='+', help='role', metavar='role_name[,version]')
def add_verify_options(self, parser, parents=None):
galaxy_type = 'collection'
verify_parser = parser.add_parser('verify', parents=parents, help='Compare checksums with the collection(s) '
'found on the server and the installed copy. This does not verify dependencies.')
verify_parser.set_defaults(func=self.execute_verify)
verify_parser.add_argument('args', metavar='{0}_name'.format(galaxy_type), nargs='*', help='The installed collection(s) name. '
'This is mutually exclusive with --requirements-file.')
verify_parser.add_argument('-i', '--ignore-errors', dest='ignore_errors', action='store_true', default=False,
help='Ignore errors during verification and continue with the next specified collection.')
verify_parser.add_argument('--offline', dest='offline', action='store_true', default=False,
help='Validate collection integrity locally without contacting server for '
'canonical manifest hash.')
verify_parser.add_argument('-r', '--requirements-file', dest='requirements',
help='A file containing a list of collections to be verified.')
verify_parser.add_argument('--keyring', dest='keyring', default=C.GALAXY_GPG_KEYRING,
help='The keyring used during signature verification') # Eventually default to ~/.ansible/pubring.kbx?
verify_parser.add_argument('--signature', dest='signatures', action='append',
help='An additional signature source to verify the authenticity of the MANIFEST.json before using '
'it to verify the rest of the contents of a collection from a Galaxy server. Use in '
'conjunction with a positional collection name (mutually exclusive with --requirements-file).')
valid_signature_count_help = 'The number of signatures that must successfully verify the collection. This should be a positive integer ' \
'or all to signify that all signatures must be used to verify the collection. ' \
'Prepend the value with + to fail if no valid signatures are found for the collection (e.g. +all).'
ignore_gpg_status_help = 'A space separated list of status codes to ignore during signature verification (for example, NO_PUBKEY FAILURE). ' \
'Descriptions for the choices can be seen at L(https://github.com/gpg/gnupg/blob/master/doc/DETAILS#general-status-codes). ' \
'Note: specify these after positional arguments or use -- to separate them.'
verify_parser.add_argument('--required-valid-signature-count', dest='required_valid_signature_count', type=validate_signature_count,
help=valid_signature_count_help, default=C.GALAXY_REQUIRED_VALID_SIGNATURE_COUNT)
verify_parser.add_argument('--ignore-signature-status-code', dest='ignore_gpg_errors', type=str, action='append',
help=opt_help.argparse.SUPPRESS, default=C.GALAXY_IGNORE_INVALID_SIGNATURE_STATUS_CODES,
choices=list(GPG_ERROR_MAP.keys()))
verify_parser.add_argument('--ignore-signature-status-codes', dest='ignore_gpg_errors', type=str, action='extend', nargs='+',
help=ignore_gpg_status_help, default=C.GALAXY_IGNORE_INVALID_SIGNATURE_STATUS_CODES,
choices=list(GPG_ERROR_MAP.keys()))
def add_install_options(self, parser, parents=None):
galaxy_type = 'collection' if parser.metavar == 'COLLECTION_ACTION' else 'role'
args_kwargs = {}
if galaxy_type == 'collection':
args_kwargs['help'] = 'The collection(s) name or path/url to a tar.gz collection artifact. This is ' \
'mutually exclusive with --requirements-file.'
ignore_errors_help = 'Ignore errors during installation and continue with the next specified ' \
'collection. This will not ignore dependency conflict errors.'
else:
args_kwargs['help'] = 'Role name, URL or tar file'
ignore_errors_help = 'Ignore errors and continue with the next specified role.'
install_parser = parser.add_parser('install', parents=parents,
help='Install {0}(s) from file(s), URL(s) or Ansible '
'Galaxy'.format(galaxy_type))
install_parser.set_defaults(func=self.execute_install)
install_parser.add_argument('args', metavar='{0}_name'.format(galaxy_type), nargs='*', **args_kwargs)
install_parser.add_argument('-i', '--ignore-errors', dest='ignore_errors', action='store_true', default=False,
help=ignore_errors_help)
install_exclusive = install_parser.add_mutually_exclusive_group()
install_exclusive.add_argument('-n', '--no-deps', dest='no_deps', action='store_true', default=False,
help="Don't download {0}s listed as dependencies.".format(galaxy_type))
install_exclusive.add_argument('--force-with-deps', dest='force_with_deps', action='store_true', default=False,
help="Force overwriting an existing {0} and its "
"dependencies.".format(galaxy_type))
valid_signature_count_help = 'The number of signatures that must successfully verify the collection. This should be a positive integer ' \
'or all to signify that all signatures must be used to verify the collection. ' \
'Prepend the value with + to fail if no valid signatures are found for the collection (e.g. +all).'
ignore_gpg_status_help = 'A space separated list of status codes to ignore during signature verification (for example, NO_PUBKEY FAILURE). ' \
'Descriptions for the choices can be seen at L(https://github.com/gpg/gnupg/blob/master/doc/DETAILS#general-status-codes). ' \
'Note: specify these after positional arguments or use -- to separate them.'
if galaxy_type == 'collection':
install_parser.add_argument('-p', '--collections-path', dest='collections_path',
default=self._get_default_collection_path(),
help='The path to the directory containing your collections.')
install_parser.add_argument('-r', '--requirements-file', dest='requirements',
help='A file containing a list of collections to be installed.')
install_parser.add_argument('--pre', dest='allow_pre_release', action='store_true',
help='Include pre-release versions. Semantic versioning pre-releases are ignored by default')
install_parser.add_argument('-U', '--upgrade', dest='upgrade', action='store_true', default=False,
help='Upgrade installed collection artifacts. This will also update dependencies unless --no-deps is provided')
install_parser.add_argument('--keyring', dest='keyring', default=C.GALAXY_GPG_KEYRING,
help='The keyring used during signature verification') # Eventually default to ~/.ansible/pubring.kbx?
install_parser.add_argument('--disable-gpg-verify', dest='disable_gpg_verify', action='store_true',
default=C.GALAXY_DISABLE_GPG_VERIFY,
help='Disable GPG signature verification when installing collections from a Galaxy server')
install_parser.add_argument('--signature', dest='signatures', action='append',
help='An additional signature source to verify the authenticity of the MANIFEST.json before '
'installing the collection from a Galaxy server. Use in conjunction with a positional '
'collection name (mutually exclusive with --requirements-file).')
install_parser.add_argument('--required-valid-signature-count', dest='required_valid_signature_count', type=validate_signature_count,
help=valid_signature_count_help, default=C.GALAXY_REQUIRED_VALID_SIGNATURE_COUNT)
install_parser.add_argument('--ignore-signature-status-code', dest='ignore_gpg_errors', type=str, action='append',
help=opt_help.argparse.SUPPRESS, default=C.GALAXY_IGNORE_INVALID_SIGNATURE_STATUS_CODES,
choices=list(GPG_ERROR_MAP.keys()))
install_parser.add_argument('--ignore-signature-status-codes', dest='ignore_gpg_errors', type=str, action='extend', nargs='+',
help=ignore_gpg_status_help, default=C.GALAXY_IGNORE_INVALID_SIGNATURE_STATUS_CODES,
choices=list(GPG_ERROR_MAP.keys()))
install_parser.add_argument('--offline', dest='offline', action='store_true', default=False,
help='Install collection artifacts (tarballs) without contacting any distribution servers. '
'This does not apply to collections in remote Git repositories or URLs to remote tarballs.'
)
else:
install_parser.add_argument('-r', '--role-file', dest='requirements',
help='A file containing a list of roles to be installed.')
r_re = re.compile(r'^(?<!-)-[a-zA-Z]*r[a-zA-Z]*') # -r, -fr
contains_r = bool([a for a in self._raw_args if r_re.match(a)])
role_file_re = re.compile(r'--role-file($|=)') # --role-file foo, --role-file=foo
contains_role_file = bool([a for a in self._raw_args if role_file_re.match(a)])
if self._implicit_role and (contains_r or contains_role_file):
# Any collections in the requirements files will also be installed
install_parser.add_argument('--keyring', dest='keyring', default=C.GALAXY_GPG_KEYRING,
help='The keyring used during collection signature verification')
install_parser.add_argument('--disable-gpg-verify', dest='disable_gpg_verify', action='store_true',
default=C.GALAXY_DISABLE_GPG_VERIFY,
help='Disable GPG signature verification when installing collections from a Galaxy server')
install_parser.add_argument('--required-valid-signature-count', dest='required_valid_signature_count', type=validate_signature_count,
help=valid_signature_count_help, default=C.GALAXY_REQUIRED_VALID_SIGNATURE_COUNT)
install_parser.add_argument('--ignore-signature-status-code', dest='ignore_gpg_errors', type=str, action='append',
help=opt_help.argparse.SUPPRESS, default=C.GALAXY_IGNORE_INVALID_SIGNATURE_STATUS_CODES,
choices=list(GPG_ERROR_MAP.keys()))
install_parser.add_argument('--ignore-signature-status-codes', dest='ignore_gpg_errors', type=str, action='extend', nargs='+',
help=ignore_gpg_status_help, default=C.GALAXY_IGNORE_INVALID_SIGNATURE_STATUS_CODES,
choices=list(GPG_ERROR_MAP.keys()))
install_parser.add_argument('-g', '--keep-scm-meta', dest='keep_scm_meta', action='store_true',
default=False,
help='Use tar instead of the scm archive option when packaging the role.')
def add_build_options(self, parser, parents=None):
build_parser = parser.add_parser('build', parents=parents,
help='Build an Ansible collection artifact that can be published to Ansible '
'Galaxy.')
build_parser.set_defaults(func=self.execute_build)
build_parser.add_argument('args', metavar='collection', nargs='*', default=('.',),
help='Path to the collection(s) directory to build. This should be the directory '
'that contains the galaxy.yml file. The default is the current working '
'directory.')
build_parser.add_argument('--output-path', dest='output_path', default='./',
help='The path in which the collection is built. The default is the current '
'working directory.')
def add_publish_options(self, parser, parents=None):
publish_parser = parser.add_parser('publish', parents=parents,
help='Publish a collection artifact to Ansible Galaxy.')
publish_parser.set_defaults(func=self.execute_publish)
publish_parser.add_argument('args', metavar='collection_path',
help='The path to the collection tarball to publish.')
publish_parser.add_argument('--no-wait', dest='wait', action='store_false', default=True,
help="Don't wait for import validation results.")
publish_parser.add_argument('--import-timeout', dest='import_timeout', type=int, default=0,
help="The time to wait for the collection import process to finish.")
def post_process_args(self, options):
options = super(GalaxyCLI, self).post_process_args(options)
# ensure we have 'usable' cli option
setattr(options, 'validate_certs', (None if options.ignore_certs is None else not options.ignore_certs))
# the default if validate_certs is None
setattr(options, 'resolved_validate_certs', (options.validate_certs if options.validate_certs is not None else not C.GALAXY_IGNORE_CERTS))
display.verbosity = options.verbosity
return options
def run(self):
super(GalaxyCLI, self).run()
self.galaxy = Galaxy()
def server_config_def(section, key, required, option_type):
config_def = {
'description': 'The %s of the %s Galaxy server' % (key, section),
'ini': [
{
'section': 'galaxy_server.%s' % section,
'key': key,
}
],
'env': [
{'name': 'ANSIBLE_GALAXY_SERVER_%s_%s' % (section.upper(), key.upper())},
],
'required': required,
'type': option_type,
}
if key in SERVER_ADDITIONAL:
config_def.update(SERVER_ADDITIONAL[key])
return config_def
galaxy_options = {}
for optional_key in ['clear_response_cache', 'no_cache']:
if optional_key in context.CLIARGS:
galaxy_options[optional_key] = context.CLIARGS[optional_key]
config_servers = []
# Need to filter out empty strings or non truthy values as an empty server list env var is equal to [''].
server_list = [s for s in C.GALAXY_SERVER_LIST or [] if s]
for server_priority, server_key in enumerate(server_list, start=1):
# Abuse the 'plugin config' by making 'galaxy_server' a type of plugin
# Config definitions are looked up dynamically based on the C.GALAXY_SERVER_LIST entry. We look up the
# section [galaxy_server.<server>] for the values url, username, password, and token.
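# The same values can also come from the environment via server_config_def above, e.g. for a
# server named 'release_galaxy': ANSIBLE_GALAXY_SERVER_RELEASE_GALAXY_URL (illustrative name).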
config_dict = dict((k, server_config_def(server_key, k, req, ensure_type)) for k, req, ensure_type in SERVER_DEF)
defs = AnsibleLoader(yaml_dump(config_dict)).get_single_data()
C.config.initialize_plugin_configuration_definitions('galaxy_server', server_key, defs)
# resolve the config created options above with existing config and user options
server_options = C.config.get_plugin_options('galaxy_server', server_key)
# auth_url is used to create the token, but not directly by GalaxyAPI, so it
# doesn't need to be passed as a kwarg to GalaxyAPI; the same applies to the other options we pop here
auth_url = server_options.pop('auth_url')
client_id = server_options.pop('client_id')
token_val = server_options['token'] or NoTokenSentinel
username = server_options['username']
api_version = server_options.pop('api_version')
if server_options['validate_certs'] is None:
server_options['validate_certs'] = context.CLIARGS['resolved_validate_certs']
validate_certs = server_options['validate_certs']
# This allows a user to explicitly force use of an API version when
# multiple versions are supported. This was added for testing
# against pulp_ansible and I'm not sure it has a practical purpose
# outside of this use case. As such, this option is not documented
# as of now
if api_version:
display.warning(
f'The specified "api_version" configuration for the galaxy server "{server_key}" is '
'not a public configuration, and may be removed at any time without warning.'
)
server_options['available_api_versions'] = {'v%s' % api_version: '/v%s' % api_version}
# default case if no auth info is provided.
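# Token precedence (summary of the branches below): username/password -> BasicAuthToken;
# token + auth_url -> KeycloakToken; bare token -> GalaxyToken; otherwise no token is attached.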
server_options['token'] = None
if username:
server_options['token'] = BasicAuthToken(username, server_options['password'])
else:
if token_val:
if auth_url:
server_options['token'] = KeycloakToken(access_token=token_val,
auth_url=auth_url,
validate_certs=validate_certs,
client_id=client_id)
else:
# The galaxy v1 / github / django / 'Token'
server_options['token'] = GalaxyToken(token=token_val)
server_options.update(galaxy_options)
config_servers.append(GalaxyAPI(
self.galaxy, server_key,
priority=server_priority,
**server_options
))
cmd_server = context.CLIARGS['api_server']
if context.CLIARGS['api_version']:
api_version = context.CLIARGS['api_version']
display.warning(
'The --api-version is not a public argument, and may be removed at any time without warning.'
)
galaxy_options['available_api_versions'] = {'v%s' % api_version: '/v%s' % api_version}
cmd_token = GalaxyToken(token=context.CLIARGS['api_key'])
validate_certs = context.CLIARGS['resolved_validate_certs']
default_server_timeout = context.CLIARGS['timeout'] if context.CLIARGS['timeout'] is not None else C.GALAXY_SERVER_TIMEOUT
if cmd_server:
# Cmd args take precedence over the config entry but first check if the arg was a name and use that config
# entry, otherwise create a new API entry for the server specified.
config_server = next((s for s in config_servers if s.name == cmd_server), None)
if config_server:
self.api_servers.append(config_server)
else:
self.api_servers.append(GalaxyAPI(
self.galaxy, 'cmd_arg', cmd_server, token=cmd_token,
priority=len(config_servers) + 1,
validate_certs=validate_certs,
timeout=default_server_timeout,
**galaxy_options
))
else:
self.api_servers = config_servers
# Default to C.GALAXY_SERVER if no servers were defined
if len(self.api_servers) == 0:
self.api_servers.append(GalaxyAPI(
self.galaxy, 'default', C.GALAXY_SERVER, token=cmd_token,
priority=0,
validate_certs=validate_certs,
timeout=default_server_timeout,
**galaxy_options
))
# checks api versions once a GalaxyRole makes an api call
# self.api can be used to evaluate the best server immediately
self.lazy_role_api = RoleDistributionServer(None, self.api_servers)
return context.CLIARGS['func']()
@property
def api(self):
return self.lazy_role_api.api
def _get_default_collection_path(self):
return C.COLLECTIONS_PATHS[0]
def _parse_requirements_file(self, requirements_file, allow_old_format=True, artifacts_manager=None, validate_signature_options=True):
"""
Parses an Ansible requirements.yml file and returns all the roles and/or collections defined in it. There are 2
requirements file formats:
# v1 (roles only)
- src: The source of the role, required if include is not set. Can be Galaxy role name, URL to a SCM repo or tarball.
name: Downloads the role to the specified name, defaults to the name from Galaxy or the name of the repo if src is a URL.
scm: If src is a URL, specify the SCM. Only git or hg are supported and defaults to git.
version: The version of the role to download. Can also be tag, commit, or branch name and defaults to master.
include: Path to additional requirements.yml files.
# v2 (roles and collections)
---
roles:
# Same as v1 format just under the roles key
collections:
- namespace.collection
- name: namespace.collection
version: version identifier, multiple identifiers are separated by ','
source: the URL or a predefined source name that relates to C.GALAXY_SERVER_LIST
type: git|file|url|galaxy
:param requirements_file: The path to the requirements file.
:param allow_old_format: Will fail if a v1 requirements file is found and this is set to False.
:param artifacts_manager: Artifacts manager.
:return: a dict containing the roles and collections found in the requirements file.
"""
requirements = {
'roles': [],
'collections': [],
}
b_requirements_file = to_bytes(requirements_file, errors='surrogate_or_strict')
if not os.path.exists(b_requirements_file):
raise AnsibleError("The requirements file '%s' does not exist." % to_native(requirements_file))
display.vvv("Reading requirement file at '%s'" % requirements_file)
with open(b_requirements_file, 'rb') as req_obj:
try:
file_requirements = yaml_load(req_obj)
except YAMLError as err:
raise AnsibleError(
"Failed to parse the requirements yml at '%s' with the following error:\n%s"
% (to_native(requirements_file), to_native(err)))
if file_requirements is None:
raise AnsibleError("No requirements found in file '%s'" % to_native(requirements_file))
def parse_role_req(requirement):
if "include" not in requirement:
role = RoleRequirement.role_yaml_parse(requirement)
display.vvv("found role %s in yaml file" % to_text(role))
if "name" not in role and "src" not in role:
raise AnsibleError("Must specify name or src for role")
return [GalaxyRole(self.galaxy, self.lazy_role_api, **role)]
else:
b_include_path = to_bytes(requirement["include"], errors="surrogate_or_strict")
if not os.path.isfile(b_include_path):
raise AnsibleError("Failed to find include requirements file '%s' in '%s'"
% (to_native(b_include_path), to_native(requirements_file)))
with open(b_include_path, 'rb') as f_include:
try:
return [GalaxyRole(self.galaxy, self.lazy_role_api, **r) for r in
(RoleRequirement.role_yaml_parse(i) for i in yaml_load(f_include))]
except Exception as e:
raise AnsibleError("Unable to load data from include requirements file: %s %s"
% (to_native(requirements_file), to_native(e)))
if isinstance(file_requirements, list):
# Older format that contains only roles
if not allow_old_format:
raise AnsibleError("Expecting requirements file to be a dict with the key 'collections' that contains "
"a list of collections to install")
for role_req in file_requirements:
requirements['roles'] += parse_role_req(role_req)
elif isinstance(file_requirements, dict):
# Newer format with a collections and/or roles key
extra_keys = set(file_requirements.keys()).difference(set(['roles', 'collections']))
if extra_keys:
raise AnsibleError("Expecting only 'roles' and/or 'collections' as base keys in the requirements "
"file. Found: %s" % (to_native(", ".join(extra_keys))))
for role_req in file_requirements.get('roles') or []:
requirements['roles'] += parse_role_req(role_req)
requirements['collections'] = [
Requirement.from_requirement_dict(
self._init_coll_req_dict(collection_req),
artifacts_manager,
validate_signature_options,
)
for collection_req in file_requirements.get('collections') or []
]
else:
raise AnsibleError(f"Expecting requirements yaml to be a list or dictionary but got {type(file_requirements).__name__}")
return requirements
def _init_coll_req_dict(self, coll_req):
if not isinstance(coll_req, dict):
# Assume it's a string:
return {'name': coll_req}
if (
'name' not in coll_req or
not coll_req.get('source') or
coll_req.get('type', 'galaxy') != 'galaxy'
):
return coll_req
# Try and match up the requirement source with our list of Galaxy API
# servers defined in the config, otherwise create a server with that
# URL without any auth.
coll_req['source'] = next(
iter(
srvr for srvr in self.api_servers
if coll_req['source'] in {srvr.name, srvr.api_server}
),
GalaxyAPI(
self.galaxy,
'explicit_requirement_{name!s}'.format(
name=coll_req['name'],
),
coll_req['source'],
validate_certs=context.CLIARGS['resolved_validate_certs'],
),
)
return coll_req
@staticmethod
def exit_without_ignore(rc=1):
"""
Exits with the specified return code unless the
option --ignore-errors was specified
"""
if not context.CLIARGS['ignore_errors']:
raise AnsibleError('- you can use --ignore-errors to skip failed roles and finish processing the list.')
@staticmethod
def _display_role_info(role_info):
text = [u"", u"Role: %s" % to_text(role_info['name'])]
# Get the top-level 'description' first, falling back to galaxy_info['galaxy_info']['description'].
galaxy_info = role_info.get('galaxy_info', {})
description = role_info.get('description', galaxy_info.get('description', ''))
text.append(u"\tdescription: %s" % description)
for k in sorted(role_info.keys()):
if k in GalaxyCLI.SKIP_INFO_KEYS:
continue
if isinstance(role_info[k], dict):
text.append(u"\t%s:" % (k))
for key in sorted(role_info[k].keys()):
if key in GalaxyCLI.SKIP_INFO_KEYS:
continue
text.append(u"\t\t%s: %s" % (key, role_info[k][key]))
else:
text.append(u"\t%s: %s" % (k, role_info[k]))
# make sure we have a trailing newline returned
text.append(u"")
return u'\n'.join(text)
@staticmethod
def _resolve_path(path):
return os.path.abspath(os.path.expanduser(os.path.expandvars(path)))
@staticmethod
def _get_skeleton_galaxy_yml(template_path, inject_data):
with open(to_bytes(template_path, errors='surrogate_or_strict'), 'rb') as template_obj:
meta_template = to_text(template_obj.read(), errors='surrogate_or_strict')
galaxy_meta = get_collections_galaxy_meta_info()
required_config = []
optional_config = []
for meta_entry in galaxy_meta:
config_list = required_config if meta_entry.get('required', False) else optional_config
value = inject_data.get(meta_entry['key'], None)
if not value:
meta_type = meta_entry.get('type', 'str')
if meta_type == 'str':
value = ''
elif meta_type == 'list':
value = []
elif meta_type == 'dict':
value = {}
meta_entry['value'] = value
config_list.append(meta_entry)
link_pattern = re.compile(r"L\(([^)]+),\s+([^)]+)\)")
const_pattern = re.compile(r"C\(([^)]+)\)")
def comment_ify(v):
if isinstance(v, list):
v = ". ".join([l.rstrip('.') for l in v])
v = link_pattern.sub(r"\1 <\2>", v)
v = const_pattern.sub(r"'\1'", v)
return textwrap.fill(v, width=117, initial_indent="# ", subsequent_indent="# ", break_on_hyphens=False)
loader = DataLoader()
templar = Templar(loader, variables={'required_config': required_config, 'optional_config': optional_config})
templar.environment.filters['comment_ify'] = comment_ify
meta_value = templar.template(meta_template)
return meta_value
def _require_one_of_collections_requirements(
self, collections, requirements_file,
signatures=None,
artifacts_manager=None,
):
if collections and requirements_file:
raise AnsibleError("The positional collection_name arg and --requirements-file are mutually exclusive.")
elif not collections and not requirements_file:
raise AnsibleError("You must specify a collection name or a requirements file.")
elif requirements_file:
if signatures is not None:
raise AnsibleError(
"The --signatures option and --requirements-file are mutually exclusive. "
"Use the --signatures with positional collection_name args or provide a "
"'signatures' key for requirements in the --requirements-file."
)
requirements_file = GalaxyCLI._resolve_path(requirements_file)
requirements = self._parse_requirements_file(
requirements_file,
allow_old_format=False,
artifacts_manager=artifacts_manager,
)
else:
requirements = {
'collections': [
Requirement.from_string(coll_input, artifacts_manager, signatures)
for coll_input in collections
],
'roles': [],
}
return requirements
############################
# execute actions
############################
def execute_role(self):
"""
Perform the action on an Ansible Galaxy role. Must be combined with a further action like delete/install/init
as listed below.
"""
# To satisfy doc build
pass
def execute_collection(self):
"""
Perform the action on an Ansible Galaxy collection. Must be combined with a further action like init/install as
listed below.
"""
# To satisfy doc build
pass
def execute_build(self):
"""
Build an Ansible Galaxy collection artifact that can be stored in a central repository like Ansible Galaxy.
By default, this command builds from the current working directory. You can optionally pass in the
collection input path (where the ``galaxy.yml`` file is).
"""
force = context.CLIARGS['force']
output_path = GalaxyCLI._resolve_path(context.CLIARGS['output_path'])
b_output_path = to_bytes(output_path, errors='surrogate_or_strict')
if not os.path.exists(b_output_path):
os.makedirs(b_output_path)
elif os.path.isfile(b_output_path):
raise AnsibleError("- the output collection directory %s is a file - aborting" % to_native(output_path))
for collection_path in context.CLIARGS['args']:
collection_path = GalaxyCLI._resolve_path(collection_path)
build_collection(
to_text(collection_path, errors='surrogate_or_strict'),
to_text(output_path, errors='surrogate_or_strict'),
force,
)
@with_collection_artifacts_manager
def execute_download(self, artifacts_manager=None):
"""Download collections and their dependencies as a tarball for an offline install."""
collections = context.CLIARGS['args']
no_deps = context.CLIARGS['no_deps']
download_path = context.CLIARGS['download_path']
requirements_file = context.CLIARGS['requirements']
if requirements_file:
requirements_file = GalaxyCLI._resolve_path(requirements_file)
requirements = self._require_one_of_collections_requirements(
collections, requirements_file,
artifacts_manager=artifacts_manager,
)['collections']
download_path = GalaxyCLI._resolve_path(download_path)
b_download_path = to_bytes(download_path, errors='surrogate_or_strict')
if not os.path.exists(b_download_path):
os.makedirs(b_download_path)
download_collections(
requirements, download_path, self.api_servers, no_deps,
context.CLIARGS['allow_pre_release'],
artifacts_manager=artifacts_manager,
)
return 0
def execute_init(self):
"""
Creates the skeleton framework of a role or collection that complies with the Galaxy metadata format.
Requires a role or collection name. The collection name must be in the format ``<namespace>.<collection>``.
"""
galaxy_type = context.CLIARGS['type']
init_path = context.CLIARGS['init_path']
force = context.CLIARGS['force']
obj_skeleton = context.CLIARGS['{0}_skeleton'.format(galaxy_type)]
obj_name = context.CLIARGS['{0}_name'.format(galaxy_type)]
inject_data = dict(
description='your {0} description'.format(galaxy_type),
ansible_plugin_list_dir=get_versioned_doclink('plugins/plugins.html'),
)
if galaxy_type == 'role':
inject_data.update(dict(
author='your name',
company='your company (optional)',
license='license (GPL-2.0-or-later, MIT, etc)',
role_name=obj_name,
role_type=context.CLIARGS['role_type'],
issue_tracker_url='http://example.com/issue/tracker',
repository_url='http://example.com/repository',
documentation_url='http://docs.example.com',
homepage_url='http://example.com',
min_ansible_version=ansible_version[:3], # x.y
dependencies=[],
))
skeleton_ignore_expressions = C.GALAXY_ROLE_SKELETON_IGNORE
obj_path = os.path.join(init_path, obj_name)
elif galaxy_type == 'collection':
namespace, collection_name = obj_name.split('.', 1)
inject_data.update(dict(
namespace=namespace,
collection_name=collection_name,
version='1.0.0',
readme='README.md',
authors=['your name <[email protected]>'],
license=['GPL-2.0-or-later'],
repository='http://example.com/repository',
documentation='http://docs.example.com',
homepage='http://example.com',
issues='http://example.com/issue/tracker',
build_ignore=[],
))
skeleton_ignore_expressions = C.GALAXY_COLLECTION_SKELETON_IGNORE
obj_path = os.path.join(init_path, namespace, collection_name)
b_obj_path = to_bytes(obj_path, errors='surrogate_or_strict')
if os.path.exists(b_obj_path):
if os.path.isfile(obj_path):
raise AnsibleError("- the path %s already exists, but is a file - aborting" % to_native(obj_path))
elif not force:
raise AnsibleError("- the directory %s already exists. "
"You can use --force to re-initialize this directory,\n"
"however it will reset any main.yml files that may have\n"
"been modified there already." % to_native(obj_path))
# delete the contents rather than the collection root in case init was run from the root (--init-path ../../)
for root, dirs, files in os.walk(b_obj_path, topdown=True):
for old_dir in dirs:
path = os.path.join(root, old_dir)
shutil.rmtree(path)
for old_file in files:
path = os.path.join(root, old_file)
os.unlink(path)
if obj_skeleton is not None:
own_skeleton = False
else:
own_skeleton = True
obj_skeleton = self.galaxy.default_role_skeleton_path
skeleton_ignore_expressions = ['^.*/.git_keep$']
obj_skeleton = os.path.expanduser(obj_skeleton)
skeleton_ignore_re = [re.compile(x) for x in skeleton_ignore_expressions]
if not os.path.exists(obj_skeleton):
raise AnsibleError("- the skeleton path '{0}' does not exist, cannot init {1}".format(
to_native(obj_skeleton), galaxy_type)
)
loader = DataLoader()
templar = Templar(loader, variables=inject_data)
# create role directory
if not os.path.exists(b_obj_path):
os.makedirs(b_obj_path)
for root, dirs, files in os.walk(obj_skeleton, topdown=True):
rel_root = os.path.relpath(root, obj_skeleton)
rel_dirs = rel_root.split(os.sep)
rel_root_dir = rel_dirs[0]
if galaxy_type == 'collection':
# A collection can contain templates in playbooks/*/templates and roles/*/templates
in_templates_dir = rel_root_dir in ['playbooks', 'roles'] and 'templates' in rel_dirs
else:
in_templates_dir = rel_root_dir == 'templates'
# Filter out ignored directory names
# Use [:] to mutate the list os.walk uses
dirs[:] = [d for d in dirs if not any(r.match(d) for r in skeleton_ignore_re)]
for f in files:
filename, ext = os.path.splitext(f)
if any(r.match(os.path.join(rel_root, f)) for r in skeleton_ignore_re):
continue
if galaxy_type == 'collection' and own_skeleton and rel_root == '.' and f == 'galaxy.yml.j2':
# Special use case for galaxy.yml.j2 in our own default collection skeleton. We build the options
# dynamically which requires special options to be set.
# The templated data's keys must match the key name but the inject data contains collection_name
# instead of name. We just make a copy and change the key back to name for this file.
template_data = inject_data.copy()
template_data['name'] = template_data.pop('collection_name')
meta_value = GalaxyCLI._get_skeleton_galaxy_yml(os.path.join(root, rel_root, f), template_data)
b_dest_file = to_bytes(os.path.join(obj_path, rel_root, filename), errors='surrogate_or_strict')
with open(b_dest_file, 'wb') as galaxy_obj:
galaxy_obj.write(to_bytes(meta_value, errors='surrogate_or_strict'))
elif ext == ".j2" and not in_templates_dir:
src_template = os.path.join(root, f)
dest_file = os.path.join(obj_path, rel_root, filename)
template_data = to_text(loader._get_file_contents(src_template)[0], errors='surrogate_or_strict')
b_rendered = to_bytes(templar.template(template_data), errors='surrogate_or_strict')
with open(dest_file, 'wb') as df:
df.write(b_rendered)
else:
f_rel_path = os.path.relpath(os.path.join(root, f), obj_skeleton)
shutil.copyfile(os.path.join(root, f), os.path.join(obj_path, f_rel_path), follow_symlinks=False)
for d in dirs:
b_dir_path = to_bytes(os.path.join(obj_path, rel_root, d), errors='surrogate_or_strict')
if os.path.exists(b_dir_path):
continue
b_src_dir = to_bytes(os.path.join(root, d), errors='surrogate_or_strict')
if os.path.islink(b_src_dir):
shutil.copyfile(b_src_dir, b_dir_path, follow_symlinks=False)
else:
os.makedirs(b_dir_path)
display.display("- %s %s was created successfully" % (galaxy_type.title(), obj_name))
def execute_info(self):
"""
Prints out detailed information about an installed role as well as info available from the Galaxy API.
"""
roles_path = context.CLIARGS['roles_path']
data = ''
for role in context.CLIARGS['args']:
role_info = {'path': roles_path}
gr = GalaxyRole(self.galaxy, self.lazy_role_api, role)
install_info = gr.install_info
if install_info:
if 'version' in install_info:
install_info['installed_version'] = install_info['version']
del install_info['version']
role_info.update(install_info)
if not context.CLIARGS['offline']:
remote_data = None
try:
remote_data = self.api.lookup_role_by_name(role, False)
except GalaxyError as e:
if e.http_code == 400 and 'Bad Request' in e.message:
# Role does not exist in Ansible Galaxy
data = u"- the role %s was not found" % role
break
raise AnsibleError("Unable to find info about '%s': %s" % (role, e))
if remote_data:
role_info.update(remote_data)
else:
data = u"- the role %s was not found" % role
break
elif context.CLIARGS['offline'] and not gr._exists:
data = u"- the role %s was not found" % role
break
if gr.metadata:
role_info.update(gr.metadata)
req = RoleRequirement()
role_spec = req.role_yaml_parse({'role': role})
if role_spec:
role_info.update(role_spec)
data += self._display_role_info(role_info)
self.pager(data)
@with_collection_artifacts_manager
def execute_verify(self, artifacts_manager=None):
"""Compare checksums with the collection(s) found on the server and the installed copy. This does not verify dependencies."""
collections = context.CLIARGS['args']
search_paths = AnsibleCollectionConfig.collection_paths
ignore_errors = context.CLIARGS['ignore_errors']
local_verify_only = context.CLIARGS['offline']
requirements_file = context.CLIARGS['requirements']
signatures = context.CLIARGS['signatures']
if signatures is not None:
signatures = list(signatures)
requirements = self._require_one_of_collections_requirements(
collections, requirements_file,
signatures=signatures,
artifacts_manager=artifacts_manager,
)['collections']
resolved_paths = [validate_collection_path(GalaxyCLI._resolve_path(path)) for path in search_paths]
results = verify_collections(
requirements, resolved_paths,
self.api_servers, ignore_errors,
local_verify_only=local_verify_only,
artifacts_manager=artifacts_manager,
)
if any(result for result in results if not result.success):
return 1
return 0
@with_collection_artifacts_manager
def execute_install(self, artifacts_manager=None):
"""
Install one or more roles (``ansible-galaxy role install``), or one or more collections (``ansible-galaxy collection install``).
You can pass in a list (roles or collections) or use the file
option listed below (these are mutually exclusive). If you pass in a list, it
can be a name (which will be downloaded via the galaxy API and github), or it can be a local tar archive file.
"""
install_items = context.CLIARGS['args']
requirements_file = context.CLIARGS['requirements']
collection_path = None
signatures = context.CLIARGS.get('signatures')
if signatures is not None:
signatures = list(signatures)
if requirements_file:
requirements_file = GalaxyCLI._resolve_path(requirements_file)
two_type_warning = "The requirements file '%s' contains {0}s which will be ignored. To install these {0}s " \
"run 'ansible-galaxy {0} install -r' or to install both at the same time run " \
"'ansible-galaxy install -r' without a custom install path." % to_text(requirements_file)
# TODO: Would be nice to share the same behaviour with args and -r in collections and roles.
collection_requirements = []
role_requirements = []
if context.CLIARGS['type'] == 'collection':
collection_path = GalaxyCLI._resolve_path(context.CLIARGS['collections_path'])
requirements = self._require_one_of_collections_requirements(
install_items, requirements_file,
signatures=signatures,
artifacts_manager=artifacts_manager,
)
collection_requirements = requirements['collections']
if requirements['roles']:
display.vvv(two_type_warning.format('role'))
else:
if not install_items and requirements_file is None:
raise AnsibleOptionsError("- you must specify a user/role name or a roles file")
if requirements_file:
if not (requirements_file.endswith('.yaml') or requirements_file.endswith('.yml')):
raise AnsibleError("Invalid role requirements file, it must end with a .yml or .yaml extension")
galaxy_args = self._raw_args
will_install_collections = self._implicit_role and '-p' not in galaxy_args and '--roles-path' not in galaxy_args
requirements = self._parse_requirements_file(
requirements_file,
artifacts_manager=artifacts_manager,
validate_signature_options=will_install_collections,
)
role_requirements = requirements['roles']
# We can only install collections and roles at the same time if the type wasn't specified and the -p
# argument was not used. If collections are present in the requirements then at least display a msg.
if requirements['collections'] and (not self._implicit_role or '-p' in galaxy_args or
'--roles-path' in galaxy_args):
# We only want to display a warning if 'ansible-galaxy install -r ... -p ...'. In other cases the user
# was explicit about the type and shouldn't care that collections were skipped.
display_func = display.warning if self._implicit_role else display.vvv
display_func(two_type_warning.format('collection'))
else:
collection_path = self._get_default_collection_path()
collection_requirements = requirements['collections']
else:
# roles were specified directly, so we'll just go out grab them
# (and their dependencies, unless the user doesn't want us to).
for rname in context.CLIARGS['args']:
role = RoleRequirement.role_yaml_parse(rname.strip())
role_requirements.append(GalaxyRole(self.galaxy, self.lazy_role_api, **role))
if not role_requirements and not collection_requirements:
display.display("Skipping install, no requirements found")
return
if role_requirements:
display.display("Starting galaxy role install process")
self._execute_install_role(role_requirements)
if collection_requirements:
display.display("Starting galaxy collection install process")
# Collections can technically be installed even when ansible-galaxy is in role mode so we need to pass in
# the install path as context.CLIARGS['collections_path'] won't be set (default is calculated above).
self._execute_install_collection(
collection_requirements, collection_path,
artifacts_manager=artifacts_manager,
)
def _execute_install_collection(
self, requirements, path, artifacts_manager,
):
force = context.CLIARGS['force']
ignore_errors = context.CLIARGS['ignore_errors']
no_deps = context.CLIARGS['no_deps']
force_with_deps = context.CLIARGS['force_with_deps']
try:
disable_gpg_verify = context.CLIARGS['disable_gpg_verify']
except KeyError:
if self._implicit_role:
raise AnsibleError(
'Unable to properly parse command line arguments. Please use "ansible-galaxy collection install" '
'instead of "ansible-galaxy install".'
)
raise
# If `ansible-galaxy install` is used, collection-only options aren't available to the user and won't be in context.CLIARGS
allow_pre_release = context.CLIARGS.get('allow_pre_release', False)
upgrade = context.CLIARGS.get('upgrade', False)
collections_path = C.COLLECTIONS_PATHS
managed_paths = set(validate_collection_path(p) for p in C.COLLECTIONS_PATHS)
read_req_paths = set(validate_collection_path(p) for p in AnsibleCollectionConfig.collection_paths)
unexpected_path = C.GALAXY_COLLECTIONS_PATH_WARNING and not any(p.startswith(path) for p in managed_paths)
if unexpected_path and any(p.startswith(path) for p in read_req_paths):
display.warning(
f"The specified collections path '{path}' appears to be part of the pip Ansible package. "
"Managing these directly with ansible-galaxy could break the Ansible package. "
"Install collections to a configured collections path, which will take precedence over "
"collections found in the PYTHONPATH."
)
elif unexpected_path:
display.warning("The specified collections path '%s' is not part of the configured Ansible "
"collections paths '%s'. The installed collection will not be picked up in an Ansible "
"run, unless within a playbook-adjacent collections directory." % (to_text(path), to_text(":".join(collections_path))))
output_path = validate_collection_path(path)
b_output_path = to_bytes(output_path, errors='surrogate_or_strict')
if not os.path.exists(b_output_path):
os.makedirs(b_output_path)
install_collections(
requirements, output_path, self.api_servers, ignore_errors,
no_deps, force, force_with_deps, upgrade,
allow_pre_release=allow_pre_release,
artifacts_manager=artifacts_manager,
disable_gpg_verify=disable_gpg_verify,
offline=context.CLIARGS.get('offline', False),
read_requirement_paths=read_req_paths,
)
return 0
def _execute_install_role(self, requirements):
role_file = context.CLIARGS['requirements']
no_deps = context.CLIARGS['no_deps']
force_deps = context.CLIARGS['force_with_deps']
force = context.CLIARGS['force'] or force_deps
for role in requirements:
# only process roles from role files when the name matches, if role names were given
if role_file and context.CLIARGS['args'] and role.name not in context.CLIARGS['args']:
display.vvv('Skipping role %s' % role.name)
continue
display.vvv('Processing role %s ' % role.name)
# query the galaxy API for the role data
if role.install_info is not None:
if role.install_info['version'] != role.version or force:
if force:
display.display('- changing role %s from %s to %s' %
(role.name, role.install_info['version'], role.version or "unspecified"))
role.remove()
else:
display.warning('- %s (%s) is already installed - use --force to change version to %s' %
(role.name, role.install_info['version'], role.version or "unspecified"))
continue
else:
if not force:
display.display('- %s is already installed, skipping.' % str(role))
continue
try:
installed = role.install()
except AnsibleError as e:
display.warning(u"- %s was NOT installed successfully: %s " % (role.name, to_text(e)))
self.exit_without_ignore()
continue
# install dependencies, if we want them
if not no_deps and installed:
if not role.metadata:
# NOTE: the meta file is also required for installing the role, not just dependencies
display.warning("Meta file %s is empty. Skipping dependencies." % role.path)
else:
role_dependencies = role.metadata_dependencies + role.requirements
for dep in role_dependencies:
display.debug('Installing dep %s' % dep)
dep_req = RoleRequirement()
dep_info = dep_req.role_yaml_parse(dep)
dep_role = GalaxyRole(self.galaxy, self.lazy_role_api, **dep_info)
if '.' not in dep_role.name and '.' not in dep_role.src and dep_role.scm is None:
# we know we can skip this, as it's not going to
# be found on galaxy.ansible.com
continue
if dep_role.install_info is None:
if dep_role not in requirements:
display.display('- adding dependency: %s' % to_text(dep_role))
requirements.append(dep_role)
else:
display.display('- dependency %s already pending installation.' % dep_role.name)
else:
if dep_role.install_info['version'] != dep_role.version:
if force_deps:
display.display('- changing dependent role %s from %s to %s' %
(dep_role.name, dep_role.install_info['version'], dep_role.version or "unspecified"))
dep_role.remove()
requirements.append(dep_role)
else:
display.warning('- dependency %s (%s) from role %s differs from already installed version (%s), skipping' %
(to_text(dep_role), dep_role.version, role.name, dep_role.install_info['version']))
else:
if force_deps:
requirements.append(dep_role)
else:
display.display('- dependency %s is already installed, skipping.' % dep_role.name)
if not installed:
display.warning("- %s was NOT installed successfully." % role.name)
self.exit_without_ignore()
return 0
def execute_remove(self):
"""
Removes the list of roles passed as arguments from the local system.
"""
if not context.CLIARGS['args']:
raise AnsibleOptionsError('- you must specify at least one role to remove.')
for role_name in context.CLIARGS['args']:
role = GalaxyRole(self.galaxy, self.api, role_name)
try:
if role.remove():
display.display('- successfully removed %s' % role_name)
else:
display.display('- %s is not installed, skipping.' % role_name)
except Exception as e:
raise AnsibleError("Failed to remove role %s: %s" % (role_name, to_native(e)))
return 0
def execute_list(self):
"""
List installed collections or roles
"""
if context.CLIARGS['type'] == 'role':
self.execute_list_role()
elif context.CLIARGS['type'] == 'collection':
self.execute_list_collection()
def execute_list_role(self):
"""
List all roles installed on the local system or a specific role
"""
path_found = False
role_found = False
warnings = []
roles_search_paths = context.CLIARGS['roles_path']
role_name = context.CLIARGS['role']
for path in roles_search_paths:
role_path = GalaxyCLI._resolve_path(path)
if os.path.isdir(path):
path_found = True
else:
warnings.append("- the configured path {0} does not exist.".format(path))
continue
if role_name:
# show the requested role, if it exists
gr = GalaxyRole(self.galaxy, self.lazy_role_api, role_name, path=os.path.join(role_path, role_name))
if os.path.isdir(gr.path):
role_found = True
display.display('# %s' % os.path.dirname(gr.path))
_display_role(gr)
break
warnings.append("- the role %s was not found" % role_name)
else:
if not os.path.exists(role_path):
warnings.append("- the configured path %s does not exist." % role_path)
continue
if not os.path.isdir(role_path):
warnings.append("- the configured path %s, exists, but it is not a directory." % role_path)
continue
display.display('# %s' % role_path)
path_files = os.listdir(role_path)
for path_file in path_files:
gr = GalaxyRole(self.galaxy, self.lazy_role_api, path_file, path=path)
if gr.metadata:
_display_role(gr)
# Do not warn if the role was found in any of the search paths
if role_found and role_name:
warnings = []
for w in warnings:
display.warning(w)
if not path_found:
raise AnsibleOptionsError(
"- None of the provided paths were usable. Please specify a valid path with --{0}s-path".format(context.CLIARGS['type'])
)
return 0
@with_collection_artifacts_manager
def execute_list_collection(self, artifacts_manager=None):
"""
List all collections installed on the local system
:param artifacts_manager: Artifacts manager.
"""
if artifacts_manager is not None:
artifacts_manager.require_build_metadata = False
output_format = context.CLIARGS['output_format']
collection_name = context.CLIARGS['collection']
default_collections_path = set(C.COLLECTIONS_PATHS)
collections_search_paths = (
set(context.CLIARGS['collections_path'] or []) | default_collections_path | set(AnsibleCollectionConfig.collection_paths)
)
collections_in_paths = {}
warnings = []
path_found = False
collection_found = False
namespace_filter = None
collection_filter = None
if collection_name:
# list a specific collection
validate_collection_name(collection_name)
namespace_filter, collection_filter = collection_name.split('.')
collections = list(find_existing_collections(
list(collections_search_paths),
artifacts_manager,
namespace_filter=namespace_filter,
collection_filter=collection_filter,
dedupe=False
))
seen = set()
fqcn_width, version_width = _get_collection_widths(collections)
for collection in sorted(collections, key=lambda c: c.src):
collection_found = True
collection_path = pathlib.Path(to_text(collection.src)).parent.parent.as_posix()
if output_format in {'yaml', 'json'}:
collections_in_paths.setdefault(collection_path, {})
collections_in_paths[collection_path][collection.fqcn] = {'version': collection.ver}
else:
if collection_path not in seen:
_display_header(
collection_path,
'Collection',
'Version',
fqcn_width,
version_width
)
seen.add(collection_path)
_display_collection(collection, fqcn_width, version_width)
path_found = False
for path in collections_search_paths:
if not os.path.exists(path):
if path in default_collections_path:
# don't warn for missing default paths
continue
warnings.append("- the configured path {0} does not exist.".format(path))
elif os.path.exists(path) and not os.path.isdir(path):
warnings.append("- the configured path {0}, exists, but it is not a directory.".format(path))
else:
path_found = True
# Do not warn if the specific collection was found in any of the search paths
if collection_found and collection_name:
warnings = []
for w in warnings:
display.warning(w)
if not collections and not path_found:
raise AnsibleOptionsError(
"- None of the provided paths were usable. Please specify a valid path with --{0}s-path".format(context.CLIARGS['type'])
)
if output_format == 'json':
display.display(json.dumps(collections_in_paths))
elif output_format == 'yaml':
display.display(yaml_dump(collections_in_paths))
return 0
def execute_publish(self):
"""
Publish a collection into Ansible Galaxy. Requires the path to the collection tarball to publish.
"""
collection_path = GalaxyCLI._resolve_path(context.CLIARGS['args'])
wait = context.CLIARGS['wait']
timeout = context.CLIARGS['import_timeout']
publish_collection(collection_path, self.api, wait, timeout)
def execute_search(self):
''' searches for roles on the Ansible Galaxy server'''
page_size = 1000
search = None
if context.CLIARGS['args']:
search = '+'.join(context.CLIARGS['args'])
if not search and not context.CLIARGS['platforms'] and not context.CLIARGS['galaxy_tags'] and not context.CLIARGS['author']:
raise AnsibleError("Invalid query. At least one search term, platform, galaxy tag or author must be provided.")
response = self.api.search_roles(search, platforms=context.CLIARGS['platforms'],
tags=context.CLIARGS['galaxy_tags'], author=context.CLIARGS['author'], page_size=page_size)
if response['count'] == 0:
display.warning("No roles match your search.")
return 0
data = [u'']
if response['count'] > page_size:
data.append(u"Found %d roles matching your search. Showing first %s." % (response['count'], page_size))
else:
data.append(u"Found %d roles matching your search:" % response['count'])
max_len = []
for role in response['results']:
max_len.append(len(role['username'] + '.' + role['name']))
name_len = max(max_len)
format_str = u" %%-%ds %%s" % name_len
data.append(u'')
data.append(format_str % (u"Name", u"Description"))
data.append(format_str % (u"----", u"-----------"))
for role in response['results']:
data.append(format_str % (u'%s.%s' % (role['username'], role['name']), role['description']))
data = u'\n'.join(data)
self.pager(data)
return 0
def execute_import(self):
""" used to import a role into Ansible Galaxy """
colors = {
'INFO': 'normal',
'WARNING': C.COLOR_WARN,
'ERROR': C.COLOR_ERROR,
'SUCCESS': C.COLOR_OK,
'FAILED': C.COLOR_ERROR,
}
github_user = to_text(context.CLIARGS['github_user'], errors='surrogate_or_strict')
github_repo = to_text(context.CLIARGS['github_repo'], errors='surrogate_or_strict')
if context.CLIARGS['check_status']:
task = self.api.get_import_task(github_user=github_user, github_repo=github_repo)
else:
# Submit an import request
task = self.api.create_import_task(github_user, github_repo,
reference=context.CLIARGS['reference'],
role_name=context.CLIARGS['role_name'])
if len(task) > 1:
# found multiple roles associated with github_user/github_repo
display.display("WARNING: More than one Galaxy role associated with Github repo %s/%s." % (github_user, github_repo),
color='yellow')
display.display("The following Galaxy roles are being updated:" + u'\n', color=C.COLOR_CHANGED)
for t in task:
display.display('%s.%s' % (t['summary_fields']['role']['namespace'], t['summary_fields']['role']['name']), color=C.COLOR_CHANGED)
display.display(u'\nTo properly namespace this role, remove each of the above and re-import %s/%s from scratch' % (github_user, github_repo),
color=C.COLOR_CHANGED)
return 0
# found a single role as expected
display.display("Successfully submitted import request %d" % task[0]['id'])
if not context.CLIARGS['wait']:
display.display("Role name: %s" % task[0]['summary_fields']['role']['name'])
display.display("Repo: %s/%s" % (task[0]['github_user'], task[0]['github_repo']))
if context.CLIARGS['check_status'] or context.CLIARGS['wait']:
# Get the status of the import
msg_list = []
finished = False
while not finished:
task = self.api.get_import_task(task_id=task[0]['id'])
for msg in task[0]['summary_fields']['task_messages']:
if msg['id'] not in msg_list:
display.display(msg['message_text'], color=colors[msg['message_type']])
msg_list.append(msg['id'])
if task[0]['state'] in ['SUCCESS', 'FAILED']:
finished = True
else:
time.sleep(10)
return 0
def execute_setup(self):
""" Setup an integration from Github or Travis for Ansible Galaxy roles"""
if context.CLIARGS['setup_list']:
# List existing integration secrets
secrets = self.api.list_secrets()
if len(secrets) == 0:
# None found
display.display("No integrations found.")
return 0
display.display(u'\n' + "ID Source Repo", color=C.COLOR_OK)
display.display("---------- ---------- ----------", color=C.COLOR_OK)
for secret in secrets:
display.display("%-10s %-10s %s/%s" % (secret['id'], secret['source'], secret['github_user'],
secret['github_repo']), color=C.COLOR_OK)
return 0
if context.CLIARGS['remove_id']:
# Remove a secret
self.api.remove_secret(context.CLIARGS['remove_id'])
display.display("Secret removed. Integrations using this secret will not longer work.", color=C.COLOR_OK)
return 0
source = context.CLIARGS['source']
github_user = context.CLIARGS['github_user']
github_repo = context.CLIARGS['github_repo']
secret = context.CLIARGS['secret']
resp = self.api.add_secret(source, github_user, github_repo, secret)
display.display("Added integration for %s %s/%s" % (resp['source'], resp['github_user'], resp['github_repo']))
return 0
def execute_delete(self):
""" Delete a role from Ansible Galaxy. """
github_user = context.CLIARGS['github_user']
github_repo = context.CLIARGS['github_repo']
resp = self.api.delete_role(github_user, github_repo)
if len(resp['deleted_roles']) > 1:
display.display("Deleted the following roles:")
display.display("ID User Name")
display.display("------ --------------- ----------")
for role in resp['deleted_roles']:
display.display("%-8s %-15s %s" % (role.id, role.namespace, role.name))
display.display(resp['status'])
return 0
def main(args=None):
GalaxyCLI.cli_executor(args)
if __name__ == '__main__':
main()
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,175 |
ansible-galaxy role import always exits 0 even if import failed
|
### Summary
When a role import fails, the galaxy cli code does not alter the exit code for the command accordingly. It is always zero.
https://github.com/ansible/ansible/blob/devel/lib/ansible/cli/galaxy.py#L1818C1-L1833C17
```
if context.CLIARGS['check_status'] or context.CLIARGS['wait']:
# Get the status of the import
msg_list = []
finished = False
while not finished:
task = self.api.get_import_task(task_id=task[0]['id'])
for msg in task[0]['summary_fields']['task_messages']:
if msg['id'] not in msg_list:
display.display(msg['message_text'], color=colors[msg['message_type']])
msg_list.append(msg['id'])
if task[0]['state'] in ['SUCCESS', 'FAILED']:
finished = True
else:
time.sleep(10)
return 0
```
The code knows the task is "finished" if state is either SUCCESS or FAILED, but the FAILED state does not affect the return value.
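For illustration, one possible shape of a fix (an untested sketch reusing the names from the snippet above, and assuming the value returned by `execute_import()` is propagated to the process exit code like the other subcommands; the real fix may instead raise an error):
```python
if context.CLIARGS['check_status'] or context.CLIARGS['wait']:
    # Get the status of the import
    msg_list = []
    finished = False
    while not finished:
        task = self.api.get_import_task(task_id=task[0]['id'])
        for msg in task[0]['summary_fields']['task_messages']:
            if msg['id'] not in msg_list:
                display.display(msg['message_text'], color=colors[msg['message_type']])
                msg_list.append(msg['id'])
        if task[0]['state'] in ['SUCCESS', 'FAILED']:
            finished = True
        else:
            time.sleep(10)
    # NEW: propagate a FAILED terminal state into the command's exit code
    if task[0]['state'] == 'FAILED':
        display.warning("Galaxy import process failed.")
        return 1
return 0
```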
### Issue Type
Bug Report
### Component Name
ansible-galaxy
### Ansible Version
```console
(venv) [jtanner@p1 galaxy_ng.role_exception_logging]$ ansible-galaxy --version
[WARNING]: You are running the development version of Ansible. You should only run Ansible from "devel" if you are modifying the Ansible engine, or trying
out features under development. This is a rapidly changing source of code and can become unstable at any point.
ansible-galaxy [core 2.17.0.dev0] (devel fd009a073a) last updated 2023/11/08 12:01:58 (GMT -400)
config file = /home/jtanner/workspace/github/jctanner.redhat/galaxy_ng.role_exception_logging/ansible.cfg
configured module search path = ['/home/jtanner/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /home/jtanner/workspace/github/jctanner.redhat/galaxy_ng.role_exception_logging/ansible.core/lib/ansible
ansible collection location = /home/jtanner/.ansible/collections:/usr/share/ansible/collections
executable location = /home/jtanner/workspace/github/jctanner.redhat/galaxy_ng.role_exception_logging/ansible.core/bin/ansible-galaxy
python version = 3.11.6 (main, Oct 3 2023, 00:00:00) [GCC 13.2.1 20230728 (Red Hat 13.2.1-1)] (/home/jtanner/workspace/github/jctanner.redhat/galaxy_ng.role_exception_logging/venv/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
N/A
```
### OS / Environment
Fedora 38
### Steps to Reproduce
1. Setup a galaxy server
2. ansible-galaxy role import -vvvv nephelaiio ansible-role-packetbeat
### Expected Results
The ansible-galaxy command should exit non-zero if the task is in a FAILED state.
### Actual Results
```console
exit 0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82175
|
https://github.com/ansible/ansible/pull/82193
|
7f2ad7eea673233223948e0d2a9fc5ee683040ce
|
fe81164fe548d79fbcd0024836d5f7474403c95d
| 2023-11-08T17:14:35Z |
python
| 2023-12-12T18:59:19Z |
test/units/galaxy/test_role_install.py
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import os
import functools
import pytest
import tempfile
from io import StringIO
from ansible import context
from ansible.cli.galaxy import GalaxyCLI
from ansible.galaxy import api, role, Galaxy
from ansible.module_utils.common.text.converters import to_text
from ansible.utils import context_objects as co
def call_galaxy_cli(args):
orig = co.GlobalCLIArgs._Singleton__instance
co.GlobalCLIArgs._Singleton__instance = None
try:
GalaxyCLI(args=['ansible-galaxy', 'role'] + args).run()
finally:
co.GlobalCLIArgs._Singleton__instance = orig
@pytest.fixture(autouse='function')
def reset_cli_args():
co.GlobalCLIArgs._Singleton__instance = None
yield
co.GlobalCLIArgs._Singleton__instance = None
@pytest.fixture(autouse=True)
def galaxy_server():
context.CLIARGS._store = {'ignore_certs': False}
galaxy_api = api.GalaxyAPI(None, 'test_server', 'https://galaxy.ansible.com')
return galaxy_api
@pytest.fixture(autouse=True)
def init_role_dir(tmp_path_factory):
test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Roles Input'))
namespace = 'ansible_namespace'
role = 'role'
skeleton_path = os.path.join(os.path.dirname(os.path.split(__file__)[0]), 'cli', 'test_data', 'role_skeleton')
call_galaxy_cli(['init', '%s.%s' % (namespace, role), '-c', '--init-path', test_dir, '--role-skeleton', skeleton_path])
def mock_NamedTemporaryFile(mocker, **args):
mock_ntf = mocker.MagicMock()
mock_ntf.write = mocker.MagicMock()
mock_ntf.close = mocker.MagicMock()
mock_ntf.name = None
return mock_ntf
@pytest.fixture
def init_mock_temp_file(mocker, monkeypatch):
monkeypatch.setattr(tempfile, 'NamedTemporaryFile', functools.partial(mock_NamedTemporaryFile, mocker))
@pytest.fixture(autouse=True)
def mock_role_download_api(mocker, monkeypatch):
mock_role_api = mocker.MagicMock()
mock_role_api.side_effect = [
StringIO(u''),
]
monkeypatch.setattr(role, 'open_url', mock_role_api)
return mock_role_api
def test_role_download_github(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
StringIO(u'{"results":[{"name": "0.0.1"},{"name": "0.0.2"}]}'),
]
monkeypatch.setattr(api, 'open_url', mock_api)
role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role', version="0.0.1").install()
assert mock_role_download_api.call_count == 1
assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.1.tar.gz'
def test_role_download_github_default_version(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
StringIO(u'{"results":[{"name": "0.0.1"},{"name": "0.0.2"}]}'),
]
monkeypatch.setattr(api, 'open_url', mock_api)
role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role').install()
assert mock_role_download_api.call_count == 1
assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.2.tar.gz'
def test_role_download_github_no_download_url_for_version(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
StringIO(u'{"results":[{"name": "0.0.1"},{"name": "0.0.2","download_url":"http://localhost:8080/test_owner/test_role/0.0.2.tar.gz"}]}'),
]
monkeypatch.setattr(api, 'open_url', mock_api)
role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role', version="0.0.1").install()
assert mock_role_download_api.call_count == 1
assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.1.tar.gz'
def test_role_download_url(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
StringIO(u'{"results":[{"name": "0.0.1","download_url":"http://localhost:8080/test_owner/test_role/0.0.1.tar.gz"},'
u'{"name": "0.0.2","download_url":"http://localhost:8080/test_owner/test_role/0.0.2.tar.gz"}]}'),
]
monkeypatch.setattr(api, 'open_url', mock_api)
role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role', version="0.0.1").install()
assert mock_role_download_api.call_count == 1
assert mock_role_download_api.mock_calls[0][1][0] == 'http://localhost:8080/test_owner/test_role/0.0.1.tar.gz'
def test_role_download_url_default_version(init_mock_temp_file, mocker, galaxy_server, mock_role_download_api, monkeypatch):
mock_api = mocker.MagicMock()
mock_api.side_effect = [
StringIO(u'{"available_versions":{"v1":"v1/"}}'),
StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
StringIO(u'{"results":[{"name": "0.0.1","download_url":"http://localhost:8080/test_owner/test_role/0.0.1.tar.gz"},'
u'{"name": "0.0.2","download_url":"http://localhost:8080/test_owner/test_role/0.0.2.tar.gz"}]}'),
]
monkeypatch.setattr(api, 'open_url', mock_api)
role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role').install()
assert mock_role_download_api.call_count == 1
assert mock_role_download_api.mock_calls[0][1][0] == 'http://localhost:8080/test_owner/test_role/0.0.2.tar.gz'
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,363 |
Only one handler of a `listen` group is run when notified from another handler
|
### Summary
Hello!
In ansible-core 2.15 and later (I think since the changes introduced by #79558), it seems that, when multiple handlers of the same `listen` group are notified by another handler, only the first one in the group is run.
From what I understand, when Ansible handles a `notify` while it is already iterating handlers, it only considers the first handler that matches the notification and does not iterate through all the matching handlers. I believe the `break` in `lib/ansible/plugins/strategy/__init__.py` (line 669) should only apply when Ansible is not already iterating handlers (i.e., in the `else` branch):
https://github.com/ansible/ansible/blob/6ebefaceb6cd0d4961776a94d63a71fc1fc28bc0/lib/ansible/plugins/strategy/__init__.py#L659-L669
There seems to be a simple fix for that, which I'm planning to submit as a PR.
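For reference, here is a rough and untested sketch of the change I have in mind, based on the current notification handling in `_process_pending_results` (the actual PR may end up looking different):
```python
for notification in result_item['_ansible_notify']:
    found = False
    for handler in self.search_handlers_by_notification(notification, iterator):
        found = True
        if host_state.run_state == IteratingStates.HANDLERS:
            # already iterating handlers: expand the notification now and keep
            # looping so that every handler listening on this topic is notified
            if handler.notify_host(original_host):
                self._tqm.send_callback('v2_playbook_on_notify', handler, original_host)
        else:
            # not in the handler phase yet: saving the notification once is enough,
            # it is expanded into individual handlers when they are flushed
            iterator.add_notification(original_host.name, notification)
            display.vv(f"Notification for handler {notification} has been saved.")
            break
    if not found:
        msg = (
            f"The requested handler '{notification}' was not found in either the main handlers"
            " list nor in the listening handlers list"
        )
        if C.ERROR_ON_MISSING_HANDLER:
            raise AnsibleError(msg)
        else:
            display.warning(msg)
```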
Thank you! :)
snip
### Issue Type
Bug Report
### Component Name
handlers
### Ansible Version
```console
$ ansible --version
ansible [core 2.16.0]
config file = None
configured module search path = ['/home/user/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.11/site-packages/ansible
ansible collection location = /home/user/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/bin/ansible
python version = 3.11.6 (main, Nov 14 2023, 09:36:21) [GCC 13.2.1 20230801] (/usr/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
EDITOR(env: EDITOR) = /usr/bin/vim
```
### OS / Environment
Arch Linux, with the following Ansible-related packages :
- ansible 9.0.1-1
- ansible-core 2.16.0-1
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
---
- name: test listen-based handlers with recursive notifications
hosts: localhost
gather_facts: false
tasks:
- name: notify handler 1
command: echo
changed_when: true
notify: handler 1
handlers:
- name: handler 1
debug:
msg: handler 1
changed_when: true
notify: handler_2
- name: handler 2a
debug:
msg: handler 2a
listen: handler_2
- name: handler 2b
debug:
msg: handler 2b
listen: handler_2
```
### Expected Results
All handlers should be run, especially both handlers listening on the `handler_2` notification (i.e., `handler 2a` and `handler 2b`):
```
PLAY [test listen-based handlers with recursive notifications] **************************************************************************************
TASK [notify handler 1] *****************************************************************************************************************************
task path: /tmp/ansible/test.yml:7
Notification for handler handler 1 has been saved.
changed: [localhost] => {"changed": true, "cmd": ["echo"], "delta": "0:00:00.004205", "end": "2023-12-06 10:57:16.556224", "msg": "", "rc": 0, "start": "2023-12-06 10:57:16.552019", "stderr": "", "stderr_lines": [], "stdout": "", "stdout_lines": []}
NOTIFIED HANDLER handler 1 for localhost
RUNNING HANDLER [handler 1] *************************************************************************************************************************
task path: /tmp/ansible/test.yml:13
NOTIFIED HANDLER handler 2a for localhost
NOTIFIED HANDLER handler 2b for localhost
changed: [localhost] => {
"msg": "handler 1"
}
RUNNING HANDLER [handler 2a] ************************************************************************************************************************
task path: /tmp/ansible/test.yml:19
ok: [localhost] => {
"msg": "handler 2a"
}
RUNNING HANDLER [handler 2b] ************************************************************************************************************************
task path: /tmp/ansible/test.yml:24
ok: [localhost] => {
"msg": "handler 2b"
}
PLAY RECAP ******************************************************************************************************************************************
localhost : ok=4 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Actual Results
```console
PLAY [test listen-based handlers with recursive notifications] ***********************************************************
TASK [notify handler 1] **************************************************************************************************
task path: /tmp/ansible/test.yml:7
Notification for handler handler 1 has been saved.
changed: [localhost] => {"changed": true, "cmd": ["echo"], "delta": "0:00:00.003800", "end": "2023-12-06 10:56:26.513556", "msg": "", "rc": 0, "start": "2023-12-06 10:56:26.509756", "stderr": "", "stderr_lines": [], "stdout": "", "stdout_lines": []}
NOTIFIED HANDLER handler 1 for localhost
RUNNING HANDLER [handler 1] **********************************************************************************************
task path: /tmp/ansible/test.yml:13
NOTIFIED HANDLER handler 2a for localhost
changed: [localhost] => {
"msg": "handler 1"
}
RUNNING HANDLER [handler 2a] *********************************************************************************************
task path: /tmp/ansible/test.yml:19
ok: [localhost] => {
"msg": "handler 2a"
}
PLAY RECAP ***************************************************************************************************************
localhost : ok=3 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82363
|
https://github.com/ansible/ansible/pull/82364
|
fe81164fe548d79fbcd0024836d5f7474403c95d
|
83281531216ee64cd054959f2bfe54c6df498443
| 2023-12-06T10:05:01Z |
python
| 2023-12-13T09:56:52Z |
changelogs/fragments/82363-multiple-handlers-with-recursive-notification.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,363 |
Only one handler of a `listen` group is run when notified from another handler
|
### Summary
Hello!
In ansible-core 2.15 and later (I think since the changes introduced by #79558), it seems that, when multiple handlers of the same `listen` group are notified by another handler, only the first one in the group is run.
From what I understand, when Ansible handles a `notify` while it is already iterating handlers, it only considers the first handler that matches the notification and does not iterate through all the matching handlers. I believe the `break` in `lib/ansible/plugins/strategy/__init__.py` (line 669) should only apply when Ansible is not already iterating handlers (i.e., in the `else` branch):
https://github.com/ansible/ansible/blob/6ebefaceb6cd0d4961776a94d63a71fc1fc28bc0/lib/ansible/plugins/strategy/__init__.py#L659-L669
There seems to be a simple fix for that, which I'm planning to submit as a PR.
Thank you! :)
snip
### Issue Type
Bug Report
### Component Name
handlers
### Ansible Version
```console
$ ansible --version
ansible [core 2.16.0]
config file = None
configured module search path = ['/home/user/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.11/site-packages/ansible
ansible collection location = /home/user/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/bin/ansible
python version = 3.11.6 (main, Nov 14 2023, 09:36:21) [GCC 13.2.1 20230801] (/usr/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
EDITOR(env: EDITOR) = /usr/bin/vim
```
### OS / Environment
Arch Linux, with the following Ansible-related packages :
- ansible 9.0.1-1
- ansible-core 2.16.0-1
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
---
- name: test listen-based handlers with recursive notifications
hosts: localhost
gather_facts: false
tasks:
- name: notify handler 1
command: echo
changed_when: true
notify: handler 1
handlers:
- name: handler 1
debug:
msg: handler 1
changed_when: true
notify: handler_2
- name: handler 2a
debug:
msg: handler 2a
listen: handler_2
- name: handler 2b
debug:
msg: handler 2b
listen: handler_2
```
### Expected Results
All handlers should be run, especially both handlers listening on the `handler_2` notification (i.e., `handler 2a` and `handler 2b`):
```
PLAY [test listen-based handlers with recursive notifications] **************************************************************************************
TASK [notify handler 1] *****************************************************************************************************************************
task path: /tmp/ansible/test.yml:7
Notification for handler handler 1 has been saved.
changed: [localhost] => {"changed": true, "cmd": ["echo"], "delta": "0:00:00.004205", "end": "2023-12-06 10:57:16.556224", "msg": "", "rc": 0, "start": "2023-12-06 10:57:16.552019", "stderr": "", "stderr_lines": [], "stdout": "", "stdout_lines": []}
NOTIFIED HANDLER handler 1 for localhost
RUNNING HANDLER [handler 1] *************************************************************************************************************************
task path: /tmp/ansible/test.yml:13
NOTIFIED HANDLER handler 2a for localhost
NOTIFIED HANDLER handler 2b for localhost
changed: [localhost] => {
"msg": "handler 1"
}
RUNNING HANDLER [handler 2a] ************************************************************************************************************************
task path: /tmp/ansible/test.yml:19
ok: [localhost] => {
"msg": "handler 2a"
}
RUNNING HANDLER [handler 2b] ************************************************************************************************************************
task path: /tmp/ansible/test.yml:24
ok: [localhost] => {
"msg": "handler 2b"
}
PLAY RECAP ******************************************************************************************************************************************
localhost : ok=4 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Actual Results
```console
PLAY [test listen-based handlers with recursive notifications] ***********************************************************
TASK [notify handler 1] **************************************************************************************************
task path: /tmp/ansible/test.yml:7
Notification for handler handler 1 has been saved.
changed: [localhost] => {"changed": true, "cmd": ["echo"], "delta": "0:00:00.003800", "end": "2023-12-06 10:56:26.513556", "msg": "", "rc": 0, "start": "2023-12-06 10:56:26.509756", "stderr": "", "stderr_lines": [], "stdout": "", "stdout_lines": []}
NOTIFIED HANDLER handler 1 for localhost
RUNNING HANDLER [handler 1] **********************************************************************************************
task path: /tmp/ansible/test.yml:13
NOTIFIED HANDLER handler 2a for localhost
changed: [localhost] => {
"msg": "handler 1"
}
RUNNING HANDLER [handler 2a] *********************************************************************************************
task path: /tmp/ansible/test.yml:19
ok: [localhost] => {
"msg": "handler 2a"
}
PLAY RECAP ***************************************************************************************************************
localhost : ok=3 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82363
|
https://github.com/ansible/ansible/pull/82364
|
fe81164fe548d79fbcd0024836d5f7474403c95d
|
83281531216ee64cd054959f2bfe54c6df498443
| 2023-12-06T10:05:01Z |
python
| 2023-12-13T09:56:52Z |
lib/ansible/plugins/strategy/__init__.py
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
import cmd
import functools
import os
import pprint
import queue
import sys
import threading
import time
import typing as t
from collections import deque
from multiprocessing import Lock
from jinja2.exceptions import UndefinedError
from ansible import constants as C
from ansible import context
from ansible.errors import AnsibleError, AnsibleFileNotFound, AnsibleUndefinedVariable, AnsibleParserError
from ansible.executor import action_write_locks
from ansible.executor.play_iterator import IteratingStates, PlayIterator
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.task_result import TaskResult
from ansible.executor.task_queue_manager import CallbackSend, DisplaySend, PromptSend
from ansible.module_utils.six import string_types
from ansible.module_utils.common.text.converters import to_text
from ansible.module_utils.connection import Connection, ConnectionError
from ansible.playbook.conditional import Conditional
from ansible.playbook.handler import Handler
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.task import Task
from ansible.playbook.task_include import TaskInclude
from ansible.plugins import loader as plugin_loader
from ansible.template import Templar
from ansible.utils.display import Display
from ansible.utils.fqcn import add_internal_fqcns
from ansible.utils.unsafe_proxy import wrap_var
from ansible.utils.vars import combine_vars, isidentifier
from ansible.vars.clean import strip_internal_keys, module_response_deepcopy
display = Display()
__all__ = ['StrategyBase']
# This list can be an exact match, or start of string bound
# does not accept regex
ALWAYS_DELEGATE_FACT_PREFIXES = frozenset((
'discovered_interpreter_',
))
class StrategySentinel:
pass
_sentinel = StrategySentinel()
def post_process_whens(result, task, templar, task_vars):
cond = None
if task.changed_when:
with templar.set_temporary_context(available_variables=task_vars):
cond = Conditional(loader=templar._loader)
cond.when = task.changed_when
result['changed'] = cond.evaluate_conditional(templar, templar.available_variables)
if task.failed_when:
with templar.set_temporary_context(available_variables=task_vars):
if cond is None:
cond = Conditional(loader=templar._loader)
cond.when = task.failed_when
failed_when_result = cond.evaluate_conditional(templar, templar.available_variables)
result['failed_when_result'] = result['failed'] = failed_when_result
def _get_item_vars(result, task):
item_vars = {}
if task.loop or task.loop_with:
loop_var = result.get('ansible_loop_var', 'item')
index_var = result.get('ansible_index_var')
if loop_var in result:
item_vars[loop_var] = result[loop_var]
if index_var and index_var in result:
item_vars[index_var] = result[index_var]
if '_ansible_item_label' in result:
item_vars['_ansible_item_label'] = result['_ansible_item_label']
if 'ansible_loop' in result:
item_vars['ansible_loop'] = result['ansible_loop']
return item_vars
def results_thread_main(strategy):
while True:
try:
result = strategy._final_q.get()
if isinstance(result, StrategySentinel):
break
elif isinstance(result, DisplaySend):
dmethod = getattr(display, result.method)
dmethod(*result.args, **result.kwargs)
elif isinstance(result, CallbackSend):
for arg in result.args:
if isinstance(arg, TaskResult):
strategy.normalize_task_result(arg)
break
strategy._tqm.send_callback(result.method_name, *result.args, **result.kwargs)
elif isinstance(result, TaskResult):
strategy.normalize_task_result(result)
with strategy._results_lock:
strategy._results.append(result)
elif isinstance(result, PromptSend):
try:
value = display.prompt_until(
result.prompt,
private=result.private,
seconds=result.seconds,
complete_input=result.complete_input,
interrupt_input=result.interrupt_input,
)
except AnsibleError as e:
value = e
except BaseException as e:
# relay unexpected errors so bugs in display are reported and don't cause workers to hang
try:
raise AnsibleError(f"{e}") from e
except AnsibleError as e:
value = e
strategy._workers[result.worker_id].worker_queue.put(value)
else:
display.warning('Received an invalid object (%s) in the result queue: %r' % (type(result), result))
except (IOError, EOFError):
break
except queue.Empty:
pass
def debug_closure(func):
"""Closure to wrap ``StrategyBase._process_pending_results`` and invoke the task debugger"""
@functools.wraps(func)
def inner(self, iterator, one_pass=False, max_passes=None):
status_to_stats_map = (
('is_failed', 'failures'),
('is_unreachable', 'dark'),
('is_changed', 'changed'),
('is_skipped', 'skipped'),
)
# We don't know the host yet, copy the previous states, for lookup after we process new results
prev_host_states = iterator.host_states.copy()
results = func(self, iterator, one_pass=one_pass, max_passes=max_passes)
_processed_results = []
for result in results:
task = result._task
host = result._host
_queued_task_args = self._queued_task_cache.pop((host.name, task._uuid), None)
task_vars = _queued_task_args['task_vars']
play_context = _queued_task_args['play_context']
# Try to grab the previous host state, if it doesn't exist use get_host_state to generate an empty state
try:
prev_host_state = prev_host_states[host.name]
except KeyError:
prev_host_state = iterator.get_host_state(host)
while result.needs_debugger(globally_enabled=self.debugger_active):
next_action = NextAction()
dbg = Debugger(task, host, task_vars, play_context, result, next_action)
dbg.cmdloop()
if next_action.result == NextAction.REDO:
# rollback host state
self._tqm.clear_failed_hosts()
if task.run_once and iterator._play.strategy in add_internal_fqcns(('linear',)) and result.is_failed():
for host_name, state in prev_host_states.items():
if host_name == host.name:
continue
iterator.set_state_for_host(host_name, state)
iterator._play._removed_hosts.remove(host_name)
iterator.set_state_for_host(host.name, prev_host_state)
for method, what in status_to_stats_map:
if getattr(result, method)():
self._tqm._stats.decrement(what, host.name)
self._tqm._stats.decrement('ok', host.name)
# redo
self._queue_task(host, task, task_vars, play_context)
_processed_results.extend(debug_closure(func)(self, iterator, one_pass))
break
elif next_action.result == NextAction.CONTINUE:
_processed_results.append(result)
break
elif next_action.result == NextAction.EXIT:
# Matches KeyboardInterrupt from bin/ansible
sys.exit(99)
else:
_processed_results.append(result)
return _processed_results
return inner
class StrategyBase:
'''
This is the base class for strategy plugins, which contains some common
code useful to all strategies like running handlers, cleanup actions, etc.
'''
# by default, strategies should support throttling but we allow individual
# strategies to disable this and either forego supporting it or managing
# the throttling internally (as `free` does)
ALLOW_BASE_THROTTLING = True
def __init__(self, tqm):
self._tqm = tqm
self._inventory = tqm.get_inventory()
self._workers = tqm._workers
self._variable_manager = tqm.get_variable_manager()
self._loader = tqm.get_loader()
self._final_q = tqm._final_q
self._step = context.CLIARGS.get('step', False)
self._diff = context.CLIARGS.get('diff', False)
# the task cache is a dictionary of tuples of (host.name, task._uuid)
# used to find the original task object of in-flight tasks and to store
# the task args/vars and play context info used to queue the task.
self._queued_task_cache = {}
# Backwards compat: self._display isn't really needed, just import the global display and use that.
self._display = display
# internal counters
self._pending_results = 0
self._cur_worker = 0
# this dictionary is used to keep track of hosts that have
# outstanding tasks still in queue
self._blocked_hosts = dict()
self._results = deque()
self._results_lock = threading.Condition(threading.Lock())
self._worker_queues = dict()
# create the result processing thread for reading results in the background
self._results_thread = threading.Thread(target=results_thread_main, args=(self,))
self._results_thread.daemon = True
self._results_thread.start()
# holds the list of active (persistent) connections to be shutdown at
# play completion
self._active_connections = dict()
# Caches for get_host calls, to avoid calling excessively
# These values should be set at the top of the ``run`` method of each
# strategy plugin. Use ``_set_hosts_cache`` to set these values
self._hosts_cache = []
self._hosts_cache_all = []
self.debugger_active = C.ENABLE_TASK_DEBUGGER
def _set_hosts_cache(self, play, refresh=True):
"""Responsible for setting _hosts_cache and _hosts_cache_all
See comment in ``__init__`` for the purpose of these caches
"""
if not refresh and all((self._hosts_cache, self._hosts_cache_all)):
return
if not play.finalized and Templar(None).is_template(play.hosts):
_pattern = 'all'
else:
_pattern = play.hosts or 'all'
self._hosts_cache_all = [h.name for h in self._inventory.get_hosts(pattern=_pattern, ignore_restrictions=True)]
self._hosts_cache = [h.name for h in self._inventory.get_hosts(play.hosts, order=play.order)]
def cleanup(self):
# close active persistent connections
for sock in self._active_connections.values():
try:
conn = Connection(sock)
conn.reset()
except ConnectionError as e:
# most likely socket is already closed
display.debug("got an error while closing persistent connection: %s" % e)
self._final_q.put(_sentinel)
self._results_thread.join()
def run(self, iterator, play_context, result=0):
# execute one more pass through the iterator without peeking, to
# make sure that all of the hosts are advanced to their final task.
# This should be safe, as everything should be IteratingStates.COMPLETE by
# this point, though the strategy may not advance the hosts itself.
for host in self._hosts_cache:
if host not in self._tqm._unreachable_hosts:
try:
iterator.get_next_task_for_host(self._inventory.hosts[host])
except KeyError:
iterator.get_next_task_for_host(self._inventory.get_host(host))
# return the appropriate code, depending on the status hosts after the run
if not isinstance(result, bool) and result != self._tqm.RUN_OK:
return result
elif len(self._tqm._unreachable_hosts.keys()) > 0:
return self._tqm.RUN_UNREACHABLE_HOSTS
elif len(iterator.get_failed_hosts()) > 0:
return self._tqm.RUN_FAILED_HOSTS
else:
return self._tqm.RUN_OK
def get_hosts_remaining(self, play):
self._set_hosts_cache(play, refresh=False)
ignore = set(self._tqm._failed_hosts).union(self._tqm._unreachable_hosts)
return [host for host in self._hosts_cache if host not in ignore]
def get_failed_hosts(self, play):
self._set_hosts_cache(play, refresh=False)
return [host for host in self._hosts_cache if host in self._tqm._failed_hosts]
def add_tqm_variables(self, vars, play):
'''
Base class method to add extra variables/information to the list of task
vars sent through the executor engine regarding the task queue manager state.
'''
vars['ansible_current_hosts'] = self.get_hosts_remaining(play)
vars['ansible_failed_hosts'] = self.get_failed_hosts(play)
def _queue_task(self, host, task, task_vars, play_context):
''' handles queueing the task up to be sent to a worker '''
display.debug("entering _queue_task() for %s/%s" % (host.name, task.action))
# Add a write lock for tasks.
# Maybe this should be added somewhere further up the call stack but
# this is the earliest in the code where we have task (1) extracted
# into its own variable and (2) there's only a single code path
# leading to the module being run. This is called by two
# functions: linear.py::run(), and
# free.py::run() so we'd have to add to both to do it there.
# The next common higher level is __init__.py::run() and that has
# tasks inside of play_iterator so we'd have to extract them to do it
# there.
if task.action not in action_write_locks.action_write_locks:
display.debug('Creating lock for %s' % task.action)
action_write_locks.action_write_locks[task.action] = Lock()
# create a templar and template things we need later for the queuing process
templar = Templar(loader=self._loader, variables=task_vars)
try:
throttle = int(templar.template(task.throttle))
except Exception as e:
raise AnsibleError("Failed to convert the throttle value to an integer.", obj=task._ds, orig_exc=e)
# and then queue the new task
try:
# Determine the "rewind point" of the worker list. This means we start
# iterating over the list of workers until the end of the list is found.
# Normally, that is simply the length of the workers list (as determined
# by the forks or serial setting), however a task/block/play may "throttle"
# that limit down.
rewind_point = len(self._workers)
if throttle > 0 and self.ALLOW_BASE_THROTTLING:
if task.run_once:
display.debug("Ignoring 'throttle' as 'run_once' is also set for '%s'" % task.get_name())
else:
if throttle <= rewind_point:
display.debug("task: %s, throttle: %d" % (task.get_name(), throttle))
rewind_point = throttle
queued = False
starting_worker = self._cur_worker
while True:
if self._cur_worker >= rewind_point:
self._cur_worker = 0
worker_prc = self._workers[self._cur_worker]
if worker_prc is None or not worker_prc.is_alive():
self._queued_task_cache[(host.name, task._uuid)] = {
'host': host,
'task': task,
'task_vars': task_vars,
'play_context': play_context
}
# Pass WorkerProcess its strategy worker number so it can send an identifier along with intra-task requests
worker_prc = WorkerProcess(
self._final_q, task_vars, host, task, play_context, self._loader, self._variable_manager, plugin_loader, self._cur_worker,
)
self._workers[self._cur_worker] = worker_prc
self._tqm.send_callback('v2_runner_on_start', host, task)
worker_prc.start()
display.debug("worker is %d (out of %d available)" % (self._cur_worker + 1, len(self._workers)))
queued = True
self._cur_worker += 1
if self._cur_worker >= rewind_point:
self._cur_worker = 0
if queued:
break
elif self._cur_worker == starting_worker:
time.sleep(0.0001)
self._pending_results += 1
except (EOFError, IOError, AssertionError) as e:
# most likely an abort
display.debug("got an error while queuing: %s" % e)
return
display.debug("exiting _queue_task() for %s/%s" % (host.name, task.action))
def get_task_hosts(self, iterator, task_host, task):
if task.run_once:
host_list = [host for host in self._hosts_cache if host not in self._tqm._unreachable_hosts]
else:
host_list = [task_host.name]
return host_list
def get_delegated_hosts(self, result, task):
host_name = result.get('_ansible_delegated_vars', {}).get('ansible_delegated_host', None)
return [host_name or task.delegate_to]
def _set_always_delegated_facts(self, result, task):
"""Sets host facts for ``delegate_to`` hosts for facts that should
always be delegated
This operation mutates ``result`` to remove the always delegated facts
See ``ALWAYS_DELEGATE_FACT_PREFIXES``
"""
if task.delegate_to is None:
return
facts = result['ansible_facts']
always_keys = set()
_add = always_keys.add
for fact_key in facts:
for always_key in ALWAYS_DELEGATE_FACT_PREFIXES:
if fact_key.startswith(always_key):
_add(fact_key)
if always_keys:
_pop = facts.pop
always_facts = {
'ansible_facts': dict((k, _pop(k)) for k in list(facts) if k in always_keys)
}
host_list = self.get_delegated_hosts(result, task)
_set_host_facts = self._variable_manager.set_host_facts
for target_host in host_list:
_set_host_facts(target_host, always_facts)
def normalize_task_result(self, task_result):
"""Normalize a TaskResult to reference actual Host and Task objects
when only given the ``Host.name``, or the ``Task._uuid``
Only the ``Host.name`` and ``Task._uuid`` are commonly sent back from
the ``TaskExecutor`` or ``WorkerProcess`` due to performance concerns
Mutates the original object
"""
if isinstance(task_result._host, string_types):
# If the value is a string, it is ``Host.name``
task_result._host = self._inventory.get_host(to_text(task_result._host))
if isinstance(task_result._task, string_types):
# If the value is a string, it is ``Task._uuid``
queue_cache_entry = (task_result._host.name, task_result._task)
try:
found_task = self._queued_task_cache[queue_cache_entry]['task']
except KeyError:
# This should only happen due to an implicit task created by the
# TaskExecutor, restrict this behavior to the explicit use case
# of an implicit async_status task
if task_result._task_fields.get('action') != 'async_status':
raise
original_task = Task()
else:
original_task = found_task.copy(exclude_parent=True, exclude_tasks=True)
original_task._parent = found_task._parent
original_task.from_attrs(task_result._task_fields)
task_result._task = original_task
return task_result
def search_handlers_by_notification(self, notification: str, iterator: PlayIterator) -> t.Generator[Handler, None, None]:
templar = Templar(None)
handlers = [h for b in reversed(iterator._play.handlers) for h in b.block]
# iterate in reversed order since last handler loaded with the same name wins
for handler in handlers:
if not handler.name:
continue
if not handler.cached_name:
if templar.is_template(handler.name):
templar.available_variables = self._variable_manager.get_vars(
play=iterator._play,
task=handler,
_hosts=self._hosts_cache,
_hosts_all=self._hosts_cache_all
)
try:
handler.name = templar.template(handler.name)
except (UndefinedError, AnsibleUndefinedVariable) as e:
# We skip this handler due to the fact that it may be using
# a variable in the name that was conditionally included via
# set_fact or some other method, and we don't want to error
# out unnecessarily
if not handler.listen:
display.warning(
"Handler '%s' is unusable because it has no listen topics and "
"the name could not be templated (host-specific variables are "
"not supported in handler names). The error: %s" % (handler.name, to_text(e))
)
continue
handler.cached_name = True
# first we check with the full result of get_name(), which may
# include the role name (if the handler is from a role). If that
# is not found, we resort to the simple name field, which doesn't
# have anything extra added to it.
if notification in {
handler.name,
handler.get_name(include_role_fqcn=False),
handler.get_name(include_role_fqcn=True),
}:
yield handler
break
templar.available_variables = {}
seen = []
for handler in handlers:
if listeners := handler.listen:
if notification in handler.get_validated_value(
'listen',
handler.fattributes.get('listen'),
listeners,
templar,
):
if handler.name and handler.name in seen:
continue
seen.append(handler.name)
yield handler
@debug_closure
def _process_pending_results(self, iterator, one_pass=False, max_passes=None):
'''
Reads results off the final queue and takes appropriate action
based on the result (executing callbacks, updating state, etc.).
'''
ret_results = []
cur_pass = 0
while True:
try:
self._results_lock.acquire()
task_result = self._results.popleft()
except IndexError:
break
finally:
self._results_lock.release()
original_host = task_result._host
original_task = task_result._task
# all host status messages contain 2 entries: (msg, task_result)
role_ran = False
if task_result.is_failed():
role_ran = True
ignore_errors = original_task.ignore_errors
if not ignore_errors:
# save the current state before failing it for later inspection
state_when_failed = iterator.get_state_for_host(original_host.name)
display.debug("marking %s as failed" % original_host.name)
if original_task.run_once:
# if we're using run_once, we have to fail every host here
for h in self._inventory.get_hosts(iterator._play.hosts):
if h.name not in self._tqm._unreachable_hosts:
iterator.mark_host_failed(h)
else:
iterator.mark_host_failed(original_host)
state, dummy = iterator.get_next_task_for_host(original_host, peek=True)
if iterator.is_failed(original_host) and state and state.run_state == IteratingStates.COMPLETE:
self._tqm._failed_hosts[original_host.name] = True
# if we're iterating on the rescue portion of a block then
# we save the failed task in a special var for use
# within the rescue/always
if iterator.is_any_block_rescuing(state_when_failed):
self._tqm._stats.increment('rescued', original_host.name)
iterator._play._removed_hosts.remove(original_host.name)
self._variable_manager.set_nonpersistent_facts(
original_host.name,
dict(
ansible_failed_task=wrap_var(original_task.serialize()),
ansible_failed_result=task_result._result,
),
)
else:
self._tqm._stats.increment('failures', original_host.name)
else:
self._tqm._stats.increment('ok', original_host.name)
self._tqm._stats.increment('ignored', original_host.name)
if 'changed' in task_result._result and task_result._result['changed']:
self._tqm._stats.increment('changed', original_host.name)
self._tqm.send_callback('v2_runner_on_failed', task_result, ignore_errors=ignore_errors)
elif task_result.is_unreachable():
ignore_unreachable = original_task.ignore_unreachable
if not ignore_unreachable:
self._tqm._unreachable_hosts[original_host.name] = True
iterator._play._removed_hosts.append(original_host.name)
self._tqm._stats.increment('dark', original_host.name)
else:
self._tqm._stats.increment('ok', original_host.name)
self._tqm._stats.increment('ignored', original_host.name)
self._tqm.send_callback('v2_runner_on_unreachable', task_result)
elif task_result.is_skipped():
self._tqm._stats.increment('skipped', original_host.name)
self._tqm.send_callback('v2_runner_on_skipped', task_result)
else:
role_ran = True
if original_task.loop:
# this task had a loop, and has more than one result, so
# loop over all of them instead of a single result
result_items = task_result._result.get('results', [])
else:
result_items = [task_result._result]
for result_item in result_items:
if '_ansible_notify' in result_item and task_result.is_changed():
# only ensure that notified handlers exist, if so save the notifications for when
                        # handlers are actually flushed so the last defined handlers are executed,
# otherwise depending on the setting either error or warn
host_state = iterator.get_state_for_host(original_host.name)
for notification in result_item['_ansible_notify']:
for handler in self.search_handlers_by_notification(notification, iterator):
if host_state.run_state == IteratingStates.HANDLERS:
# we're currently iterating handlers, so we need to expand this now
if handler.notify_host(original_host):
# NOTE even with notifications deduplicated this can still happen in case of handlers being
# notified multiple times using different names, like role name or fqcn
self._tqm.send_callback('v2_playbook_on_notify', handler, original_host)
else:
iterator.add_notification(original_host.name, notification)
display.vv(f"Notification for handler {notification} has been saved.")
break
else:
msg = (
f"The requested handler '{notification}' was not found in either the main handlers"
" list nor in the listening handlers list"
)
if C.ERROR_ON_MISSING_HANDLER:
raise AnsibleError(msg)
else:
display.warning(msg)
if 'add_host' in result_item:
# this task added a new host (add_host module)
new_host_info = result_item.get('add_host', dict())
self._inventory.add_dynamic_host(new_host_info, result_item)
# ensure host is available for subsequent plays
if result_item.get('changed') and new_host_info['host_name'] not in self._hosts_cache_all:
self._hosts_cache_all.append(new_host_info['host_name'])
elif 'add_group' in result_item:
# this task added a new group (group_by module)
self._inventory.add_dynamic_group(original_host, result_item)
if 'add_host' in result_item or 'add_group' in result_item:
item_vars = _get_item_vars(result_item, original_task)
found_task_vars = self._queued_task_cache.get((original_host.name, task_result._task._uuid))['task_vars']
if item_vars:
all_task_vars = combine_vars(found_task_vars, item_vars)
else:
all_task_vars = found_task_vars
all_task_vars[original_task.register] = wrap_var(result_item)
post_process_whens(result_item, original_task, Templar(self._loader), all_task_vars)
if original_task.loop or original_task.loop_with:
new_item_result = TaskResult(
task_result._host,
task_result._task,
result_item,
task_result._task_fields,
)
self._tqm.send_callback('v2_runner_item_on_ok', new_item_result)
if result_item.get('changed', False):
task_result._result['changed'] = True
if result_item.get('failed', False):
task_result._result['failed'] = True
if 'ansible_facts' in result_item and original_task.action not in C._ACTION_DEBUG:
# if delegated fact and we are delegating facts, we need to change target host for them
if original_task.delegate_to is not None and original_task.delegate_facts:
host_list = self.get_delegated_hosts(result_item, original_task)
else:
# Set facts that should always be on the delegated hosts
self._set_always_delegated_facts(result_item, original_task)
host_list = self.get_task_hosts(iterator, original_host, original_task)
if original_task.action in C._ACTION_INCLUDE_VARS:
for (var_name, var_value) in result_item['ansible_facts'].items():
# find the host we're actually referring too here, which may
# be a host that is not really in inventory at all
for target_host in host_list:
self._variable_manager.set_host_variable(target_host, var_name, var_value)
else:
cacheable = result_item.pop('_ansible_facts_cacheable', False)
for target_host in host_list:
# so set_fact is a misnomer but 'cacheable = true' was meant to create an 'actual fact'
# to avoid issues with precedence and confusion with set_fact normal operation,
# we set BOTH fact and nonpersistent_facts (aka hostvar)
# when fact is retrieved from cache in subsequent operations it will have the lower precedence,
# but for playbook setting it the 'higher' precedence is kept
is_set_fact = original_task.action in C._ACTION_SET_FACT
if not is_set_fact or cacheable:
self._variable_manager.set_host_facts(target_host, result_item['ansible_facts'].copy())
if is_set_fact:
self._variable_manager.set_nonpersistent_facts(target_host, result_item['ansible_facts'].copy())
if 'ansible_stats' in result_item and 'data' in result_item['ansible_stats'] and result_item['ansible_stats']['data']:
if 'per_host' not in result_item['ansible_stats'] or result_item['ansible_stats']['per_host']:
host_list = self.get_task_hosts(iterator, original_host, original_task)
else:
host_list = [None]
data = result_item['ansible_stats']['data']
aggregate = 'aggregate' in result_item['ansible_stats'] and result_item['ansible_stats']['aggregate']
for myhost in host_list:
for k in data.keys():
if aggregate:
self._tqm._stats.update_custom_stats(k, data[k], myhost)
else:
self._tqm._stats.set_custom_stats(k, data[k], myhost)
if 'diff' in task_result._result:
if self._diff or getattr(original_task, 'diff', False):
self._tqm.send_callback('v2_on_file_diff', task_result)
if not isinstance(original_task, TaskInclude):
self._tqm._stats.increment('ok', original_host.name)
if 'changed' in task_result._result and task_result._result['changed']:
self._tqm._stats.increment('changed', original_host.name)
# finally, send the ok for this task
self._tqm.send_callback('v2_runner_on_ok', task_result)
# register final results
if original_task.register:
if not isidentifier(original_task.register):
raise AnsibleError("Invalid variable name in 'register' specified: '%s'" % original_task.register)
host_list = self.get_task_hosts(iterator, original_host, original_task)
clean_copy = strip_internal_keys(module_response_deepcopy(task_result._result))
if 'invocation' in clean_copy:
del clean_copy['invocation']
for target_host in host_list:
self._variable_manager.set_nonpersistent_facts(target_host, {original_task.register: clean_copy})
self._pending_results -= 1
if original_host.name in self._blocked_hosts:
del self._blocked_hosts[original_host.name]
# If this is a role task, mark the parent role as being run (if
# the task was ok or failed, but not skipped or unreachable)
if original_task._role is not None and role_ran: # TODO: and original_task.action not in C._ACTION_INCLUDE_ROLE:?
# lookup the role in the role cache to make sure we're dealing
# with the correct object and mark it as executed
role_obj = self._get_cached_role(original_task, iterator._play)
role_obj._had_task_run[original_host.name] = True
ret_results.append(task_result)
if one_pass or max_passes is not None and (cur_pass + 1) >= max_passes:
break
cur_pass += 1
return ret_results
def _wait_on_pending_results(self, iterator):
'''
Wait for the shared counter to drop to zero, using a short sleep
between checks to ensure we don't spin lock
'''
ret_results = []
display.debug("waiting for pending results...")
while self._pending_results > 0 and not self._tqm._terminated:
if self._tqm.has_dead_workers():
raise AnsibleError("A worker was found in a dead state")
results = self._process_pending_results(iterator)
ret_results.extend(results)
if self._pending_results > 0:
time.sleep(C.DEFAULT_INTERNAL_POLL_INTERVAL)
display.debug("no more pending results, returning what we have")
return ret_results
def _copy_included_file(self, included_file):
'''
A proven safe and performant way to create a copy of an included file
'''
ti_copy = included_file._task.copy(exclude_parent=True)
ti_copy._parent = included_file._task._parent
temp_vars = ti_copy.vars | included_file._vars
ti_copy.vars = temp_vars
return ti_copy
def _load_included_file(self, included_file, iterator, is_handler=False):
'''
Loads an included YAML file of tasks, applying the optional set of variables.
Raises AnsibleError exception in case of a failure during including a file,
in such case the caller is responsible for marking the host(s) as failed
using PlayIterator.mark_host_failed().
'''
display.debug("loading included file: %s" % included_file._filename)
try:
data = self._loader.load_from_file(included_file._filename)
if data is None:
return []
elif not isinstance(data, list):
raise AnsibleError("included task files must contain a list of tasks")
ti_copy = self._copy_included_file(included_file)
block_list = load_list_of_blocks(
data,
play=iterator._play,
parent_block=ti_copy.build_parent_block(),
role=included_file._task._role,
use_handlers=is_handler,
loader=self._loader,
variable_manager=self._variable_manager,
)
# since we skip incrementing the stats when the task result is
# first processed, we do so now for each host in the list
for host in included_file._hosts:
self._tqm._stats.increment('ok', host.name)
except AnsibleParserError:
raise
except AnsibleError as e:
if isinstance(e, AnsibleFileNotFound):
reason = "Could not find or access '%s' on the Ansible Controller." % to_text(e.file_name)
else:
reason = to_text(e)
for r in included_file._results:
r._result['failed'] = True
for host in included_file._hosts:
tr = TaskResult(host=host, task=included_file._task, return_data=dict(failed=True, reason=reason))
self._tqm._stats.increment('failures', host.name)
self._tqm.send_callback('v2_runner_on_failed', tr)
raise AnsibleError(reason) from e
# finally, send the callback and return the list of blocks loaded
self._tqm.send_callback('v2_playbook_on_include', included_file)
display.debug("done processing included file")
return block_list
def _take_step(self, task, host=None):
ret = False
msg = u'Perform task: %s ' % task
if host:
msg += u'on %s ' % host
msg += u'(N)o/(y)es/(c)ontinue: '
resp = display.prompt(msg)
if resp.lower() in ['y', 'yes']:
display.debug("User ran task")
ret = True
elif resp.lower() in ['c', 'continue']:
display.debug("User ran task and canceled step mode")
self._step = False
ret = True
else:
display.debug("User skipped task")
display.banner(msg)
return ret
def _cond_not_supported_warn(self, task_name):
display.warning("%s task does not support when conditional" % task_name)
def _execute_meta(self, task, play_context, iterator, target_host):
# meta tasks store their args in the _raw_params field of args,
# since they do not use k=v pairs, so get that
meta_action = task.args.get('_raw_params')
def _evaluate_conditional(h):
all_vars = self._variable_manager.get_vars(play=iterator._play, host=h, task=task,
_hosts=self._hosts_cache, _hosts_all=self._hosts_cache_all)
templar = Templar(loader=self._loader, variables=all_vars)
return task.evaluate_conditional(templar, all_vars)
skipped = False
msg = meta_action
skip_reason = '%s conditional evaluated to False' % meta_action
if isinstance(task, Handler):
self._tqm.send_callback('v2_playbook_on_handler_task_start', task)
else:
self._tqm.send_callback('v2_playbook_on_task_start', task, is_conditional=False)
# These don't support "when" conditionals
if meta_action in ('noop', 'refresh_inventory', 'reset_connection') and task.when:
self._cond_not_supported_warn(meta_action)
if meta_action == 'noop':
msg = "noop"
elif meta_action == 'flush_handlers':
if _evaluate_conditional(target_host):
host_state = iterator.get_state_for_host(target_host.name)
# actually notify proper handlers based on all notifications up to this point
for notification in list(host_state.handler_notifications):
for handler in self.search_handlers_by_notification(notification, iterator):
if handler.notify_host(target_host):
# NOTE even with notifications deduplicated this can still happen in case of handlers being
# notified multiple times using different names, like role name or fqcn
self._tqm.send_callback('v2_playbook_on_notify', handler, target_host)
iterator.clear_notification(target_host.name, notification)
if host_state.run_state == IteratingStates.HANDLERS:
raise AnsibleError('flush_handlers cannot be used as a handler')
if target_host.name not in self._tqm._unreachable_hosts:
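# remember the run state we are leaving so the iterator can restore it
# once all notified handlers for this host have been processed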
host_state.pre_flushing_run_state = host_state.run_state
host_state.run_state = IteratingStates.HANDLERS
msg = "triggered running handlers for %s" % target_host.name
else:
skipped = True
skip_reason += ', not running handlers for %s' % target_host.name
elif meta_action == 'refresh_inventory':
self._inventory.refresh_inventory()
self._set_hosts_cache(iterator._play)
msg = "inventory successfully refreshed"
elif meta_action == 'clear_facts':
if _evaluate_conditional(target_host):
for host in self._inventory.get_hosts(iterator._play.hosts):
hostname = host.get_name()
self._variable_manager.clear_facts(hostname)
msg = "facts cleared"
else:
skipped = True
skip_reason += ', not clearing facts and fact cache for %s' % target_host.name
elif meta_action == 'clear_host_errors':
if _evaluate_conditional(target_host):
for host in self._inventory.get_hosts(iterator._play.hosts):
self._tqm._failed_hosts.pop(host.name, False)
self._tqm._unreachable_hosts.pop(host.name, False)
iterator.clear_host_errors(host)
msg = "cleared host errors"
else:
skipped = True
skip_reason += ', not clearing host error state for %s' % target_host.name
elif meta_action == 'end_batch':
if _evaluate_conditional(target_host):
for host in self._inventory.get_hosts(iterator._play.hosts):
if host.name not in self._tqm._unreachable_hosts:
iterator.set_run_state_for_host(host.name, IteratingStates.COMPLETE)
msg = "ending batch"
else:
skipped = True
skip_reason += ', continuing current batch'
elif meta_action == 'end_play':
if _evaluate_conditional(target_host):
for host in self._inventory.get_hosts(iterator._play.hosts):
if host.name not in self._tqm._unreachable_hosts:
iterator.set_run_state_for_host(host.name, IteratingStates.COMPLETE)
# end_play is used in PlaybookExecutor/TQM to indicate that
# the whole play is supposed to be ended as opposed to just a batch
iterator.end_play = True
msg = "ending play"
else:
skipped = True
skip_reason += ', continuing play'
elif meta_action == 'end_host':
if _evaluate_conditional(target_host):
iterator.set_run_state_for_host(target_host.name, IteratingStates.COMPLETE)
iterator._play._removed_hosts.append(target_host.name)
msg = "ending play for %s" % target_host.name
else:
skipped = True
skip_reason += ", continuing execution for %s" % target_host.name
# TODO: Nix msg here? Left for historical reasons, but skip_reason exists now.
msg = "end_host conditional evaluated to false, continuing execution for %s" % target_host.name
elif meta_action == 'role_complete':
# Allow users to use this in a play as reported in https://github.com/ansible/ansible/issues/22286?
# How would this work with allow_duplicates??
if task.implicit:
role_obj = self._get_cached_role(task, iterator._play)
if target_host.name in role_obj._had_task_run:
role_obj._completed[target_host.name] = True
msg = 'role_complete for %s' % target_host.name
elif meta_action == 'reset_connection':
all_vars = self._variable_manager.get_vars(play=iterator._play, host=target_host, task=task,
_hosts=self._hosts_cache, _hosts_all=self._hosts_cache_all)
templar = Templar(loader=self._loader, variables=all_vars)
# apply the given task's information to the connection info,
# which may override some fields already set by the play or
# the options specified on the command line
play_context = play_context.set_task_and_variable_override(task=task, variables=all_vars, templar=templar)
# fields set from the play/task may be based on variables, so we have to
# do the same kind of post validation step on it here before we use it.
play_context.post_validate(templar=templar)
# now that the play context is finalized, if the remote_addr is not set
# default to using the host's address field as the remote address
if not play_context.remote_addr:
play_context.remote_addr = target_host.address
# We also add "magic" variables back into the variables dict to make sure
# a certain subset of variables exist. This 'mostly' works here cause meta
# disregards the loop, but should not really use play_context at all
play_context.update_vars(all_vars)
if target_host in self._active_connections:
connection = Connection(self._active_connections[target_host])
del self._active_connections[target_host]
else:
connection = plugin_loader.connection_loader.get(play_context.connection, play_context, os.devnull)
connection.set_options(task_keys=task.dump_attrs(), var_options=all_vars)
play_context.set_attributes_from_plugin(connection)
if connection:
try:
connection.reset()
msg = 'reset connection'
except ConnectionError as e:
# most likely socket is already closed
display.debug("got an error while closing persistent connection: %s" % e)
else:
msg = 'no connection, nothing to reset'
else:
raise AnsibleError("invalid meta action requested: %s" % meta_action, obj=task._ds)
result = {'msg': msg}
if skipped:
result['skipped'] = True
result['skip_reason'] = skip_reason
else:
result['changed'] = False
if not task.implicit:
header = skip_reason if skipped else msg
display.vv(f"META: {header}")
res = TaskResult(target_host, task, result)
if skipped:
self._tqm.send_callback('v2_runner_on_skipped', res)
return [res]
def _get_cached_role(self, task, play):
role_path = task._role.get_role_path()
role_cache = play.role_cache[role_path]
try:
idx = role_cache.index(task._role)
return role_cache[idx]
except ValueError:
raise AnsibleError(f'Cannot locate {task._role.get_name()} in role cache')
def get_hosts_left(self, iterator):
''' returns list of available hosts for this iterator by filtering out unreachables '''
hosts_left = []
for host in self._hosts_cache:
if host not in self._tqm._unreachable_hosts:
try:
hosts_left.append(self._inventory.hosts[host])
except KeyError:
hosts_left.append(self._inventory.get_host(host))
return hosts_left
def update_active_connections(self, results):
''' updates the current active persistent connections '''
for r in results:
if 'args' in r._task_fields:
socket_path = r._task_fields['args'].get('_ansible_socket')
if socket_path:
if r._host not in self._active_connections:
self._active_connections[r._host] = socket_path
class NextAction(object):
""" The next action after an interpreter's exit. """
REDO = 1
CONTINUE = 2
EXIT = 3
def __init__(self, result=EXIT):
self.result = result
class Debugger(cmd.Cmd):
prompt_continuous = '> ' # multiple lines
def __init__(self, task, host, task_vars, play_context, result, next_action):
# cmd.Cmd is old-style class
cmd.Cmd.__init__(self)
self.prompt = '[%s] %s (debug)> ' % (host, task)
self.intro = None
self.scope = {}
self.scope['task'] = task
self.scope['task_vars'] = task_vars
self.scope['host'] = host
self.scope['play_context'] = play_context
self.scope['result'] = result
self.next_action = next_action
def cmdloop(self):
try:
cmd.Cmd.cmdloop(self)
except KeyboardInterrupt:
pass
do_h = cmd.Cmd.do_help
def do_EOF(self, args):
"""Quit"""
return self.do_quit(args)
def do_quit(self, args):
"""Quit"""
display.display('User interrupted execution')
self.next_action.result = NextAction.EXIT
return True
do_q = do_quit
def do_continue(self, args):
"""Continue to next result"""
self.next_action.result = NextAction.CONTINUE
return True
do_c = do_continue
def do_redo(self, args):
"""Schedule task for re-execution. The re-execution may not be the next result"""
self.next_action.result = NextAction.REDO
return True
do_r = do_redo
def do_update_task(self, args):
"""Recreate the task from ``task._ds``, and template with updated ``task_vars``"""
templar = Templar(None, variables=self.scope['task_vars'])
task = self.scope['task']
task = task.load_data(task._ds)
task.post_validate(templar)
self.scope['task'] = task
do_u = do_update_task
def evaluate(self, args):
try:
return eval(args, globals(), self.scope)
except Exception:
t, v = sys.exc_info()[:2]
if isinstance(t, str):
exc_type_name = t
else:
exc_type_name = t.__name__
display.display('***%s:%s' % (exc_type_name, repr(v)))
raise
def do_pprint(self, args):
"""Pretty Print"""
try:
result = self.evaluate(args)
display.display(pprint.pformat(result))
except Exception:
pass
do_p = do_pprint
def execute(self, args):
try:
code = compile(args + '\n', '<stdin>', 'single')
exec(code, globals(), self.scope)
except Exception:
t, v = sys.exc_info()[:2]
if isinstance(t, str):
exc_type_name = t
else:
exc_type_name = t.__name__
display.display('***%s:%s' % (exc_type_name, repr(v)))
raise
def default(self, line):
try:
self.execute(line)
except Exception:
pass
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,363 |
Only one handler of a `listen` group is run when notified from another handler
|
### Summary
Hello!
Since ansible-core 2.15 (I think since the changes introduced by #79558), it seems that, when multiple handlers of the same `listen` group are notified by another handler, only the first one in the group is run.
From what I understand, when Ansible handles a `notify` while it is already iterating handlers, it only considers the first handler that matches the notification and does not iterate through all the matching handlers. I believe the `break` in `lib/ansible/plugins/strategy/__init__.py` (line 669) should only apply when Ansible is not already iterating handlers (i.e., in the `else` branch):
https://github.com/ansible/ansible/blob/6ebefaceb6cd0d4961776a94d63a71fc1fc28bc0/lib/ansible/plugins/strategy/__init__.py#L659-L669
I think there is a simple fix for that, which I'm planning to submit as a PR.
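For illustration, a minimal sketch of what such a change could look like (using the names from the linked code; this is not the actual patch), where the `break` only applies in the branch that saves a deferred notification:

```python
for handler in self.search_handlers_by_notification(notification, iterator):
    if host_state.run_state == IteratingStates.HANDLERS:
        # already iterating handlers: expand the notification for every
        # matching handler instead of stopping after the first one
        if handler.notify_host(original_host):
            self._tqm.send_callback('v2_playbook_on_notify', handler, original_host)
    else:
        # not yet iterating handlers: saving the notification once is enough,
        # so only break out of the search loop in this branch
        iterator.add_notification(original_host.name, notification)
        break
```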
Thank you! :)
snip
### Issue Type
Bug Report
### Component Name
handlers
### Ansible Version
```console
$ ansible --version
ansible [core 2.16.0]
config file = None
configured module search path = ['/home/user/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.11/site-packages/ansible
ansible collection location = /home/user/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/bin/ansible
python version = 3.11.6 (main, Nov 14 2023, 09:36:21) [GCC 13.2.1 20230801] (/usr/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
EDITOR(env: EDITOR) = /usr/bin/vim
```
### OS / Environment
Arch Linux, with the following Ansible-related packages:
- ansible 9.0.1-1
- ansible-core 2.16.0-1
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
---
- name: test listen-based handlers with recursive notifications
hosts: localhost
gather_facts: false
tasks:
- name: notify handler 1
command: echo
changed_when: true
notify: handler 1
handlers:
- name: handler 1
debug:
msg: handler 1
changed_when: true
notify: handler_2
- name: handler 2a
debug:
msg: handler 2a
listen: handler_2
- name: handler 2b
debug:
msg: handler 2b
listen: handler_2
```
### Expected Results
All handlers should be run, especially both handlers listening on the `handler_2` notification (i.e., `handler 2a` and `handler 2b`):
```
PLAY [test listen-based handlers with recursive notifications] **************************************************************************************
TASK [notify handler 1] *****************************************************************************************************************************
task path: /tmp/ansible/test.yml:7
Notification for handler handler 1 has been saved.
changed: [localhost] => {"changed": true, "cmd": ["echo"], "delta": "0:00:00.004205", "end": "2023-12-06 10:57:16.556224", "msg": "", "rc": 0, "start": "2023-12-06 10:57:16.552019", "stderr": "", "stderr_lines": [], "stdout": "", "stdout_lines": []}
NOTIFIED HANDLER handler 1 for localhost
RUNNING HANDLER [handler 1] *************************************************************************************************************************
task path: /tmp/ansible/test.yml:13
NOTIFIED HANDLER handler 2a for localhost
NOTIFIED HANDLER handler 2b for localhost
changed: [localhost] => {
"msg": "handler 1"
}
RUNNING HANDLER [handler 2a] ************************************************************************************************************************
task path: /tmp/ansible/test.yml:19
ok: [localhost] => {
"msg": "handler 2a"
}
RUNNING HANDLER [handler 2b] ************************************************************************************************************************
task path: /tmp/ansible/test.yml:24
ok: [localhost] => {
"msg": "handler 2b"
}
PLAY RECAP ******************************************************************************************************************************************
localhost : ok=4 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Actual Results
```console
PLAY [test listen-based handlers with recursive notifications] ***********************************************************
TASK [notify handler 1] **************************************************************************************************
task path: /tmp/ansible/test.yml:7
Notification for handler handler 1 has been saved.
changed: [localhost] => {"changed": true, "cmd": ["echo"], "delta": "0:00:00.003800", "end": "2023-12-06 10:56:26.513556", "msg": "", "rc": 0, "start": "2023-12-06 10:56:26.509756", "stderr": "", "stderr_lines": [], "stdout": "", "stdout_lines": []}
NOTIFIED HANDLER handler 1 for localhost
RUNNING HANDLER [handler 1] **********************************************************************************************
task path: /tmp/ansible/test.yml:13
NOTIFIED HANDLER handler 2a for localhost
changed: [localhost] => {
"msg": "handler 1"
}
RUNNING HANDLER [handler 2a] *********************************************************************************************
task path: /tmp/ansible/test.yml:19
ok: [localhost] => {
"msg": "handler 2a"
}
PLAY RECAP ***************************************************************************************************************
localhost : ok=3 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82363
|
https://github.com/ansible/ansible/pull/82364
|
fe81164fe548d79fbcd0024836d5f7474403c95d
|
83281531216ee64cd054959f2bfe54c6df498443
| 2023-12-06T10:05:01Z |
python
| 2023-12-13T09:56:52Z |
test/integration/targets/handlers/runme.sh
|
#!/usr/bin/env bash
set -eux
export ANSIBLE_FORCE_HANDLERS
ANSIBLE_FORCE_HANDLERS=false
# simple handler test
ansible-playbook test_handlers.yml -i inventory.handlers -v "$@" --tags scenario1
# simple from_handlers test
ansible-playbook from_handlers.yml -i inventory.handlers -v "$@" --tags scenario1
ansible-playbook test_listening_handlers.yml -i inventory.handlers -v "$@"
[ "$(ansible-playbook test_handlers.yml -i inventory.handlers -v "$@" --tags scenario2 -l A \
| grep -E -o 'RUNNING HANDLER \[test_handlers : .*]')" = "RUNNING HANDLER [test_handlers : test handler]" ]
# Test forcing handlers using the linear and free strategy
for strategy in linear free; do
export ANSIBLE_STRATEGY=$strategy
# Not forcing, should only run on successful host
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_B" ]
# Forcing from command line
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal --force-handlers \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_A CALLED_HANDLER_B" ]
# Forcing from command line, should only run later tasks on unfailed hosts
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal --force-handlers \
| grep -E -o CALLED_TASK_. | sort | uniq | xargs)" = "CALLED_TASK_B CALLED_TASK_D CALLED_TASK_E" ]
# Forcing from command line, should call handlers even if all hosts fail
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal --force-handlers -e fail_all=yes \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_A CALLED_HANDLER_B" ]
# Forcing from ansible.cfg
[ "$(ANSIBLE_FORCE_HANDLERS=true ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags normal \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_A CALLED_HANDLER_B" ]
# Forcing true in play
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags force_true_in_play \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_A CALLED_HANDLER_B" ]
# Forcing false in play, which overrides command line
[ "$(ansible-playbook test_force_handlers.yml -i inventory.handlers -v "$@" --tags force_false_in_play --force-handlers \
| grep -E -o CALLED_HANDLER_. | sort | uniq | xargs)" = "CALLED_HANDLER_B" ]
# https://github.com/ansible/ansible/pull/80898
[ "$(ansible-playbook 80880.yml -i inventory.handlers -vv "$@" 2>&1)" ]
unset ANSIBLE_STRATEGY
done
[ "$(ansible-playbook test_handlers_include.yml -i ../../inventory -v "$@" --tags playbook_include_handlers \
| grep -E -o 'RUNNING HANDLER \[.*]')" = "RUNNING HANDLER [test handler]" ]
[ "$(ansible-playbook test_handlers_include.yml -i ../../inventory -v "$@" --tags role_include_handlers \
| grep -E -o 'RUNNING HANDLER \[test_handlers_include : .*]')" = "RUNNING HANDLER [test_handlers_include : test handler]" ]
[ "$(ansible-playbook test_handlers_include_role.yml -i ../../inventory -v "$@" \
| grep -E -o 'RUNNING HANDLER \[test_handlers_include_role : .*]')" = "RUNNING HANDLER [test_handlers_include_role : test handler]" ]
# Notify handler listen
ansible-playbook test_handlers_listen.yml -i inventory.handlers -v "$@"
# Notifying inexistent handlers results in an error
set +e
result="$(ansible-playbook test_handlers_inexistent_notify.yml -i inventory.handlers "$@" 2>&1)"
set -e
grep -q "ERROR! The requested handler 'notify_inexistent_handler' was not found in either the main handlers list nor in the listening handlers list" <<< "$result"
# Notifying inexistent handlers does not error when ANSIBLE_ERROR_ON_MISSING_HANDLER=false
ANSIBLE_ERROR_ON_MISSING_HANDLER=false ansible-playbook test_handlers_inexistent_notify.yml -i inventory.handlers -v "$@"
ANSIBLE_ERROR_ON_MISSING_HANDLER=false ansible-playbook test_templating_in_handlers.yml -v "$@"
# https://github.com/ansible/ansible/issues/36649
output_dir=/tmp
set +e
result="$(ansible-playbook test_handlers_any_errors_fatal.yml -e output_dir=$output_dir -i inventory.handlers -v "$@" 2>&1)"
set -e
[ ! -f $output_dir/should_not_exist_B ] || (rm -f $output_dir/should_not_exist_B && exit 1)
# https://github.com/ansible/ansible/issues/47287
[ "$(ansible-playbook test_handlers_including_task.yml -i ../../inventory -v "$@" | grep -E -o 'failed=[0-9]+')" = "failed=0" ]
# https://github.com/ansible/ansible/issues/71222
ansible-playbook test_role_handlers_including_tasks.yml -i ../../inventory -v "$@"
# https://github.com/ansible/ansible/issues/27237
set +e
result="$(ansible-playbook test_handlers_template_run_once.yml -i inventory.handlers "$@" 2>&1)"
set -e
grep -q "handler A" <<< "$result"
grep -q "handler B" <<< "$result"
# Test an undefined variable in another handler name isn't a failure
ansible-playbook 58841.yml "$@" --tags lazy_evaluation 2>&1 | tee out.txt ; cat out.txt
grep out.txt -e "\[WARNING\]: Handler 'handler name with {{ test_var }}' is unusable"
[ "$(grep out.txt -ce 'handler ran')" = "1" ]
[ "$(grep out.txt -ce 'handler with var ran')" = "0" ]
# Test templating a handler name with a defined variable
ansible-playbook 58841.yml "$@" --tags evaluation_time -e test_var=myvar | tee out.txt ; cat out.txt
[ "$(grep out.txt -ce 'handler ran')" = "0" ]
[ "$(grep out.txt -ce 'handler with var ran')" = "1" ]
# Test the handler is not found when the variable is undefined
ansible-playbook 58841.yml "$@" --tags evaluation_time 2>&1 | tee out.txt ; cat out.txt
grep out.txt -e "ERROR! The requested handler 'handler name with myvar' was not found"
grep out.txt -e "\[WARNING\]: Handler 'handler name with {{ test_var }}' is unusable"
[ "$(grep out.txt -ce 'handler ran')" = "0" ]
[ "$(grep out.txt -ce 'handler with var ran')" = "0" ]
# Test include_role and import_role cannot be used as handlers
ansible-playbook test_role_as_handler.yml "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! Using 'include_role' as a handler is not supported."
# Test notifying a handler from within include_tasks does not work anymore
ansible-playbook test_notify_included.yml "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'I was included')" = "1" ]
grep out.txt -e "ERROR! The requested handler 'handler_from_include' was not found in either the main handlers list nor in the listening handlers list"
ansible-playbook test_handlers_meta.yml -i inventory.handlers -vv "$@" | tee out.txt
[ "$(grep out.txt -ce 'RUNNING HANDLER \[noop_handler\]')" = "1" ]
[ "$(grep out.txt -ce 'META: noop')" = "1" ]
# https://github.com/ansible/ansible/issues/46447
set +e
test "$(ansible-playbook 46447.yml -i inventory.handlers -vv "$@" 2>&1 | grep -c 'SHOULD NOT GET HERE')"
set -e
# https://github.com/ansible/ansible/issues/52561
ansible-playbook 52561.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler1 ran')" = "1" ]
# Test flush_handlers meta task does not imply any_errors_fatal
ansible-playbook 54991.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran')" = "4" ]
ansible-playbook order.yml -i inventory.handlers "$@" 2>&1
set +e
ansible-playbook order.yml --force-handlers -e test_force_handlers=true -i inventory.handlers "$@" 2>&1
set -e
ansible-playbook include_handlers_fail_force.yml --force-handlers -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'included handler ran')" = "1" ]
ansible-playbook test_flush_handlers_as_handler.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! flush_handlers cannot be used as a handler"
ansible-playbook test_skip_flush.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran')" = "0" ]
ansible-playbook test_flush_in_rescue_always.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran in rescue')" = "1" ]
[ "$(grep out.txt -ce 'handler ran in always')" = "2" ]
[ "$(grep out.txt -ce 'lockstep works')" = "2" ]
ansible-playbook test_handlers_infinite_loop.yml -i inventory.handlers "$@" 2>&1
ansible-playbook test_flush_handlers_rescue_always.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'rescue ran')" = "1" ]
[ "$(grep out.txt -ce 'always ran')" = "2" ]
[ "$(grep out.txt -ce 'should run for both hosts')" = "2" ]
ansible-playbook test_fqcn_meta_flush_handlers.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
grep out.txt -e "handler ran"
grep out.txt -e "after flush"
ansible-playbook 79776.yml -i inventory.handlers "$@"
ansible-playbook test_block_as_handler.yml "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! Using a block as a handler is not supported."
ansible-playbook test_block_as_handler-include.yml "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! Using a block as a handler is not supported."
ansible-playbook test_block_as_handler-import.yml "$@" 2>&1 | tee out.txt
grep out.txt -e "ERROR! Using a block as a handler is not supported."
ansible-playbook test_include_role_handler_once.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran')" = "1" ]
ansible-playbook test_listen_role_dedup.yml "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'a handler from a role')" = "1" ]
ansible localhost -m include_role -a "name=r1-dep_chain-vars" "$@"
ansible-playbook test_include_tasks_in_include_role.yml "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran')" = "1" ]
ansible-playbook test_run_once.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'handler ran once')" = "1" ]
ansible-playbook force_handlers_blocks_81533-1.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'task1')" = "1" ]
[ "$(grep out.txt -ce 'task2')" = "1" ]
[ "$(grep out.txt -ce 'hosts_left')" = "1" ]
ansible-playbook force_handlers_blocks_81533-2.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'hosts_left')" = "1" ]
ansible-playbook nested_flush_handlers_failure_force.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'flush_handlers_rescued')" = "1" ]
[ "$(grep out.txt -ce 'flush_handlers_always')" = "2" ]
ansible-playbook 82241.yml -i inventory.handlers "$@" 2>&1 | tee out.txt
[ "$(grep out.txt -ce 'included_task_from_tasks_dir')" = "1" ]
|
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,363 |
Only one handler of a `listen` group is run when notified from another handler
|
### Summary
Hello!
Since ansible-core 2.15 (I think since the changes introduced by #79558), it seems that, when multiple handlers of the same `listen` group are notified by another handler, only the first one in the group is run.
From what I understand, when Ansible handles a `notify` while it is already iterating handlers, it only considers the first handler that matches the notification and does not iterate through all the matching handlers. I believe the `break` in `lib/ansible/plugins/strategy/__init__.py` (line 669) should only apply when Ansible is not already iterating handlers (i.e., in the `else` branch):
https://github.com/ansible/ansible/blob/6ebefaceb6cd0d4961776a94d63a71fc1fc28bc0/lib/ansible/plugins/strategy/__init__.py#L659-L669
I think there is a simple fix for that, which I'm planning to submit as a PR.
Thank you! :)
snip
### Issue Type
Bug Report
### Component Name
handlers
### Ansible Version
```console
$ ansible --version
ansible [core 2.16.0]
config file = None
configured module search path = ['/home/user/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/lib/python3.11/site-packages/ansible
ansible collection location = /home/user/.ansible/collections:/usr/share/ansible/collections
executable location = /usr/bin/ansible
python version = 3.11.6 (main, Nov 14 2023, 09:36:21) [GCC 13.2.1 20230801] (/usr/bin/python)
jinja version = 3.1.2
libyaml = True
```
### Configuration
```console
# if using a version older than ansible-core 2.12 you should omit the '-t all'
$ ansible-config dump --only-changed -t all
CONFIG_FILE() = None
EDITOR(env: EDITOR) = /usr/bin/vim
```
### OS / Environment
Arch Linux, with the following Ansible-related packages:
- ansible 9.0.1-1
- ansible-core 2.16.0-1
### Steps to Reproduce
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
---
- name: test listen-based handlers with recursive notifications
hosts: localhost
gather_facts: false
tasks:
- name: notify handler 1
command: echo
changed_when: true
notify: handler 1
handlers:
- name: handler 1
debug:
msg: handler 1
changed_when: true
notify: handler_2
- name: handler 2a
debug:
msg: handler 2a
listen: handler_2
- name: handler 2b
debug:
msg: handler 2b
listen: handler_2
```
### Expected Results
All handlers should be run, especially both handlers listening on the `handler_2` notification (i.e., `handler 2a` and `handler 2b`):
```
PLAY [test listen-based handlers with recursive notifications] **************************************************************************************
TASK [notify handler 1] *****************************************************************************************************************************
task path: /tmp/ansible/test.yml:7
Notification for handler handler 1 has been saved.
changed: [localhost] => {"changed": true, "cmd": ["echo"], "delta": "0:00:00.004205", "end": "2023-12-06 10:57:16.556224", "msg": "", "rc": 0, "start": "2023-12-06 10:57:16.552019", "stderr": "", "stderr_lines": [], "stdout": "", "stdout_lines": []}
NOTIFIED HANDLER handler 1 for localhost
RUNNING HANDLER [handler 1] *************************************************************************************************************************
task path: /tmp/ansible/test.yml:13
NOTIFIED HANDLER handler 2a for localhost
NOTIFIED HANDLER handler 2b for localhost
changed: [localhost] => {
"msg": "handler 1"
}
RUNNING HANDLER [handler 2a] ************************************************************************************************************************
task path: /tmp/ansible/test.yml:19
ok: [localhost] => {
"msg": "handler 2a"
}
RUNNING HANDLER [handler 2b] ************************************************************************************************************************
task path: /tmp/ansible/test.yml:24
ok: [localhost] => {
"msg": "handler 2b"
}
PLAY RECAP ******************************************************************************************************************************************
localhost : ok=4 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Actual Results
```console
PLAY [test listen-based handlers with recursive notifications] ***********************************************************
TASK [notify handler 1] **************************************************************************************************
task path: /tmp/ansible/test.yml:7
Notification for handler handler 1 has been saved.
changed: [localhost] => {"changed": true, "cmd": ["echo"], "delta": "0:00:00.003800", "end": "2023-12-06 10:56:26.513556", "msg": "", "rc": 0, "start": "2023-12-06 10:56:26.509756", "stderr": "", "stderr_lines": [], "stdout": "", "stdout_lines": []}
NOTIFIED HANDLER handler 1 for localhost
RUNNING HANDLER [handler 1] **********************************************************************************************
task path: /tmp/ansible/test.yml:13
NOTIFIED HANDLER handler 2a for localhost
changed: [localhost] => {
"msg": "handler 1"
}
RUNNING HANDLER [handler 2a] *********************************************************************************************
task path: /tmp/ansible/test.yml:19
ok: [localhost] => {
"msg": "handler 2a"
}
PLAY RECAP ***************************************************************************************************************
localhost : ok=3 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82363
|
https://github.com/ansible/ansible/pull/82364
|
fe81164fe548d79fbcd0024836d5f7474403c95d
|
83281531216ee64cd054959f2bfe54c6df498443
| 2023-12-06T10:05:01Z |
python
| 2023-12-13T09:56:52Z |
test/integration/targets/handlers/test_multiple_handlers_with_recursive_notification.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,353 |
ansible-test sanity should allow multiple documents in EXAMPLES
|
### Summary
EXAMPLES are intended to be copy-and-paste ready.
While most of the documentation is expected to be a single document, it's reasonable to expect that within examples (especially for inventory plugins), there may be multiple documents in there. If only a single document is permitted, then when multiple examples are added for an inventory plugin, attempting to lint the YAML results in key-duplicates errors.
See also https://github.com/ansible/ansible-lint/issues/3860
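For illustration, a minimal PyYAML sketch (placeholder plugin names; this is not the ansible-test code itself) showing the difference between single-document and multi-document loading of such an EXAMPLES string:

```python
import yaml

examples = """\
---
# first example document
plugin: example.namespace.example_inventory
regions:
  - us-east-2
---
# second example document
plugin: example.namespace.example_inventory
regions:
  - us-east-1
"""

# Single-document loading rejects the stream once it sees the second document.
try:
    yaml.safe_load(examples)
except yaml.YAMLError as exc:
    print("single-document load failed:", exc)

# Multi-document loading yields each document separately.
for doc in yaml.safe_load_all(examples):
    print(doc["plugin"], doc["regions"])
```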
### Issue Type
Feature Idea
### Component Name
ansible-test
### Additional Information
```yaml (paste below)
EXAMPLES = r"""
---
# Example using groups to assign the running hosts to a group based on vpc_id
plugin: amazon.aws.aws_ec2
profile: aws_profile
# Populate inventory with instances in these regions
regions:
- us-east-2
filters:
# All instances with their state as `running`
instance-state-name: running
keyed_groups:
- prefix: tag
key: tags
compose:
ansible_host: public_dns_name
groups:
libvpc: vpc_id == 'vpc-####'
---
# Define prefix and suffix for host variables coming from AWS.
plugin: amazon.aws.aws_ec2
regions:
- us-east-1
hostvars_prefix: 'aws_'
hostvars_suffix: '_ec2'
"""
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82353
|
https://github.com/ansible/ansible/pull/82355
|
83281531216ee64cd054959f2bfe54c6df498443
|
5346009d2cfab0dcbde675b875a06d2d86b962c5
| 2023-12-05T06:12:52Z |
python
| 2023-12-13T20:18:35Z |
changelogs/fragments/82353-ansible-sanity-examples.yml
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,353 |
ansible-test sanity should allow multiple documents in EXAMPLES
|
### Summary
EXAMPLES are intended to be copy-and-paste ready.
While most of the documentation is expected to be a single document, it's reasonable to expect that within examples (especially for inventory plugins), there may be multiple documents in there. If only a single document is permitted, then when multiple examples are added for an inventory plugin, attempting to lint the YAML results in key-duplicates errors.
See also https://github.com/ansible/ansible-lint/issues/3860
### Issue Type
Feature Idea
### Component Name
ansible-test
### Additional Information
```yaml (paste below)
EXAMPLES = r"""
---
# Example using groups to assign the running hosts to a group based on vpc_id
plugin: amazon.aws.aws_ec2
profile: aws_profile
# Populate inventory with instances in these regions
regions:
- us-east-2
filters:
# All instances with their state as `running`
instance-state-name: running
keyed_groups:
- prefix: tag
key: tags
compose:
ansible_host: public_dns_name
groups:
libvpc: vpc_id == 'vpc-####'
---
# Define prefix and suffix for host variables coming from AWS.
plugin: amazon.aws.aws_ec2
regions:
- us-east-1
hostvars_prefix: 'aws_'
hostvars_suffix: '_ec2'
"""
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82353
|
https://github.com/ansible/ansible/pull/82355
|
83281531216ee64cd054959f2bfe54c6df498443
|
5346009d2cfab0dcbde675b875a06d2d86b962c5
| 2023-12-05T06:12:52Z |
python
| 2023-12-13T20:18:35Z |
test/integration/targets/ansible-test-sanity-yamllint/aliases
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,353 |
ansible-test sanity should allow multiple documents in EXAMPLES
|
### Summary
EXAMPLES are intended to be copy-and-paste ready.
While most of the documentation is expected to be a single document, it's reasonable to expect that within examples (especially for inventory plugins), there may be multiple documents in there. If only a single document is permitted, then when multiple examples are added for an inventory plugin, attempting to lint the YAML results in key-duplicates errors.
See also https://github.com/ansible/ansible-lint/issues/3860
### Issue Type
Feature Idea
### Component Name
ansible-test
### Additional Information
```yaml (paste below)
EXAMPLES = r"""
---
# Example using groups to assign the running hosts to a group based on vpc_id
plugin: amazon.aws.aws_ec2
profile: aws_profile
# Populate inventory with instances in these regions
regions:
- us-east-2
filters:
# All instances with their state as `running`
instance-state-name: running
keyed_groups:
- prefix: tag
key: tags
compose:
ansible_host: public_dns_name
groups:
libvpc: vpc_id == 'vpc-####'
---
# Define prefix and suffix for host variables coming from AWS.
plugin: amazon.aws.aws_ec2
regions:
- us-east-1
hostvars_prefix: 'aws_'
hostvars_suffix: '_ec2'
"""
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82353
|
https://github.com/ansible/ansible/pull/82355
|
83281531216ee64cd054959f2bfe54c6df498443
|
5346009d2cfab0dcbde675b875a06d2d86b962c5
| 2023-12-05T06:12:52Z |
python
| 2023-12-13T20:18:35Z |
test/integration/targets/ansible-test-sanity-yamllint/ansible_collections/ns/col/plugins/inventory/inventory1.py
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,353 |
ansible-test sanity should allow multiple documents in EXAMPLES
|
### Summary
EXAMPLES are intended to be copy-and-paste ready.
While most of the documentation is expected to be a single document, it's reasonable to expect that within examples (especially for inventory plugins), there may be multiple documents in there. If only a single document is permitted, then when multiple examples are added for an inventory plugin, attempting to lint the YAML results in key-duplicates errors.
See also https://github.com/ansible/ansible-lint/issues/3860
### Issue Type
Feature Idea
### Component Name
ansible-test
### Additional Information
```yaml (paste below)
EXAMPLES = r"""
---
# Example using groups to assign the running hosts to a group based on vpc_id
plugin: amazon.aws.aws_ec2
profile: aws_profile
# Populate inventory with instances in these regions
regions:
- us-east-2
filters:
# All instances with their state as `running`
instance-state-name: running
keyed_groups:
- prefix: tag
key: tags
compose:
ansible_host: public_dns_name
groups:
libvpc: vpc_id == 'vpc-####'
---
# Define prefix and suffix for host variables coming from AWS.
plugin: amazon.aws.aws_ec2
regions:
- us-east-1
hostvars_prefix: 'aws_'
hostvars_suffix: '_ec2'
"""
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82353
|
https://github.com/ansible/ansible/pull/82355
|
83281531216ee64cd054959f2bfe54c6df498443
|
5346009d2cfab0dcbde675b875a06d2d86b962c5
| 2023-12-05T06:12:52Z |
python
| 2023-12-13T20:18:35Z |
test/integration/targets/ansible-test-sanity-yamllint/ansible_collections/ns/col/plugins/modules/module1.py
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,353 |
ansible-test sanity should allow multiple documents in EXAMPLES
|
### Summary
EXAMPLES are intended to be copy-and-paste ready.
While most of the documentation is expected to be a single document, it's reasonable to expect that within examples (especially for inventory plugins), there may be multiple documents in there. If only a single document is permitted, then when multiple examples are added for an inventory plugin, attempting to lint the YAML results in key-duplicates errors.
See also https://github.com/ansible/ansible-lint/issues/3860
### Issue Type
Feature Idea
### Component Name
ansible-test
### Additional Information
```yaml (paste below)
EXAMPLES = r"""
---
# Example using groups to assign the running hosts to a group based on vpc_id
plugin: amazon.aws.aws_ec2
profile: aws_profile
# Populate inventory with instances in these regions
regions:
- us-east-2
filters:
# All instances with their state as `running`
instance-state-name: running
keyed_groups:
- prefix: tag
key: tags
compose:
ansible_host: public_dns_name
groups:
libvpc: vpc_id == 'vpc-####'
---
# Define prefix and suffix for host variables coming from AWS.
plugin: amazon.aws.aws_ec2
regions:
- us-east-1
hostvars_prefix: 'aws_'
hostvars_suffix: '_ec2'
"""
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82353
|
https://github.com/ansible/ansible/pull/82355
|
83281531216ee64cd054959f2bfe54c6df498443
|
5346009d2cfab0dcbde675b875a06d2d86b962c5
| 2023-12-05T06:12:52Z |
python
| 2023-12-13T20:18:35Z |
test/integration/targets/ansible-test-sanity-yamllint/expected.txt
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,353 |
ansible-test sanity should allow multiple documents in EXAMPLES
|
### Summary
EXAMPLES are intended to be copy-and-paste ready.
While most of the documentation is expected to be a single document, it's reasonable to expect that within examples (especially for inventory plugins), there may be multiple documents in there. If only a single document is permitted, then when multiple examples are added for an inventory plugin, attempting to lint the YAML results in key-duplicates errors.
See also https://github.com/ansible/ansible-lint/issues/3860
### Issue Type
Feature Idea
### Component Name
ansible-test
### Additional Information
```yaml (paste below)
EXAMPLES = r"""
---
# Example using groups to assign the running hosts to a group based on vpc_id
plugin: amazon.aws.aws_ec2
profile: aws_profile
# Populate inventory with instances in these regions
regions:
- us-east-2
filters:
# All instances with their state as `running`
instance-state-name: running
keyed_groups:
- prefix: tag
key: tags
compose:
ansible_host: public_dns_name
groups:
libvpc: vpc_id == 'vpc-####'
---
# Define prefix and suffix for host variables coming from AWS.
plugin: amazon.aws.aws_ec2
regions:
- us-east-1
hostvars_prefix: 'aws_'
hostvars_suffix: '_ec2'
"""
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82353
|
https://github.com/ansible/ansible/pull/82355
|
83281531216ee64cd054959f2bfe54c6df498443
|
5346009d2cfab0dcbde675b875a06d2d86b962c5
| 2023-12-05T06:12:52Z |
python
| 2023-12-13T20:18:35Z |
test/integration/targets/ansible-test-sanity-yamllint/runme.sh
| |
closed
|
ansible/ansible
|
https://github.com/ansible/ansible
| 82,353 |
ansible-test sanity should allow multiple documents in EXAMPLES
|
### Summary
EXAMPLES are intended to be copy-and-paste ready.
While most of the documentation is expected to be a single document, it's reasonable to expect that within examples (especially for inventory plugins), there may be multiple documents in there. If only a single document is permitted, then when multiple examples are added for an inventory plugin, attempting to lint the YAML results in key-duplicates errors.
See also https://github.com/ansible/ansible-lint/issues/3860
### Issue Type
Feature Idea
### Component Name
ansible-test
### Additional Information
```yaml (paste below)
EXAMPLES = r"""
---
# Example using groups to assign the running hosts to a group based on vpc_id
plugin: amazon.aws.aws_ec2
profile: aws_profile
# Populate inventory with instances in these regions
regions:
- us-east-2
filters:
# All instances with their state as `running`
instance-state-name: running
keyed_groups:
- prefix: tag
key: tags
compose:
ansible_host: public_dns_name
groups:
libvpc: vpc_id == 'vpc-####'
---
# Define prefix and suffix for host variables coming from AWS.
plugin: amazon.aws.aws_ec2
regions:
- us-east-1
hostvars_prefix: 'aws_'
hostvars_suffix: '_ec2'
"""
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
|
https://github.com/ansible/ansible/issues/82353
|
https://github.com/ansible/ansible/pull/82355
|
83281531216ee64cd054959f2bfe54c6df498443
|
5346009d2cfab0dcbde675b875a06d2d86b962c5
| 2023-12-05T06:12:52Z |
python
| 2023-12-13T20:18:35Z |
test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py
|
"""Wrapper around yamllint that supports YAML embedded in Ansible modules."""
from __future__ import annotations
import ast
import json
import os
import re
import sys
import typing as t
import yaml
from yaml.resolver import Resolver
from yaml.constructor import SafeConstructor
from yaml.error import MarkedYAMLError
from yaml.cyaml import CParser
from yamllint import linter
from yamllint.config import YamlLintConfig
def main():
"""Main program body."""
paths = sys.argv[1:] or sys.stdin.read().splitlines()
checker = YamlChecker()
checker.check(paths)
checker.report()
class TestConstructor(SafeConstructor):
"""Yaml Safe Constructor that knows about Ansible tags."""
def construct_yaml_unsafe(self, node):
"""Construct an unsafe tag."""
try:
constructor = getattr(node, 'id', 'object')
if constructor is not None:
constructor = getattr(self, 'construct_%s' % constructor)
except AttributeError:
constructor = self.construct_object
value = constructor(node)
return value
TestConstructor.add_constructor(
'!unsafe',
TestConstructor.construct_yaml_unsafe)
TestConstructor.add_constructor(
'!vault',
TestConstructor.construct_yaml_str)
TestConstructor.add_constructor(
'!vault-encrypted',
TestConstructor.construct_yaml_str)
class TestLoader(CParser, TestConstructor, Resolver):
"""Custom YAML loader that recognizes custom Ansible tags."""
def __init__(self, stream):
CParser.__init__(self, stream)
TestConstructor.__init__(self)
Resolver.__init__(self)
class YamlChecker:
"""Wrapper around yamllint that supports YAML embedded in Ansible modules."""
def __init__(self):
self.messages = []
def report(self):
"""Print yamllint report to stdout."""
report = dict(
messages=self.messages,
)
print(json.dumps(report, indent=4, sort_keys=True))
def check(self, paths): # type: (t.List[str]) -> None
"""Check the specified paths."""
config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config')
yaml_conf = YamlLintConfig(file=os.path.join(config_path, 'default.yml'))
module_conf = YamlLintConfig(file=os.path.join(config_path, 'modules.yml'))
plugin_conf = YamlLintConfig(file=os.path.join(config_path, 'plugins.yml'))
for path in paths:
extension = os.path.splitext(path)[1]
with open(path, encoding='utf-8') as file:
contents = file.read()
if extension in ('.yml', '.yaml'):
self.check_yaml(yaml_conf, path, contents)
elif extension == '.py':
if path.startswith('lib/ansible/modules/') or path.startswith('plugins/modules/'):
conf = module_conf
else:
conf = plugin_conf
self.check_module(conf, path, contents)
else:
raise Exception('unsupported extension: %s' % extension)
def check_yaml(self, conf, path, contents): # type: (YamlLintConfig, str, str) -> None
"""Check the given YAML."""
self.check_parsable(path, contents)
self.messages += [self.result_to_message(r, path) for r in linter.run(contents, conf, path)]
def check_module(self, conf, path, contents): # type: (YamlLintConfig, str, str) -> None
"""Check the given module."""
docs = self.get_module_docs(path, contents)
for key, value in docs.items():
yaml_data = value['yaml']
lineno = value['lineno']
fmt = value['fmt']
if fmt != 'yaml':
continue
if yaml_data.startswith('\n'):
yaml_data = yaml_data[1:]
lineno += 1
self.check_parsable(path, yaml_data, lineno)
messages = list(linter.run(yaml_data, conf, path))
self.messages += [self.result_to_message(r, path, lineno - 1, key) for r in messages]
def check_parsable(self, path, contents, lineno=1): # type: (str, str, int) -> None
"""Check the given contents to verify they can be parsed as YAML."""
try:
yaml.load(contents, Loader=TestLoader)
except MarkedYAMLError as ex:
self.messages += [{'code': 'unparsable-with-libyaml',
'message': '%s - %s' % (ex.args[0], ex.args[2]),
'path': path,
'line': ex.problem_mark.line + lineno,
'column': ex.problem_mark.column + 1,
'level': 'error',
}]
@staticmethod
def result_to_message(result, path, line_offset=0, prefix=''): # type: (t.Any, str, int, str) -> t.Dict[str, t.Any]
"""Convert the given result to a dictionary and return it."""
if prefix:
prefix = '%s: ' % prefix
return dict(
code=result.rule or result.level,
message=prefix + result.desc,
path=path,
line=result.line + line_offset,
column=result.column,
level=result.level,
)
def get_module_docs(self, path, contents): # type: (str, str) -> t.Dict[str, t.Any]
"""Return the module documentation for the given module contents."""
module_doc_types = [
'DOCUMENTATION',
'EXAMPLES',
'RETURN',
]
docs = {}
fmt_re = re.compile(r'^# fmt:\s+(\S+)')
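# an optional leading '# fmt: <format>' line in the embedded document lets a
# plugin mark EXAMPLES (or other docs) as non-YAML so the YAML checks skip it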
def check_assignment(statement, doc_types=None):
"""Check the given statement for a documentation assignment."""
for target in statement.targets:
if not isinstance(target, ast.Name):
continue
if doc_types and target.id not in doc_types:
continue
fmt_match = fmt_re.match(statement.value.value.lstrip())
fmt = 'yaml'
if fmt_match:
fmt = fmt_match.group(1)
docs[target.id] = dict(
yaml=statement.value.value,
lineno=statement.lineno,
end_lineno=statement.lineno + len(statement.value.value.splitlines()),
fmt=fmt.lower(),
)
module_ast = self.parse_module(path, contents)
if not module_ast:
return {}
is_plugin = path.startswith('lib/ansible/modules/') or path.startswith('lib/ansible/plugins/') or path.startswith('plugins/')
is_doc_fragment = path.startswith('lib/ansible/plugins/doc_fragments/') or path.startswith('plugins/doc_fragments/')
if is_plugin and not is_doc_fragment:
for body_statement in module_ast.body:
if isinstance(body_statement, ast.Assign):
check_assignment(body_statement, module_doc_types)
elif is_doc_fragment:
for body_statement in module_ast.body:
if isinstance(body_statement, ast.ClassDef):
for class_statement in body_statement.body:
if isinstance(class_statement, ast.Assign):
check_assignment(class_statement)
else:
raise Exception('unsupported path: %s' % path)
return docs
def parse_module(self, path, contents): # type: (str, str) -> t.Optional[ast.Module]
"""Parse the given contents and return a module if successful, otherwise return None."""
try:
return ast.parse(contents)
except SyntaxError as ex:
self.messages.append(dict(
code='python-syntax-error',
message=str(ex),
path=path,
line=ex.lineno,
column=ex.offset,
level='error',
))
except Exception as ex: # pylint: disable=broad-except
self.messages.append(dict(
code='python-parse-error',
message=str(ex),
path=path,
line=0,
column=0,
level='error',
))
return None
if __name__ == '__main__':
main()
|
closed
|
dagger/dagger
|
https://github.com/dagger/dagger
| 940 |
Support explicit dependency
|
Sometimes the implicit dependencies of the DAG are not enough, because a component `A` does not produce any output that component `B` consumes, yet `A` must still be executed before `B`.
In such a scenario, it would be useful to be able to define the dependency explicitly, as sketched below.
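For illustration only, here is a minimal Go sketch of the idea: an ordering-only `after` edge between components, with no data flowing from `A` to `B`. The `component` type and the `after` field are hypothetical and are not dagger's API; they merely show the kind of explicit dependency being requested.

```go
package main

import "fmt"

// component is a node in the build DAG. The "after" field is a
// hypothetical, ordering-only dependency: it names components that
// must run first even though no data flows between them.
type component struct {
	after []string
}

// run executes every component after its explicit dependencies,
// using a simple depth-first traversal (cycles are not handled here).
func run(dag map[string]component) {
	done := map[string]bool{}
	var visit func(name string)
	visit = func(name string) {
		if done[name] {
			return
		}
		done[name] = true
		for _, dep := range dag[name].after {
			visit(dep)
		}
		fmt.Println("executing", name)
	}
	for name := range dag {
		visit(name)
	}
}

func main() {
	// B consumes no output from A, but must still run after it.
	run(map[string]component{
		"A": {},
		"B": {after: []string{"A"}},
	})
}
```

Running this always prints `executing A` before `executing B`, which is exactly the ordering guarantee an explicit dependency would give even when `B` uses nothing produced by `A`.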
|
https://github.com/dagger/dagger/issues/940
|
https://github.com/dagger/dagger/pull/123
|
9dbec2030cd799ef3d3ba61d4f17d19a3e6f7235
|
b1626033dead786076c6f83248a3afcba9e64953
| 2021-09-03T00:10:54Z |
go
| 2021-02-17T21:28:18Z |
go.mod
|
module dagger.cloud/go
go 1.16
require (
cuelang.org/go v0.3.0-beta.4
github.com/KromDaniel/jonson v0.0.0-20180630143114-d2f9c3c389db
github.com/containerd/console v1.0.1
github.com/emicklei/proto v1.9.0 // indirect
github.com/moby/buildkit v0.8.1
github.com/opencontainers/go-digest v1.0.0
github.com/rs/zerolog v1.20.0
github.com/spf13/cobra v1.0.0
github.com/spf13/pflag v1.0.5
github.com/spf13/viper v1.7.0
github.com/tonistiigi/fsutil v0.0.0-20201103201449-0834f99b7b85
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c // indirect
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221
golang.org/x/tools v0.1.0 // indirect
gopkg.in/yaml.v3 v3.0.0-20200506231410-2ff61e1afc86
)
replace (
// protobuf: corresponds to containerd
github.com/golang/protobuf => github.com/golang/protobuf v1.3.5
github.com/hashicorp/go-immutable-radix => github.com/tonistiigi/go-immutable-radix v0.0.0-20170803185627-826af9ccf0fe
github.com/jaguilar/vt100 => github.com/tonistiigi/vt100 v0.0.0-20190402012908-ad4c4a574305
// genproto: corresponds to containerd
google.golang.org/genproto => google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63
)
|
closed
|
dagger/dagger
|
https://github.com/dagger/dagger
| 940 |
Support explicit dependency
|
Sometimes the implicit dependencies of the DAG are not enough, because a component `A` does not produce any output that component `B` consumes, yet `A` must still be executed before `B`.
In such a scenario, it would be useful to be able to define the dependency explicitly.
|
https://github.com/dagger/dagger/issues/940
|
https://github.com/dagger/dagger/pull/123
|
9dbec2030cd799ef3d3ba61d4f17d19a3e6f7235
|
b1626033dead786076c6f83248a3afcba9e64953
| 2021-09-03T00:10:54Z |
go
| 2021-02-17T21:28:18Z |
go.sum
|
bazil.org/fuse v0.0.0-20160811212531-371fbbdaa898/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8=
bazil.org/fuse v0.0.0-20180421153158-65cc252bf669/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8=
cloud.google.com/go v0.25.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.31.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.37.2/go.mod h1:H8IAquKe2L30IxoupDgqTaQvKSwF/c8prYHynGIWQbA=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
cloud.google.com/go v0.39.0/go.mod h1:rVLT6fkc8chs9sfPtFc1SBH6em7n+ZoXaG+87tDISts=
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
cloud.google.com/go v0.51.0/go.mod h1:hWtGJ6gnXH+KgDv+V0zFGDvpi07n3z8ZNj3T1RW0Gcw=
cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=
cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
code.gitea.io/sdk/gitea v0.12.0/go.mod h1:z3uwDV/b9Ls47NGukYM9XhnHtqPh/J+t40lsUrR6JDY=
contrib.go.opencensus.io/exporter/aws v0.0.0-20181029163544-2befc13012d0/go.mod h1:uu1P0UCM/6RbsMrgPa98ll8ZcHM858i/AD06a9aLRCA=
contrib.go.opencensus.io/exporter/ocagent v0.5.0/go.mod h1:ImxhfLRpxoYiSq891pBrLVhN+qmP8BTVvdH2YLs7Gl0=
contrib.go.opencensus.io/exporter/stackdriver v0.12.1/go.mod h1:iwB6wGarfphGGe/e5CWqyUk/cLzKnWsOKPVW3no6OTw=
contrib.go.opencensus.io/integrations/ocsql v0.1.4/go.mod h1:8DsSdjz3F+APR+0z0WkU1aRorQCFfRxvqjUUPMbF3fE=
contrib.go.opencensus.io/resource v0.1.1/go.mod h1:F361eGI91LCmW1I/Saf+rX0+OFcigGlFvXwEGEnkRLA=
cuelang.org/go v0.3.0-beta.4 h1:NjjbmTT8zW/kqZsq8Is1WsevvpsCVPzRdeaH3QDRuJM=
cuelang.org/go v0.3.0-beta.4/go.mod h1:Ikvs157igkGV5gFUdYSFa+lWp/CDteVhubPTXyvPRtA=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20201218220906-28db891af037/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
git.apache.org/thrift.git v0.12.0/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
github.com/AkihiroSuda/containerd-fuse-overlayfs v1.0.0/go.mod h1:0mMDvQFeLbbn1Wy8P2j3hwFhqBq+FKn8OZPno8WLmp8=
github.com/Azure/azure-amqp-common-go/v2 v2.1.0/go.mod h1:R8rea+gJRuJR6QxTir/XuEd+YuKoUiazDC/N96FiDEU=
github.com/Azure/azure-pipeline-go v0.2.1/go.mod h1:UGSo8XybXnIGZ3epmeBw7Jdz+HiUVpqIlpz/HKHylF4=
github.com/Azure/azure-pipeline-go v0.2.2/go.mod h1:4rQ/NZncSvGqNkkOsNpOU1tgoNuIlp9AfUH5G1tvCHc=
github.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v19.1.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v29.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v30.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v35.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v38.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v42.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-service-bus-go v0.9.1/go.mod h1:yzBx6/BUGfjfeqbRZny9AQIbIe3AcV9WZbAdpkoXOa0=
github.com/Azure/azure-storage-blob-go v0.8.0/go.mod h1:lPI3aLPpuLTeUwh1sViKXFxwl2B6teiRqI0deQUvsw0=
github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78 h1:w+iIsaOQNcT7OZ575w+acHgRric5iCyQh+xv+KJ4HB8=
github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8=
github.com/Azure/go-autorest v10.8.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest v10.15.5+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest v12.0.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest v14.1.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI=
github.com/Azure/go-autorest/autorest v0.9.3/go.mod h1:GsRuLYvwzLjjjRoWEIyMUaYq8GNUx2nRB378IPt/1p0=
github.com/Azure/go-autorest/autorest v0.9.6/go.mod h1:/FALq9T/kS7b5J5qsQ+RSTUdAmGFqi0vUdVNNx8q630=
github.com/Azure/go-autorest/autorest v0.10.2/go.mod h1:/FALq9T/kS7b5J5qsQ+RSTUdAmGFqi0vUdVNNx8q630=
github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0=
github.com/Azure/go-autorest/autorest/adal v0.8.0/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc=
github.com/Azure/go-autorest/autorest/adal v0.8.1/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q=
github.com/Azure/go-autorest/autorest/adal v0.8.2/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q=
github.com/Azure/go-autorest/autorest/adal v0.8.3/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q=
github.com/Azure/go-autorest/autorest/azure/auth v0.4.2/go.mod h1:90gmfKdlmKgfjUpnCEpOJzsUEjrWDSLwHIG73tSXddM=
github.com/Azure/go-autorest/autorest/azure/cli v0.3.1/go.mod h1:ZG5p860J94/0kI9mNJVoIoLgXcirM2gF5i2kWloofxw=
github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA=
github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g=
github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM=
github.com/Azure/go-autorest/autorest/to v0.2.0/go.mod h1:GunWKJp1AEqgMaGLV+iocmRAJWqST1wQYhyyjXJ3SJc=
github.com/Azure/go-autorest/autorest/to v0.3.0/go.mod h1:MgwOyqaIuKdG4TL/2ywSsIWKAfJfgHDo8ObuUk3t5sA=
github.com/Azure/go-autorest/autorest/validation v0.1.0/go.mod h1:Ha3z/SqBeaalWQvokg3NZAlQTalVMtOIAs1aGK7G6u8=
github.com/Azure/go-autorest/autorest/validation v0.2.0/go.mod h1:3EEqHnBxQGHXRYq3HT1WyXAvT7LLY3tl70hw6tQIbjI=
github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc=
github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/Djarvur/go-err113 v0.0.0-20200410182137-af658d038157/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs=
github.com/Djarvur/go-err113 v0.1.0/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs=
github.com/GoogleCloudPlatform/cloudsql-proxy v0.0.0-20191009163259-e802c2cb94ae/go.mod h1:mjwGPas4yKduTyubHvD1Atl9r1rUq8DfVy+gkVvZ+oo=
github.com/GoogleCloudPlatform/k8s-cloud-provider v0.0.0-20190822182118-27a4ced34534/go.mod h1:iroGtC8B3tQiqtds1l+mgk/BBOrxbqjH+eUfFQYRc14=
github.com/KromDaniel/jonson v0.0.0-20180630143114-d2f9c3c389db h1:Zkf5kwhxdW0xV7WM/crqIcOP5LCFGnAmumWSFAewJ74=
github.com/KromDaniel/jonson v0.0.0-20180630143114-d2f9c3c389db/go.mod h1:RU+6d0CNIRSp6yo1mXLIIrnFa/3LHhvcDVLVJyovptM=
github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/Masterminds/semver/v3 v3.0.3/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
github.com/Masterminds/semver/v3 v3.1.0/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA=
github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=
github.com/Microsoft/go-winio v0.4.15-0.20200908182639-5b44b70ab3ab/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=
github.com/Microsoft/go-winio v0.4.15 h1:qkLXKzb1QoVatRyd/YlXZ/Kg0m5K3SPuoD82jjSOaBc=
github.com/Microsoft/go-winio v0.4.15/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=
github.com/Microsoft/hcsshim v0.8.7/go.mod h1:OHd7sQqRFrYd3RmSgbgji+ctCwkbq2wbEYNSzOYtcBQ=
github.com/Microsoft/hcsshim v0.8.9/go.mod h1:5692vkUqntj1idxauYlpoINNKeqCiG6Sg38RRsjT5y8=
github.com/Microsoft/hcsshim v0.8.10 h1:k5wTrpnVU2/xv8ZuzGkbXVd3js5zJ8RnumPo5RxiIxU=
github.com/Microsoft/hcsshim v0.8.10/go.mod h1:g5uw8EV2mAlzqe94tfNBNdr89fnbD/n3HV0OhsddkmM=
github.com/Microsoft/hcsshim/test v0.0.0-20200826032352-301c83a30e7c/go.mod h1:30A5igQ91GEmhYJF8TaRP79pMBOYynRsyOByfVV0dU4=
github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/OpenPeeDeeP/depguard v1.0.1/go.mod h1:xsIw86fROiiwelg+jB2uM9PiKihMMmUx/1V+TNhjQvM=
github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
github.com/alecthomas/kingpin v2.2.6+incompatible/go.mod h1:59OFYbFVLKQKq+mqrL6Rw5bR0c3ACQaawgXx0QYndlE=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/apex/log v1.1.4/go.mod h1:AlpoD9aScyQfJDVHmLMEcx4oU6LqzkWp4Mg9GdAcEvQ=
github.com/apex/log v1.3.0/go.mod h1:jd8Vpsr46WAe3EZSQ/IUMs2qQD/GOycT5rPWCO1yGcs=
github.com/apex/logs v0.0.4/go.mod h1:XzxuLZ5myVHDy9SAmYpamKKRNApGj54PfYLcFrXqDwo=
github.com/aphistic/golf v0.0.0-20180712155816-02c07f170c5a/go.mod h1:3NqKYiepwy8kCu4PNA+aP7WUV72eXWJeP9/r3/K9aLE=
github.com/aphistic/sweet v0.2.0/go.mod h1:fWDlIh/isSE9n6EPsRmC0det+whmX6dJid3stzu0Xys=
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
github.com/aws/aws-sdk-go v1.15.27/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
github.com/aws/aws-sdk-go v1.15.90/go.mod h1:es1KtYUFs7le0xQ3rOihkuoVD90z7D0fR2Qm4S00/gU=
github.com/aws/aws-sdk-go v1.16.26/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.19.18/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.19.45/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.20.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.25.11/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.27.1/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.31.6/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=
github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I=
github.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA=
github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb/go.mod h1:PkYb9DJNAwrSvRx5DYA+gUcOIgTGVMNkfSCbZM8cWpI=
github.com/blang/semver v3.1.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
github.com/blang/semver v3.5.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/bombsimon/wsl/v2 v2.0.0/go.mod h1:mf25kr/SqFEPhhcxW1+7pxzGlW+hIl/hYTKY95VwV8U=
github.com/bombsimon/wsl/v2 v2.2.0/go.mod h1:Azh8c3XGEJl9LyX0/sFC+CKMc7Ssgua0g+6abzXN4Pg=
github.com/bombsimon/wsl/v3 v3.0.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc=
github.com/bombsimon/wsl/v3 v3.1.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc=
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk=
github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8=
github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50=
github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE=
github.com/caarlos0/ctrlc v1.0.0/go.mod h1:CdXpj4rmq0q/1Eb44M9zi2nKB0QraNKuRGYGrrHhcQw=
github.com/campoy/unique v0.0.0-20180121183637-88950e537e7e/go.mod h1:9IOqJGCPMSc6E5ydlp5NIonxObaeu/Iub/X03EKPVYo=
github.com/cavaliercoder/go-cpio v0.0.0-20180626203310-925f9528c45e/go.mod h1:oDpT4efm8tSYHXV5tHSdRvBet/b/QzxZ+XyyPehvm3A=
github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/cilium/ebpf v0.0.0-20200110133405-4032b1d8aae3/go.mod h1:MA5e5Lr8slmEg9bt0VpxxWqJlO4iwu3FBdHUzV7wQVg=
github.com/cilium/ebpf v0.0.0-20200702112145-1c8d4c9ef775/go.mod h1:7cR51M8ViRLIdUjrmSXlK9pkrsDlLHbO8jiB8X8JnOc=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
github.com/cockroachdb/apd/v2 v2.0.1 h1:y1Rh3tEU89D+7Tgbw+lp52T6p/GJLpDmNvr10UWqLTE=
github.com/cockroachdb/apd/v2 v2.0.1/go.mod h1:DDxRlzC2lo3/vSlmSoS7JkqbbrARPuFOGr0B9pvN3Gw=
github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8=
github.com/codahale/hdrhistogram v0.0.0-20160425231609-f8ad88b59a58/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=
github.com/containerd/cgroups v0.0.0-20190919134610-bf292b21730f/go.mod h1:OApqhQ4XNSNC13gXIwDjhOQxjWa/NxkwZXJ1EvqT0ko=
github.com/containerd/cgroups v0.0.0-20200531161412-0dbf7f05ba59/go.mod h1:pA0z1pT8KYB3TCXK/ocprsh7MAkoW8bZVzPdih9snmM=
github.com/containerd/cgroups v0.0.0-20200710171044-318312a37340 h1:9atoWyI9RtXFwf7UDbme/6M8Ud0rFrx+Q3ZWgSnsxtw=
github.com/containerd/cgroups v0.0.0-20200710171044-318312a37340/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo=
github.com/containerd/console v0.0.0-20180822173158-c12b1e7919c1/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw=
github.com/containerd/console v0.0.0-20191206165004-02ecf6a7291e/go.mod h1:8Pf4gM6VEbTNRIT26AyyU7hxdQU3MvAvxVI0sc00XBE=
github.com/containerd/console v1.0.0/go.mod h1:8Pf4gM6VEbTNRIT26AyyU7hxdQU3MvAvxVI0sc00XBE=
github.com/containerd/console v1.0.1 h1:u7SFAJyRqWcG6ogaMAx3KjSTy1e3hT9QxqX7Jco7dRc=
github.com/containerd/console v1.0.1/go.mod h1:XUsP6YE/mKtz6bxc+I8UiKKTP04qjQL4qcS3XoQ5xkw=
github.com/containerd/containerd v1.3.0-beta.2.0.20190828155532-0293cbd26c69/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/containerd v1.3.0/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/containerd v1.3.2/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/containerd v1.4.0/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/containerd v1.4.1-0.20201117152358-0edc412565dc h1:XbZ/DDsFDigeOQ9M3YXhvE6d1AEHdxKAzIgkswip7dI=
github.com/containerd/containerd v1.4.1-0.20201117152358-0edc412565dc/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y=
github.com/containerd/continuity v0.0.0-20200710164510-efbc4488d8fe h1:PEmIrUvwG9Yyv+0WKZqjXfSFDeZjs/q15g0m08BYS9k=
github.com/containerd/continuity v0.0.0-20200710164510-efbc4488d8fe/go.mod h1:cECdGN1O8G9bgKTlLhuPJimka6Xb/Gg7vYzCTNVxhvo=
github.com/containerd/fifo v0.0.0-20190226154929-a9fb20d87448/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI=
github.com/containerd/fifo v0.0.0-20200410184934-f15a3290365b h1:qUtCegLdOUVfVJOw+KDg6eJyE1TGvLlkGEd1091kSSQ=
github.com/containerd/fifo v0.0.0-20200410184934-f15a3290365b/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0=
github.com/containerd/go-cni v1.0.1/go.mod h1:+vUpYxKvAF72G9i1WoDOiPGRtQpqsNW/ZHtSlv++smU=
github.com/containerd/go-runc v0.0.0-20180907222934-5a6d9f37cfa3/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0=
github.com/containerd/go-runc v0.0.0-20200220073739-7016d3ce2328/go.mod h1:PpyHrqVs8FTi9vpyHwPwiNEGaACDxT/N/pLcvMSRA9g=
github.com/containerd/go-runc v0.0.0-20201020171139-16b287bc67d0/go.mod h1:cNU0ZbCgCQVZK4lgG3P+9tn9/PaJNmoDXPpoJhDR+Ok=
github.com/containerd/stargz-snapshotter v0.0.0-20201027054423-3a04e4c2c116/go.mod h1:o59b3PCKVAf9jjiKtCc/9hLAd+5p/rfhBfm6aBcTEr4=
github.com/containerd/ttrpc v0.0.0-20190828154514-0e0f228740de/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o=
github.com/containerd/ttrpc v1.0.1 h1:IfVOxKbjyBn9maoye2JN95pgGYOmPkQVqxtOu7rtNIc=
github.com/containerd/ttrpc v1.0.1/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y=
github.com/containerd/typeurl v0.0.0-20180627222232-a93fcdb778cd/go.mod h1:Cm3kwCdlkCfMSHURc+r6fwoGH6/F1hH3S4sg0rLFWPc=
github.com/containerd/typeurl v1.0.1 h1:PvuK4E3D5S5q6IqsPDCy928FhP0LUIGcmZ/Yhgp5Djw=
github.com/containerd/typeurl v1.0.1/go.mod h1:TB1hUtrpaiO88KEK56ijojHS1+NeF0izUACaJW2mdXg=
github.com/containernetworking/cni v0.8.0/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY=
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
github.com/coreos/go-oidc v2.1.0+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc=
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd/v22 v22.0.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk=
github.com/coreos/go-systemd/v22 v22.1.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk=
github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/coreos/pkg v0.0.0-20180108230652-97fdf19511ea/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4=
github.com/davecgh/go-spew v0.0.0-20151105211317-5215b55f46b2/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba/go.mod h1:dV8lFg6daOBZbT6/BDGIz6Y3WFGn8juu6G+CQ6LHtl0=
github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY=
github.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8=
github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E=
github.com/docker/cli v0.0.0-20190925022749-754388324470/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/cli v0.0.0-20191017083524-a8ff7f821017/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/cli v20.10.0-beta1.0.20201029214301-1d20b15adc38+incompatible h1:r99CiNpN5pxrSuSH36suYxrbLxFOhBvQ0sEH6624MHs=
github.com/docker/cli v20.10.0-beta1.0.20201029214301-1d20b15adc38+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY=
github.com/docker/distribution v2.6.0-rc.1.0.20180327202408-83389a148052+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/distribution v2.7.1+incompatible h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug=
github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/docker v0.0.0-20200511152416-a93e9eb0e95c/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v0.7.3-0.20190327010347-be7ac8be2ae0/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v1.4.2-0.20180531152204-71cd53e4a197/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v1.4.2-0.20190924003213-a8608b5b67c7/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v17.12.0-ce-rc1.0.20200730172259-9f28837c1d93+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v20.10.0-beta1.0.20201110211921-af34b94a78a1+incompatible h1:J2OhsbfqoBRRT048iD/tqXBvEQWQATQ8vew6LqQmDSU=
github.com/docker/docker v20.10.0-beta1.0.20201110211921-af34b94a78a1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y=
github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c h1:+pKlWGMw7gf6bQ+oDZB4KHQFypsfjYlq/C4rfL7D3g8=
github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA=
github.com/docker/go-metrics v0.0.0-20180209012529-399ea8c73916/go.mod h1:/u0gXw0Gay3ceNrsHubL3BtdOL2fHf93USgMTe0W5dI=
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=
github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docker/libnetwork v0.8.0-dev.2.0.20200917202933-d0951081b35f h1:jC/ZXgYdzCUuKFkKGNiekhnIkGfUrdelEqvg4Miv440=
github.com/docker/libnetwork v0.8.0-dev.2.0.20200917202933-d0951081b35f/go.mod h1:93m0aTqz6z+g32wla4l4WxTrdtvBRmVzYRkYvasA5Z8=
github.com/docker/libtrust v0.0.0-20150114040149-fa567046d9b1/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE=
github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
github.com/elazarl/goproxy v0.0.0-20170405201442-c4fc26588b6e/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emicklei/proto v1.6.15/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A=
github.com/emicklei/proto v1.9.0 h1:l0QiNT6Qs7Yj0Mb4X6dnWBQer4ebei2BFcgQLbGqUDc=
github.com/emicklei/proto v1.9.0/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/fortytw2/leaktest v1.2.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=
github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
github.com/go-critic/go-critic v0.4.1/go.mod h1:7/14rZGnZbY6E38VEGk2kVhoq6itzc1E68facVDK23g=
github.com/go-critic/go-critic v0.4.3/go.mod h1:j4O3D4RoIwRqlZw5jJpx0BNfXWWbpcJoKu5cYSe4YmQ=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-lintpack/lintpack v0.5.2/go.mod h1:NwZuYi2nUHho8XEIZ6SIxihrnPoqBTDqfpXvXAN0sXM=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=
github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU=
github.com/go-ole/go-ole v1.2.1/go.mod h1:7FAglXiTm7HKlQRDeOQ6ZNUHidzCWXuZWq/1dTyBNF8=
github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0=
github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg=
github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg=
github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc=
github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8=
github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc=
github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8Lj9mJglo=
github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I=
github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-toolsmith/astcast v1.0.0/go.mod h1:mt2OdQTeAQcY4DQgPSArJjHCcOwlX+Wl/kwN+LbLGQ4=
github.com/go-toolsmith/astcopy v1.0.0/go.mod h1:vrgyG+5Bxrnz4MZWPF+pI4R8h3qKRjjyvV/DSez4WVQ=
github.com/go-toolsmith/astequal v0.0.0-20180903214952-dcb477bfacd6/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY=
github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY=
github.com/go-toolsmith/astfmt v0.0.0-20180903215011-8f8ee99c3086/go.mod h1:mP93XdblcopXwlyN4X4uodxXQhldPGZbcEJIimQHrkg=
github.com/go-toolsmith/astfmt v1.0.0/go.mod h1:cnWmsOAuq4jJY6Ct5YWlVLmcmLMn1JUPuQIHCY7CJDw=
github.com/go-toolsmith/astinfo v0.0.0-20180906194353-9809ff7efb21/go.mod h1:dDStQCHtmZpYOmjRP/8gHHnCCch3Zz3oEgCdZVdtweU=
github.com/go-toolsmith/astp v0.0.0-20180903215135-0af7e3c24f30/go.mod h1:SV2ur98SGypH1UjcPpCatrV5hPazG6+IfNHbkDXBRrk=
github.com/go-toolsmith/astp v1.0.0/go.mod h1:RSyrtpVlfTFGDYRbrjyWP1pYu//tSFcvdYrA8meBmLI=
github.com/go-toolsmith/pkgload v0.0.0-20181119091011-e9e65178eee8/go.mod h1:WoMrjiy4zvdS+Bg6z9jZH82QXwkcgCBX6nOfnmdaHks=
github.com/go-toolsmith/pkgload v1.0.0/go.mod h1:5eFArkbO80v7Z0kdngIxsRXRMTaX4Ilcwuh3clNrQJc=
github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8=
github.com/go-toolsmith/typep v1.0.0/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU=
github.com/go-toolsmith/typep v1.0.2/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU=
github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/godbus/dbus v0.0.0-20190422162347-ade71ed3457e/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4=
github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gofrs/flock v0.0.0-20190320160742-5135e617513b/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
github.com/gofrs/flock v0.7.3 h1:I0EKY9l8HZCXTMYC4F80vwT6KNypV9uYKP3Alm/hjmQ=
github.com/gofrs/flock v0.7.3/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
github.com/gogo/googleapis v1.2.0/go.mod h1:Njal3psf3qN6dwBtQfUmBZh2ybovJ0tlu3o/AC7HYjU=
github.com/gogo/googleapis v1.3.2 h1:kX1es4djPJrsDhY7aZKJy7aZasdcB5oSOEphMjSB53c=
github.com/gogo/googleapis v1.3.2/go.mod h1:5YRNX2z1oM5gXdAkurHa942MDgEJyk02w4OecKY87+c=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/gogo/protobuf v1.3.1 h1:DqDEcV5aeaTmdFBePNpYsp3FlcVH/2ISVVM9Qf8PSls=
github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/protobuf v1.3.5 h1:F768QJ1E9tib+q5Sc8MkdJi1RxLTbRcTf8LJV56aRls=
github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4=
github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk=
github.com/golangci/errcheck v0.0.0-20181223084120-ef45e06d44b6/go.mod h1:DbHgvLiFKX1Sh2T1w8Q/h4NAI8MHIpzCdnBUDTXU3I0=
github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8=
github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3/go.mod h1:JXrF4TWy4tXYn62/9x8Wm/K/dm06p8tCKwFRDPZG/1o=
github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee/go.mod h1:ozx7R9SIwqmqf5pRP90DhR2Oay2UIjGuKheCBCNwAYU=
github.com/golangci/gocyclo v0.0.0-20180528144436-0a533e8fa43d/go.mod h1:ozx7R9SIwqmqf5pRP90DhR2Oay2UIjGuKheCBCNwAYU=
github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU=
github.com/golangci/golangci-lint v1.23.7/go.mod h1:g/38bxfhp4rI7zeWSxcdIeHTQGS58TCak8FYcyCmavQ=
github.com/golangci/golangci-lint v1.27.0/go.mod h1:+eZALfxIuthdrHPtfM7w/R3POJLjHDfJJw8XZl9xOng=
github.com/golangci/ineffassign v0.0.0-20190609212857-42439a7714cc/go.mod h1:e5tpTHCfVze+7EpLEozzMB3eafxo2KT5veNg1k6byQU=
github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg=
github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZqDe4LMs4ZHD0oMUlt9G2LWuDGoisJTBzLMV9o=
github.com/golangci/misspell v0.0.0-20180809174111-950f5d19e770/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA=
github.com/golangci/misspell v0.3.5/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA=
github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21/go.mod h1:tf5+bzsHdTM0bsB7+8mt0GUMvjCgwLpTapNZHU8AajI=
github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4=
github.com/golangci/revgrep v0.0.0-20180812185044-276a5c0a1039/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4=
github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ=
github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/crfs v0.0.0-20191108021818-71d77da419c9/go.mod h1:etGhoOqfwPkooV6aqoX3eBGQOJblqdoc9XvWOeuxpPw=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.4.1 h1:/exdXoGamhu5ONeUJH0deniYLWYvQwW66yvlfiiKTu0=
github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-containerregistry v0.0.0-20191010200024-a3d713f9b7f8/go.mod h1:KyKXa9ciM8+lgMXwOVsXi7UxGrsf9mM61Mzs+xKUrKE=
github.com/google/go-containerregistry v0.1.2/go.mod h1:GPivBPgdAyd2SU+vf6EpsgOtWDuPqjW0hJZt4rNdTZ4=
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
github.com/google/go-github/v28 v28.1.1/go.mod h1:bsqJWQX05omyWVmc00nEUql9mhQyv38lDZ8kPZcQVoM=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/google/go-replayers/grpcreplay v0.1.0/go.mod h1:8Ig2Idjpr6gifRd6pNVggX6TC1Zw6Jx74AKp7QNH2QE=
github.com/google/go-replayers/httpreplay v0.1.0/go.mod h1:YKZViNhiGgqdBlUbI2MwGpq4pXxNmhJLPHQ7cv2b5no=
github.com/google/gofuzz v0.0.0-20161122191042-44d81051d367/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI=
github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian v2.1.1-0.20190517191504-25dcb96d9e51+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/rpmpack v0.0.0-20191226140753-aa36bfddb3a0/go.mod h1:RaTPr0KUf2K7fnZYLNDrr8rxAamWs3iNywJLtQ2AzBg=
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
github.com/google/subcommands v1.0.1/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/wire v0.3.0/go.mod h1:i1DMg/Lu8Sz5yYl25iOdmc5CT5qusaa+zmRWs16741s=
github.com/google/wire v0.4.0/go.mod h1:ngWDr9Qvq3yZA10YrxfyGELY/AFWGVpy9c1LTRi1EoU=
github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
github.com/googleapis/gax-go v2.0.2+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY=
github.com/googleapis/gnostic v0.2.2/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY=
github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg=
github.com/gookit/color v1.2.4/go.mod h1:AhIE+pS6D4Ql0SQWbBeXPHw7gY0/sjHoA4s/n1KB7xg=
github.com/gophercloud/gophercloud v0.1.0/go.mod h1:vxM41WHh5uqHVBMZHzuwNOHh8XEoIEcSTewFxm1c5g8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/goreleaser/goreleaser v0.136.0/go.mod h1:wiKrPUeSNh6Wu8nUHxZydSOVQ/OZvOaO7DTtFqie904=
github.com/goreleaser/nfpm v1.2.1/go.mod h1:TtWrABZozuLOttX2uDlYyECfQX7x5XYkVxhjYcR6G9w=
github.com/goreleaser/nfpm v1.3.0/go.mod h1:w0p7Kc9TAUgWMyrub63ex3M2Mgw88M4GZXoTq5UCb40=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
github.com/gorilla/handlers v0.0.0-20150720190736-60c7bfde3e33/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ=
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.7.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE=
github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE=
github.com/gotestyourself/gotestyourself v2.2.0+incompatible/go.mod h1:zZKM6oeNM8k+FRljX1mnzVYeS8wiGgQyvST1/GafPbY=
github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0 h1:0IKlLyQ3Hs9nDaiK5cSHAGmcQEIC8l2Ts1u6x5Dfrqg=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0/go.mod h1:mJzapYve32yjrKlk9GbyCZHuPgZsrbyIbyKhSzOpg6s=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v1.5.0/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
github.com/grpc-ecosystem/grpc-gateway v1.6.2/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.9.2/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645 h1:MJG/KsmcqMwFAkh8mTnAwhyKoB+sTAnY4CACC110tbU=
github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645/go.mod h1:6iZfnjpejD4L/4DwD7NryNaJyCQdzwWwH2MWhCA90Kw=
github.com/hanwen/go-fuse v1.0.0/go.mod h1:unqXarDXqzAk0rt98O2tVndEPIpUgLD9+rwFisZH3Ok=
github.com/hanwen/go-fuse/v2 v2.0.3/go.mod h1:0EQM6aH2ctVpvZ6a+onrQ/vaykxh2GH7hy3e13vzTUY=
github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I=
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-retryablehttp v0.6.4/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY=
github.com/hashicorp/go-retryablehttp v0.6.6/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY=
github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.3/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
github.com/hashicorp/uuid v0.0.0-20160311170451-ebb0a03e909c/go.mod h1:fHzc09UnyJyqyW+bFuq864eh+wC7dj65aXmXLRe5to0=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/ishidawataru/sctp v0.0.0-20191218070446-00ab2ac2db07/go.mod h1:co9pwDoBCm1kGxawmb4sPq0cSIOOWNPT4KnHotMP1Zg=
github.com/jarcoal/httpmock v1.0.5/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik=
github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU=
github.com/jingyugao/rowserrcheck v0.0.0-20191204022205-72ab7603b68a/go.mod h1:xRskid8CManxVta/ALEhJha/pweKBaVG6fWgc0yH25s=
github.com/jirfag/go-printf-func-name v0.0.0-20191110105641-45db9963cdd3/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0=
github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0=
github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.0.0-20160803190731-bd40a432e4c7/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik=
github.com/jmoiron/sqlx v1.2.1-0.20190826204134-d7d95172beb5/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks=
github.com/joefitzgerald/rainbow-reporter v0.1.0/go.mod h1:481CNgqmVHQZzdIbN52CupLJyoVwB10FQ/IQlF1pdL8=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/jpillora/backoff v0.0.0-20180909062703-3050d21c67d7/go.mod h1:2iMrUgbbvHEiQClaW2NsSzMyGHqN+rDFqY705q49KG0=
github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v0.0.0-20180701071628-ab8a2e0c74be/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.4.0/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/cpuid v0.0.0-20180405133222-e7e905edc00e/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs=
github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.1.1/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4=
github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=
github.com/maratori/testpackage v1.0.1/go.mod h1:ddKdw+XG0Phzhx8BFDTKgpWP4i7MpApTE5fXSKAqwDU=
github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho=
github.com/matoous/godox v0.0.0-20190911065817-5d6d842e92eb/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-ieproxy v0.0.0-20190610004146-91bb50d98149/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc=
github.com/mattn/go-ieproxy v0.0.0-20190702010315-6dee0af9227d/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc=
github.com/mattn/go-ieproxy v0.0.1/go.mod h1:pYabZ6IHcRpFh7vIaLfK7rdcWgFEb3SFJ6/gNWuh88E=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-shellwords v1.0.10/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-zglob v0.0.1/go.mod h1:9fxibJccNxU2cnpIKLRRFA7zX7qhkJIQWBb449FYHOo=
github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/maxbrunsfeld/counterfeiter/v6 v6.2.2/go.mod h1:eD9eIE7cdwcMi9rYluz88Jz2VyhSmden33/aXg4oVIY=
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-ps v0.0.0-20190716172923-621e5597135b/go.mod h1:r1VsdOzOPt1ZSrGZWFoNhsAedKnEd6r9Np1+5blZCWk=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
github.com/mitchellh/hashstructure v1.0.0/go.mod h1:QjSHrPWS+BGUVBYkbTZWEnOh3G1DutKwClXU/ABz6AQ=
github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.3.1 h1:cCBH2gTD2K0OtLlv/Y5H01VQCqmlDxz30kS5Y5bqfLA=
github.com/mitchellh/mapstructure v1.3.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A=
github.com/moby/buildkit v0.8.1 h1:zrGxLwffKM8nVxBvaJa7H404eQLfqlg1GB6YVIzXVQ0=
github.com/moby/buildkit v0.8.1/go.mod h1:/kyU1hKy/aYCuP39GZA9MaKioovHku57N6cqlKZIaiQ=
github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg=
github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc=
github.com/moby/sys/mount v0.1.0/go.mod h1:FVQFLDRWwyBjDTBNQXDlWnSFREqOo3OKX9aqhmeoo74=
github.com/moby/sys/mount v0.1.1 h1:mdhBytJ1SMmMat0gtzWWjFX/87K5j6E/7Q5z7rR0cZY=
github.com/moby/sys/mount v0.1.1/go.mod h1:FVQFLDRWwyBjDTBNQXDlWnSFREqOo3OKX9aqhmeoo74=
github.com/moby/sys/mountinfo v0.1.0/go.mod h1:w2t2Avltqx8vE7gX5l+QiBKxODu2TX0+Syr3h52Tw4o=
github.com/moby/sys/mountinfo v0.1.3/go.mod h1:w2t2Avltqx8vE7gX5l+QiBKxODu2TX0+Syr3h52Tw4o=
github.com/moby/sys/mountinfo v0.4.0 h1:1KInV3Huv18akCu58V7lzNlt+jFmqlu1EaErnEHE/VM=
github.com/moby/sys/mountinfo v0.4.0/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A=
github.com/moby/term v0.0.0-20200915141129-7f0af18e79f2 h1:SPoLlS9qUUnXcIY4pvA4CTwYjk0Is5f4UPEkeESr53k=
github.com/moby/term v0.0.0-20200915141129-7f0af18e79f2/go.mod h1:TjQg8pa4iejrUrjiz0MCtMV38jdMNW4doKSiBrEvCQQ=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180320133207-05fbef0ca5da/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
github.com/mozilla/tls-observatory v0.0.0-20190404164649-a3c1b6cfecfd/go.mod h1:SrKMQvPiws7F7iqYp8/TX+IhxCYhzr6N/1yb8cwHsGk=
github.com/mozilla/tls-observatory v0.0.0-20200317151703-4fa42e1c2dee/go.mod h1:SrKMQvPiws7F7iqYp8/TX+IhxCYhzr6N/1yb8cwHsGk=
github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de h1:D5x39vF5KCwKQaw+OC9ZPiLVHXz3UFw2+psEX+gYcto=
github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de/go.mod h1:kJun4WP5gFuHZgRjZUWWuH1DTxCtxbHDOIJsudS8jzY=
github.com/mrunalp/fileutils v0.0.0-20200520151820-abd8a0e76976/go.mod h1:x8F1gnqOkIEiO4rqoeEEEqQbo7HjGMTvyoq3gej4iT0=
github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw=
github.com/nakabonne/nestif v0.3.0/go.mod h1:dI314BppzXjJ4HsCnbo7XzrJHPszZsjnk5wEBSYHI2c=
github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d/go.mod h1:o96djdrsSGy3AWPyBgZMAGfxZNfgntdJG+11KU4QvbU=
github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo=
github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg=
github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
github.com/onsi/gomega v1.8.1/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA=
github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA=
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.0.0/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
github.com/opencontainers/image-spec v1.0.1 h1:JMemWkRwHx4Zj+fVxWoMCFm/8sYGGrUVojFA6h/TRcI=
github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
github.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
github.com/opencontainers/runc v1.0.0-rc10/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
github.com/opencontainers/runc v1.0.0-rc92 h1:+IczUKCRzDzFDnw99O/PAqrcBBCoRp9xN3cB1SYSNS4=
github.com/opencontainers/runc v1.0.0-rc92/go.mod h1:X1zlU4p7wOlX4+WRCz+hvlRv8phdL7UqbYD+vQwNMmE=
github.com/opencontainers/runtime-spec v0.1.2-0.20190507144316-5b71a03e2700/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-spec v1.0.1/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-spec v1.0.3-0.20200728170252-4d89ac9fbff6 h1:NhsM2gc769rVWDqJvapK37r+7+CBXI8xHhnfnt8uQsg=
github.com/opencontainers/runtime-spec v1.0.3-0.20200728170252-4d89ac9fbff6/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-tools v0.0.0-20181011054405-1d69bd0f9c39/go.mod h1:r3f7wjNzSs2extwzU3Y+6pKfobzPh+kKFJ3ofN+3nfs=
github.com/opencontainers/selinux v1.6.0 h1:+bIAS/Za3q5FTwWym4fTB0vObnfCf3G/NC7K6Jx62mY=
github.com/opencontainers/selinux v1.6.0/go.mod h1:VVGKuOLlE7v4PJyT6h7mNWvq1rzqiriPsEqVhc+svHE=
github.com/opentracing-contrib/go-stdlib v1.0.0/go.mod h1:qtI1ogk+2JhVPIXVc6q+NHziSmy2W5GbdQZFUHADCBU=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs=
github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc=
github.com/openzipkin/zipkin-go v0.1.1/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8=
github.com/openzipkin/zipkin-go v0.1.3/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8=
github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pelletier/go-toml v1.8.0 h1:Keo9qb7iRJs2voHvunFtuuYFsbWeOBh8/P9v/kVMFtw=
github.com/pelletier/go-toml v1.8.0/go.mod h1:D6yutnOGMveHEPV7VQOuvI/gXY61bv+9bAOTRnLElKs=
github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.5.0/go.mod h1:qBsxPvzyUincmltOk6iyRVxHYg4adc0OFOv72ZdLa18=
github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA=
github.com/prometheus/client_golang v0.0.0-20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.8.0/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/common v0.0.0-20180110214958-89604d197083/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.0-20190522114515-bc1a522cf7b1/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.5/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ=
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
github.com/quasilyte/go-consistent v0.0.0-20190521200055-c6f3937de18c/go.mod h1:5STLWrekHfjyYwxBRVRXNOSewLJ3PWfDJd1VyTS21fI=
github.com/quasilyte/go-ruleguard v0.1.2-0.20200318202121-b00d7a75d3d8/go.mod h1:CGFX09Ci3pq9QZdj86B+VGIdNj4VyCo2iPOGS9esB/k=
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/remyoudompheng/bigfft v0.0.0-20170806203942-52369c62f446/go.mod h1:uYEyJGbgTkfkS4+E/PavXkNJcbFIpEtjt2B0KDQ5+9M=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rogpeppe/fastuuid v1.1.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.5.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.6.2-0.20200830194709-1115b6af0369 h1:wdCVGtPadWC/ZuuLC7Hv58VQ5UF7V98ewE71n5mJfrM=
github.com/rogpeppe/go-internal v1.6.2-0.20200830194709-1115b6af0369/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
github.com/rs/zerolog v1.20.0 h1:38k9hgtUBdxFwE34yS8rTHmHBa4eN16E4DJlv177LNs=
github.com/rs/zerolog v1.20.0/go.mod h1:IzD0RJ65iWH0w97OQQebJEvTZYvsCUm9WVLWBQrJRjo=
github.com/rubiojr/go-vhd v0.0.0-20160810183302-0bfd3b39853c/go.mod h1:DM5xW0nvfNNm2uytzsvhI3OnX8uzaRAg8UX/CnDqbto=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryancurrah/gomodguard v1.0.4/go.mod h1:9T/Cfuxs5StfsocWr4WzDL36HqnX0fVb9d5fSEaLhoE=
github.com/ryancurrah/gomodguard v1.1.0/go.mod h1:4O8tr7hBODaGE6VIhfJDHcwzh5GUccKSJBU0UMXJFVM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/sassoftware/go-rpmutils v0.0.0-20190420191620-a8f1baeba37b/go.mod h1:am+Fp8Bt506lA3Rk3QCmSqmYmLMnPDhdDUcosQCAx+I=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/sclevine/spec v1.2.0/go.mod h1:W4J29eT/Kzv7/b9IWLB055Z+qvVC9vt0Arko24q7p+U=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo=
github.com/securego/gosec v0.0.0-20200103095621-79fbf3af8d83/go.mod h1:vvbZ2Ae7AzSq3/kywjUDxSNq2SJ27RxCz2un0H3ePqE=
github.com/securego/gosec v0.0.0-20200401082031-e946c8c39989/go.mod h1:i9l/TNj+yDFh9SZXUTvspXTjbFXgZGP/UvhU1S65A4A=
github.com/securego/gosec/v2 v2.3.0/go.mod h1:UzeVyUXbxukhLeHKV3VVqo7HdoQR9MrRfFmZYotn8ME=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/serialx/hashring v0.0.0-20190422032157-8b2912629002/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc=
github.com/shirou/gopsutil v0.0.0-20190901111213-e4ec7b275ada/go.mod h1:WWnYX4lzhCH5h/3YBfyVA3VbLYjlMZZAQcW9ojMexNc=
github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4/go.mod h1:qsXQc7+bwAM3Q1u/4XEfrquwF8Lw7D7y5cD8CuHnfIc=
github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.0.4-0.20170822132746-89742aefa4b2/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
github.com/sirupsen/logrus v1.7.0 h1:ShrD1U9pZB12TX0cVy0DtePoCH97K8EtX+mg7ZARUtM=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/assertions v1.0.0 h1:UVQPSSmc3qtTi+zPPkCXvZX9VvW/xT/NsRvKfwY81a8=
github.com/smartystreets/assertions v1.0.0/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM=
github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9/go.mod h1:SnhjPscd9TpLiy1LpzGSKh3bXCfxxXuqd9xmQJy3slM=
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/smartystreets/gunit v1.0.0/go.mod h1:qwPWnhz6pn0NnRBP++URONOVyNkPyr4SauJk4cUOwJs=
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
github.com/sourcegraph/go-diff v0.5.1/go.mod h1:j2dHj3m8aZgQO8lMTcTnBcXkRRRqi34cd2MNlA9u1mE=
github.com/sourcegraph/go-diff v0.5.3/go.mod h1:v9JDtjCE4HHHCZGId75rg8gkKKa98RVjBcBGsVmMmak=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
github.com/spf13/afero v1.2.2 h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc=
github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cobra v0.0.2-0.20171109065643-2da4a54c5cee/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
github.com/spf13/cobra v1.0.0 h1:6m/oheQuQ13N9ks4hubMG6BnvwOeaJrqSPLahSnczz8=
github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.1-0.20171106142849-4c012f6dcd95/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
github.com/spf13/viper v1.6.1/go.mod h1:t3iDnF5Jlj76alVNuyFBk5oUMCvsrkbvZK0WQdfDi5k=
github.com/spf13/viper v1.7.0 h1:xVKxvI7ouOI5I+U9s2eeiUfMaWBVoXA3AWskkrqK0VM=
github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg=
github.com/stretchr/objx v0.0.0-20180129172003-8a3f7159479f/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/testify v0.0.0-20151208002404-e3a8ff8ce365/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2 h1:b6uOv7YOFK0TYG7HtkIgExQo+2RdLuwRft63jn2HWj8=
github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA=
github.com/tdakkota/asciicheck v0.0.0-20200416190851-d7f85be797a2/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM=
github.com/tdakkota/asciicheck v0.0.0-20200416200610-e657995f937b/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM=
github.com/tetafro/godot v0.3.7/go.mod h1:/7NLHhv08H1+8DNj0MElpAACw1ajsCuf3TKNQxA5S+0=
github.com/tetafro/godot v0.4.2/go.mod h1:/7NLHhv08H1+8DNj0MElpAACw1ajsCuf3TKNQxA5S+0=
github.com/timakin/bodyclose v0.0.0-20190930140734-f7f2e9bca95e/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk=
github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk=
github.com/tj/assert v0.0.0-20171129193455-018094318fb0/go.mod h1:mZ9/Rh9oLWpLLDRpvE+3b7gP/C2YyLFYxNmcLnPTMe0=
github.com/tj/go-elastic v0.0.0-20171221160941-36157cbbebc2/go.mod h1:WjeM0Oo1eNAjXGDx2yma7uG2XoyRZTq1uv3M/o7imD0=
github.com/tj/go-kinesis v0.0.0-20171128231115-08b17f58cb1b/go.mod h1:/yhzCV0xPfx6jb1bBgRFjl5lytqVqZXEaeqWP8lTEao=
github.com/tj/go-spin v1.1.0/go.mod h1:Mg1mzmePZm4dva8Qz60H2lHwmJ2loum4VIrLgVnKwh4=
github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tommy-muehle/go-mnd v1.1.1/go.mod h1:dSUh0FtTP8VhvkL1S+gUR1OKd9ZnSaozuI6r3m6wOig=
github.com/tommy-muehle/go-mnd v1.3.1-0.20200224220436-e6f9a994e8fa/go.mod h1:dSUh0FtTP8VhvkL1S+gUR1OKd9ZnSaozuI6r3m6wOig=
github.com/tonistiigi/fsutil v0.0.0-20201103201449-0834f99b7b85 h1:014iQD8i8EabPWK2XgUuOTxg5s2nhfDmq6GupskfUO8=
github.com/tonistiigi/fsutil v0.0.0-20201103201449-0834f99b7b85/go.mod h1:a7cilN64dG941IOXfhJhlH0qB92hxJ9A1ewrdUmJ6xo=
github.com/tonistiigi/go-immutable-radix v0.0.0-20170803185627-826af9ccf0fe/go.mod h1:/+MCh11CJf2oz0BXmlmqyopK/ad1rKkcOXPoYuPCJYU=
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0=
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk=
github.com/tonistiigi/vt100 v0.0.0-20190402012908-ad4c4a574305 h1:y/1cL5AL2oRcfzz8CAHHhR6kDDfIOT0WEyH5k40sccM=
github.com/tonistiigi/vt100 v0.0.0-20190402012908-ad4c4a574305/go.mod h1:gXOLibKqQTRAVuVZ9gX7G9Ykky8ll8yb4slxsEMoY0c=
github.com/uber/jaeger-client-go v2.25.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk=
github.com/uber/jaeger-lib v2.2.0+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U=
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8=
github.com/ulikunitz/xz v0.5.7/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/ultraware/funlen v0.0.2/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA=
github.com/ultraware/whitespace v0.0.4/go.mod h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA=
github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/uudashr/gocognit v1.0.1/go.mod h1:j44Ayx2KW4+oB6SWMv8KsmHzZrOInQav7D3cQMJ5JUM=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasthttp v1.2.0/go.mod h1:4vX61m6KN+xDduDNwXrhIAVZaZaZiQ1luJk8LWSxF3s=
github.com/valyala/quicktemplate v1.2.0/go.mod h1:EH+4AkTd43SvgIbQHYu59/cJyxDoOVRUAfrukLPuGJ4=
github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio=
github.com/vdemeester/k8s-pkg-credentialprovider v1.17.4/go.mod h1:inCTmtUdr5KJbreVojo06krnTgaeAz/Z7lynpPk/Q2c=
github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE=
github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU=
github.com/vmware/govmomi v0.20.3/go.mod h1:URlwyTFZX72RmxtxuaFL2Uj3fD1JTvZdx59bHWk6aFU=
github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243 h1:R43TdZy32XXSXjJn7M/HhALJ9imq6ztLnChfYJpVDnM=
github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
github.com/xanzy/go-gitlab v0.31.0/go.mod h1:sPLojNBn68fMUWSxIJtdVVIP8uSBYqesTfDUseX11Ug=
github.com/xanzy/go-gitlab v0.32.0/go.mod h1:sPLojNBn68fMUWSxIJtdVVIP8uSBYqesTfDUseX11Ug=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v0.0.0-20180618132009-1d523034197f/go.mod h1:5yf86TLmAcydyeJq5YvxkGPE2fm/u4myDekKRoLuqhs=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs=
github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA=
github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg=
go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg=
go.opencensus.io v0.15.0/go.mod h1:UffZAU+4sDEINUGP/B7UfBBkq4fqLu9zXAX7ke6CHW0=
go.opencensus.io v0.18.0/go.mod h1:vKdFvxhtzZ9onBp9VKHK8z/sRpBMnKAsufL7wlDrCOA=
go.opencensus.io v0.19.1/go.mod h1:gug0GbSHa8Pafr0d2urOSgoXHZ6x/RUlaiT0d9pqb4A=
go.opencensus.io v0.19.2/go.mod h1:NO/8qkisMZLZ1FCsKNqtJPwc8/TaclWyY0B6wcYNg9M=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.3 h1:8sGtKOrtQqkN1bp2AtX+misvLIlOmsEsNd+9NIcPEm8=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
go4.org v0.0.0-20180809161055-417644f6feb5/go.mod h1:MkTOUMDaeVYJUOUsaDXIhWPZYa1yOyC1qaOBpL57BhE=
gocloud.dev v0.19.0/go.mod h1:SmKwiR8YwIMMJvQBKLsC3fHNyMwXLw3PMDO+VVteJMI=
golang.org/x/build v0.0.0-20190314133821-5284462c4bec/go.mod h1:atTaCNAy0f16Ah5aV1gMSwgiKVHwu/JncqDpuRr7lS4=
golang.org/x/crypto v0.0.0-20171113213409-9f005a07e0d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181030102418-4d3f4d9ffa16/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191002192127-34f69633bfdc/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201117144127-c1f2f97bffc9 h1:phUcVbl53swtrUN8kQEXFhUxPlIlWyBfKmidCu7P95o=
golang.org/x/crypto v0.0.0-20201117144127-c1f2f97bffc9/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190312203227-4b39c73a6495/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190731235908-ec7cb31e5a56/go.mod h1:JhuoJpWY28nO4Vef9tZUw9qufEGTyX1+7lmHxV5q5G4=
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
golang.org/x/exp v0.0.0-20210126221216-84987778548c/go.mod h1:I6l2HNBLBZEcrOoCpyKLdY2lHoRZ8lI4x60KMCQDft4=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20181217174547-8f45f776aaf1/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
golang.org/x/mobile v0.0.0-20201217150744-e6ae53a27f4f/go.mod h1:skQtrUTUwhdJvXM/2KKJzY8pDgNr9I/FOMqDVRPBUS4=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.1.1-0.20191209134235-331c550502dd/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.1-0.20200828183125-ce943fd02449/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180911220305-26e67e76b6c3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181029044818-c44066c5c816/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181108082009-03003ca0c849/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190619014844-b5b0513f8c1b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191112182307-2180aed22343/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201021035429-f5854403a974 h1:IX6qOQeG5uLjB/hjjwjedwfjND0hgjPMMyO1RoIXQNI=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/oauth2 v0.0.0-20180724155351-3d292e4d0cdc/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190402181905-9f3314589c9a/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/perf v0.0.0-20180704124530-6e6d33e29852/go.mod h1:JLpeXjPJfIyPr5TlbXLkXWLhP8nz10XfvxElABhCtcw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9 h1:SQFwaSi55rU7vdNs9Yr0Z324VNlrF+0wMqRXT4St8ck=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181029174526-d69651ed3497/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181218192612-074acd46bca6/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190209173611-3b5209105503/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190514135907-3a4b5fb9f71f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190602015325-4c4f7f33c9ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190620070143-6f217b454f45/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191112214154-59a1497f0cea/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191210023423-ac6580df4449/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200120151820-655fe14d7479/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200622214017-ed371f2e16b4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200728102440-3e129f6d46b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200909081042-eff7692f9009/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200916030750-2334cc1a136f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200917073148-efd3b9a0ff20/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201013081832-0aaa2718063a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c h1:VwygUrnw9jn88c4u8GD3rZQbqrP/tgas88tPUbBxQrk=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221 h1:/ZHdbVpdR/jk3g30/d4yUL0JU9kksj8+F/bnQUVLGDM=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 h1:NusfzzA6yGQ+ua51ck7E3omNUX/JuqbFSaRGqU8CcLI=
golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181219222714-6e267b5cc78e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190221204921-83362c3779f5/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190706070813-72ffa07ba3db/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
golang.org/x/tools v0.0.0-20190719005602-e377ae9d6386/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190828213141-aed303cbaa74/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113232020-e2727e816f5a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200102140908-9497f49d5709/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200117012304-6edc0a871e69/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200204192400-7124308813f3/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
golang.org/x/tools v0.0.0-20200331202046-9d5940d49312/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200414032229-332987a829c3/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200426102838-f3a5411a4c3b/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200502202811-ed308ab3e770/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200612220849-54c614fe050c/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gonum.org/v1/gonum v0.0.0-20190331200053-3d26580ed485/go.mod h1:2ltnJ7xHfj0zHS40VVPYEAAMTa3ZGguvHGBSJeRWqE0=
gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
gonum.org/v1/netlib v0.0.0-20190331212654-76723241ea4e/go.mod h1:kS+toOQn6AQKjmKJ7gzohV1XkqsFehRA2FbsbkopSuQ=
google.golang.org/api v0.0.0-20160322025152-9bf6e6e569ff/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20181220000619-583d854617af/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.2.0/go.mod h1:IfRCZScioGtypHNTlz3gFk67J8uePVW7uDTBzXuIkhU=
google.golang.org/api v0.3.0/go.mod h1:IuvZyQh8jgscv8qWfQ4ABd8m7hEudgBFM/EdhA3BnXw=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.5.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.6.0/go.mod h1:btoxGiFvQNVUZQ8W08zLtrVS08CNpINPEfxXxgJL1Q4=
google.golang.org/api v0.6.1-0.20190607001116-5213b8090861/go.mod h1:btoxGiFvQNVUZQ8W08zLtrVS08CNpINPEfxXxgJL1Q4=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
google.golang.org/api v0.25.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/cloud v0.0.0-20151119220103-975617b05ea8/go.mod h1:0H1ncTHf11KCFhTc/+EFRbzSCOZx+VUbRMk55Yv5MYk=
google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63 h1:YzfoEYWbODU5Fbt37+h7X16BWQbad7Q4S6gclTKFXM8=
google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
google.golang.org/grpc v1.29.1 h1:EC2SB8S04d2r73uptxphDSUG+kTKVgjRPF+N3xpxRB4=
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20141024133853-64131543e789/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/gcfg.v1 v1.2.0/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o=
gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo=
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.56.0 h1:DPMeDvGTM54DXbPkVIZsp19fp/I2K7zwA/itHYHKo8Y=
gopkg.in/ini.v1 v1.56.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/warnings.v0 v0.1.1/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200121175148-a6ecf24a6d71/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200506231410-2ff61e1afc86 h1:OfFoIUYv/me30yv7XlMy4F9RJw8DEm8WQ6QG1Ph4bH0=
gopkg.in/yaml.v3 v3.0.0-20200506231410-2ff61e1afc86/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=
gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
gotest.tools/v3 v3.0.2 h1:kG1BFyqVHuQoVQiR1bWGnfz/fmHvvuiSPIV7rvl360E=
gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=
grpc.go4.org v0.0.0-20170609214715-11d0a25b4919/go.mod h1:77eQGdRu53HpSqPFJFmuJdjuHRquDANNeA4x7B8WQ9o=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20180920025451-e3ad64cb4ed3/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.5/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
k8s.io/api v0.0.0-20180904230853-4e7be11eab3f/go.mod h1:iuAfoD4hCxJ8Onx9kaTIt30j7jUFS00AXQi6QMi99vA=
k8s.io/api v0.17.4/go.mod h1:5qxx6vjmwUVG2nHQTKGlLts8Tbok8PzHl4vHtVFuZCA=
k8s.io/api v0.19.0/go.mod h1:I1K45XlvTrDjmj5LoM5LuP/KYrhWbjUKT/SoPG0qTjw=
k8s.io/apimachinery v0.0.0-20180904193909-def12e63c512/go.mod h1:ccL7Eh7zubPUSh9A3USN90/OzHNSVN6zxzde07TDCL0=
k8s.io/apimachinery v0.17.4/go.mod h1:gxLnyZcGNdZTCLnq3fgzyg2A5BVCHTNDFrw8AmuJ+0g=
k8s.io/apimachinery v0.19.0/go.mod h1:DnPGDnARWFvYa3pMHgSxtbZb7gpzzAZ1pTfaUNDVlmA=
k8s.io/apiserver v0.17.4/go.mod h1:5ZDQ6Xr5MNBxyi3iUZXS84QOhZl+W7Oq2us/29c0j9I=
k8s.io/client-go v0.0.0-20180910083459-2cefa64ff137/go.mod h1:7vJpHMYJwNQCWgzmNV+VYUl1zCObLyodBc8nIyt8L5s=
k8s.io/client-go v0.17.4/go.mod h1:ouF6o5pz3is8qU0/qYL2RnoxOPqgfuidYLowytyLJmc=
k8s.io/client-go v0.19.0/go.mod h1:H9E/VT95blcFQnlyShFgnFT9ZnJOAceiUHM3MlRC+mU=
k8s.io/cloud-provider v0.17.4/go.mod h1:XEjKDzfD+b9MTLXQFlDGkk6Ho8SGMpaU8Uugx/KNK9U=
k8s.io/code-generator v0.17.2/go.mod h1:DVmfPQgxQENqDIzVR2ddLXMH34qeszkKSdH/N+s+38s=
k8s.io/component-base v0.17.4/go.mod h1:5BRqHMbbQPm2kKu35v3G+CpVq4K0RJKC7TRioF0I9lE=
k8s.io/cri-api v0.17.3/go.mod h1:X1sbHmuXhwaHs9xxYffLqJogVsnI+f6cPRcgPel7ywM=
k8s.io/csi-translation-lib v0.17.4/go.mod h1:CsxmjwxEI0tTNMzffIAcgR9lX4wOh6AKHdxQrT7L0oo=
k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=
k8s.io/gengo v0.0.0-20190822140433-26a664648505/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=
k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=
k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I=
k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE=
k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y=
k8s.io/kube-openapi v0.0.0-20180731170545-e3762e86a74c/go.mod h1:BXM9ceUBTj2QnfH2MK1odQs778ajze1RxcmP6S8RVVc=
k8s.io/kube-openapi v0.0.0-20191107075043-30be4d16710a/go.mod h1:1TqjTSzOxsLGIKfj0lK8EeCP7K1iUG65v09OM0/WG5E=
k8s.io/kube-openapi v0.0.0-20200805222855-6aeccd4b50c6/go.mod h1:UuqjUnNftUyPE5H64/qeyjQoUZhGpeFDVdxjTeEVN2o=
k8s.io/kubernetes v1.11.10/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk=
k8s.io/kubernetes v1.13.0/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk=
k8s.io/legacy-cloud-providers v0.17.4/go.mod h1:FikRNoD64ECjkxO36gkDgJeiQWwyZTuBkhu+yxOc1Js=
k8s.io/utils v0.0.0-20191114184206-e782cd3c129f/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew=
k8s.io/utils v0.0.0-20200729134348-d5654de09c73/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA=
modernc.org/cc v1.0.0/go.mod h1:1Sk4//wdnYJiUIxnW8ddKpaOJCF37yAdqYnkxUpaYxw=
modernc.org/golex v1.0.0/go.mod h1:b/QX9oBD/LhixY6NDh+IdGv17hgB+51fET1i2kPSmvk=
modernc.org/mathutil v1.0.0/go.mod h1:wU0vUrJsVWBZ4P6e7xtFJEhFSNsfRLJ8H458uRjg03k=
modernc.org/strutil v1.0.0/go.mod h1:lstksw84oURvj9y3tn8lGvRxyRC1S2+g5uuIzNfIOBs=
modernc.org/xc v1.0.0/go.mod h1:mRNCo0bvLjGhHO9WsyuKVU4q0ceiDDDoEeWDJHrNx8I=
mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc=
mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4=
mvdan.cc/unparam v0.0.0-20190720180237-d51796306d8f/go.mod h1:4G1h5nDURzA3bwVMZIVpwbkw+04kSxk3rAtzlimaUJw=
mvdan.cc/unparam v0.0.0-20200501210554-b37ab49443f7/go.mod h1:HGC5lll35J70Y5v7vCGb9oLhHoScFwkHDJm/05RdSTc=
pack.ag/amqp v0.11.2/go.mod h1:4/cbmt4EJXSKlG6LCfWHoqmN0uFdy5i/+YFz+fTfhV4=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e/go.mod h1:wWxsB5ozmmv/SG7nM11ayaAW51xMvak/t1r0CSlcokI=
sigs.k8s.io/structured-merge-diff v1.0.1-0.20191108220359-b1b620dd3f06/go.mod h1:/ULNhyfzRopfcjskuui0cTITekDduZ7ycKN3oUT9R18=
sigs.k8s.io/structured-merge-diff/v4 v4.0.1/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw=
sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=
sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc=
sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4/go.mod h1:ketZ/q3QxT9HOBeFhu6RdvsftgpsbFHBF5Cas6cDKZ0=
sourcegraph.com/sqs/pbtypes v1.0.0/go.mod h1:3AciMUv4qUuRHRHhOG4TZOB+72GdPVz5k+c648qsFS4=
|
closed
|
dagger/dagger
|
https://github.com/dagger/dagger
| 1,955 |
🐞 Dagger: runtime: out of memory
|
### What is the issue?
I have written some of my own CUE modules, which may run out of memory when executed repeatedly over a long period of time.
### Log output

### Steps to reproduce
_No response_
### Dagger version
dagger devel (b32c8732) linux/amd64
### OS version
Linux VM-0-6-ubuntu 4.15.0-159-generic #167-Ubuntu SMP Tue Sep 21 08:55:05 UTC 2021 x86_64 x86_64 x86_64 GNU/Linux
|
https://github.com/dagger/dagger/issues/1955
|
https://github.com/dagger/dagger/pull/143
|
6eb3d5afbda9e1ee223069be8a32e750c779edd3
|
50e8b8c07dd39382281dbfb0f8878ebcad637220
| 2022-03-31T07:24:07Z |
go
| 2021-03-01T22:03:23Z |
.github/workflows/ci.yml
|
name: CI
on:
push:
branches: [main]
pull_request:
branches: [main]
jobs:
build:
name: Build
runs-on: ubuntu-latest
steps:
- name: Set up Go
uses: actions/setup-go@v1
with:
go-version: 1.16
id: go
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends shellcheck
curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sudo sh -s -- -b /usr/local/bin v1.23.8
curl -L https://github.com/cuelang/cue/releases/download/v0.3.0-beta.4/cue_0.3.0-beta.4_Linux_x86_64.tar.gz | sudo tar zxf - -C /usr/local/bin
- name: Check out
uses: actions/checkout@v2
- name: Build
run: |
make
- name: Lint
run: |
make lint
- name: Start buildkit
run: |
docker run -d --name buildkitd --privileged moby/buildkit:v0.8.1@sha256:ecd5ad4910c322cad6995f8a1a0805d9da4b09ed4aaef40627f5bcb8ebf74068
- name: Integration test
run: |
make integration
- name: Test
run: |
make test
|
closed
|
dagger/dagger
|
https://github.com/dagger/dagger
| 1,955 |
🐞 Dagger: runtime: out of memory
|
### What is the issue?
I have written some of my own CUE modules, which may run out of memory when executed repeatedly over a long period of time.
### Log output

### Steps to reproduce
_No response_
### Dagger version
dagger devel (b32c8732) linux/amd64
### OS version
Linux VM-0-6-ubuntu 4.15.0-159-generic #167-Ubuntu SMP Tue Sep 21 08:55:05 UTC 2021 x86_64 x86_64 x86_64 GNU/Linux
|
https://github.com/dagger/dagger/issues/1955
|
https://github.com/dagger/dagger/pull/143
|
6eb3d5afbda9e1ee223069be8a32e750c779edd3
|
50e8b8c07dd39382281dbfb0f8878ebcad637220
| 2022-03-31T07:24:07Z |
go
| 2021-03-01T22:03:23Z |
go.mod
|
module dagger.io/go
go 1.16
require (
cuelang.org/go v0.3.0-beta.5
github.com/KromDaniel/jonson v0.0.0-20180630143114-d2f9c3c389db
github.com/containerd/console v1.0.1
github.com/docker/distribution v2.7.1+incompatible // indirect
github.com/emicklei/proto v1.9.0 // indirect
github.com/jaguilar/vt100 v0.0.0-20150826170717-2703a27b14ea
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db
github.com/moby/buildkit v0.8.1
github.com/morikuni/aec v1.0.0
github.com/opencontainers/go-digest v1.0.0
github.com/rs/zerolog v1.20.0
github.com/spf13/cobra v1.1.3
github.com/spf13/pflag v1.0.5
github.com/spf13/viper v1.7.1
github.com/tonistiigi/fsutil v0.0.0-20201103201449-0834f99b7b85
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c // indirect
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221
golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1
golang.org/x/tools v0.1.0 // indirect
gopkg.in/yaml.v3 v3.0.0-20200506231410-2ff61e1afc86
)
replace (
// protobuf: corresponds to containerd
github.com/golang/protobuf => github.com/golang/protobuf v1.3.5
github.com/hashicorp/go-immutable-radix => github.com/tonistiigi/go-immutable-radix v0.0.0-20170803185627-826af9ccf0fe
github.com/jaguilar/vt100 => github.com/tonistiigi/vt100 v0.0.0-20190402012908-ad4c4a574305
// genproto: corresponds to containerd
google.golang.org/genproto => google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63
)
|
closed
|
dagger/dagger
|
https://github.com/dagger/dagger
| 1,955 |
🐞 Dagger: runtime: out of memory
|
### What is the issue?
I have written some of my own CUE modules, which may run out of memory when executed repeatedly over a long period of time.
### Log output

### Steps to reproduce
_No response_
### Dagger version
dagger devel (b32c8732) linux/amd64
### OS version
Linux VM-0-6-ubuntu 4.15.0-159-generic #167-Ubuntu SMP Tue Sep 21 08:55:05 UTC 2021 x86_64 x86_64 x86_64 GNU/Linux
|
https://github.com/dagger/dagger/issues/1955
|
https://github.com/dagger/dagger/pull/143
|
6eb3d5afbda9e1ee223069be8a32e750c779edd3
|
50e8b8c07dd39382281dbfb0f8878ebcad637220
| 2022-03-31T07:24:07Z |
go
| 2021-03-01T22:03:23Z |
go.sum
|
bazil.org/fuse v0.0.0-20160811212531-371fbbdaa898/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8=
bazil.org/fuse v0.0.0-20180421153158-65cc252bf669/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8=
cloud.google.com/go v0.25.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.31.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.37.2/go.mod h1:H8IAquKe2L30IxoupDgqTaQvKSwF/c8prYHynGIWQbA=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
cloud.google.com/go v0.39.0/go.mod h1:rVLT6fkc8chs9sfPtFc1SBH6em7n+ZoXaG+87tDISts=
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
cloud.google.com/go v0.51.0/go.mod h1:hWtGJ6gnXH+KgDv+V0zFGDvpi07n3z8ZNj3T1RW0Gcw=
cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=
cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
code.gitea.io/sdk/gitea v0.12.0/go.mod h1:z3uwDV/b9Ls47NGukYM9XhnHtqPh/J+t40lsUrR6JDY=
contrib.go.opencensus.io/exporter/aws v0.0.0-20181029163544-2befc13012d0/go.mod h1:uu1P0UCM/6RbsMrgPa98ll8ZcHM858i/AD06a9aLRCA=
contrib.go.opencensus.io/exporter/ocagent v0.5.0/go.mod h1:ImxhfLRpxoYiSq891pBrLVhN+qmP8BTVvdH2YLs7Gl0=
contrib.go.opencensus.io/exporter/stackdriver v0.12.1/go.mod h1:iwB6wGarfphGGe/e5CWqyUk/cLzKnWsOKPVW3no6OTw=
contrib.go.opencensus.io/integrations/ocsql v0.1.4/go.mod h1:8DsSdjz3F+APR+0z0WkU1aRorQCFfRxvqjUUPMbF3fE=
contrib.go.opencensus.io/resource v0.1.1/go.mod h1:F361eGI91LCmW1I/Saf+rX0+OFcigGlFvXwEGEnkRLA=
cuelang.org/go v0.3.0-beta.5 h1:c+zS9MBCFzbuz+RI+3dp2PwfnS05aF3MqSEaAGFwzSk=
cuelang.org/go v0.3.0-beta.5/go.mod h1:Ikvs157igkGV5gFUdYSFa+lWp/CDteVhubPTXyvPRtA=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20201218220906-28db891af037/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
git.apache.org/thrift.git v0.12.0/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
github.com/AkihiroSuda/containerd-fuse-overlayfs v1.0.0/go.mod h1:0mMDvQFeLbbn1Wy8P2j3hwFhqBq+FKn8OZPno8WLmp8=
github.com/Azure/azure-amqp-common-go/v2 v2.1.0/go.mod h1:R8rea+gJRuJR6QxTir/XuEd+YuKoUiazDC/N96FiDEU=
github.com/Azure/azure-pipeline-go v0.2.1/go.mod h1:UGSo8XybXnIGZ3epmeBw7Jdz+HiUVpqIlpz/HKHylF4=
github.com/Azure/azure-pipeline-go v0.2.2/go.mod h1:4rQ/NZncSvGqNkkOsNpOU1tgoNuIlp9AfUH5G1tvCHc=
github.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v19.1.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v29.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v30.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v35.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v38.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v42.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-service-bus-go v0.9.1/go.mod h1:yzBx6/BUGfjfeqbRZny9AQIbIe3AcV9WZbAdpkoXOa0=
github.com/Azure/azure-storage-blob-go v0.8.0/go.mod h1:lPI3aLPpuLTeUwh1sViKXFxwl2B6teiRqI0deQUvsw0=
github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78 h1:w+iIsaOQNcT7OZ575w+acHgRric5iCyQh+xv+KJ4HB8=
github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8=
github.com/Azure/go-autorest v10.8.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest v10.15.5+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest v12.0.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest v14.1.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI=
github.com/Azure/go-autorest/autorest v0.9.3/go.mod h1:GsRuLYvwzLjjjRoWEIyMUaYq8GNUx2nRB378IPt/1p0=
github.com/Azure/go-autorest/autorest v0.9.6/go.mod h1:/FALq9T/kS7b5J5qsQ+RSTUdAmGFqi0vUdVNNx8q630=
github.com/Azure/go-autorest/autorest v0.10.2/go.mod h1:/FALq9T/kS7b5J5qsQ+RSTUdAmGFqi0vUdVNNx8q630=
github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0=
github.com/Azure/go-autorest/autorest/adal v0.8.0/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc=
github.com/Azure/go-autorest/autorest/adal v0.8.1/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q=
github.com/Azure/go-autorest/autorest/adal v0.8.2/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q=
github.com/Azure/go-autorest/autorest/adal v0.8.3/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q=
github.com/Azure/go-autorest/autorest/azure/auth v0.4.2/go.mod h1:90gmfKdlmKgfjUpnCEpOJzsUEjrWDSLwHIG73tSXddM=
github.com/Azure/go-autorest/autorest/azure/cli v0.3.1/go.mod h1:ZG5p860J94/0kI9mNJVoIoLgXcirM2gF5i2kWloofxw=
github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA=
github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g=
github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM=
github.com/Azure/go-autorest/autorest/to v0.2.0/go.mod h1:GunWKJp1AEqgMaGLV+iocmRAJWqST1wQYhyyjXJ3SJc=
github.com/Azure/go-autorest/autorest/to v0.3.0/go.mod h1:MgwOyqaIuKdG4TL/2ywSsIWKAfJfgHDo8ObuUk3t5sA=
github.com/Azure/go-autorest/autorest/validation v0.1.0/go.mod h1:Ha3z/SqBeaalWQvokg3NZAlQTalVMtOIAs1aGK7G6u8=
github.com/Azure/go-autorest/autorest/validation v0.2.0/go.mod h1:3EEqHnBxQGHXRYq3HT1WyXAvT7LLY3tl70hw6tQIbjI=
github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc=
github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/Djarvur/go-err113 v0.0.0-20200410182137-af658d038157/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs=
github.com/Djarvur/go-err113 v0.1.0/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs=
github.com/GoogleCloudPlatform/cloudsql-proxy v0.0.0-20191009163259-e802c2cb94ae/go.mod h1:mjwGPas4yKduTyubHvD1Atl9r1rUq8DfVy+gkVvZ+oo=
github.com/GoogleCloudPlatform/k8s-cloud-provider v0.0.0-20190822182118-27a4ced34534/go.mod h1:iroGtC8B3tQiqtds1l+mgk/BBOrxbqjH+eUfFQYRc14=
github.com/KromDaniel/jonson v0.0.0-20180630143114-d2f9c3c389db h1:Zkf5kwhxdW0xV7WM/crqIcOP5LCFGnAmumWSFAewJ74=
github.com/KromDaniel/jonson v0.0.0-20180630143114-d2f9c3c389db/go.mod h1:RU+6d0CNIRSp6yo1mXLIIrnFa/3LHhvcDVLVJyovptM=
github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/Masterminds/semver/v3 v3.0.3/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
github.com/Masterminds/semver/v3 v3.1.0/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA=
github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=
github.com/Microsoft/go-winio v0.4.15-0.20200908182639-5b44b70ab3ab/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=
github.com/Microsoft/go-winio v0.4.15 h1:qkLXKzb1QoVatRyd/YlXZ/Kg0m5K3SPuoD82jjSOaBc=
github.com/Microsoft/go-winio v0.4.15/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=
github.com/Microsoft/hcsshim v0.8.7/go.mod h1:OHd7sQqRFrYd3RmSgbgji+ctCwkbq2wbEYNSzOYtcBQ=
github.com/Microsoft/hcsshim v0.8.9/go.mod h1:5692vkUqntj1idxauYlpoINNKeqCiG6Sg38RRsjT5y8=
github.com/Microsoft/hcsshim v0.8.10 h1:k5wTrpnVU2/xv8ZuzGkbXVd3js5zJ8RnumPo5RxiIxU=
github.com/Microsoft/hcsshim v0.8.10/go.mod h1:g5uw8EV2mAlzqe94tfNBNdr89fnbD/n3HV0OhsddkmM=
github.com/Microsoft/hcsshim/test v0.0.0-20200826032352-301c83a30e7c/go.mod h1:30A5igQ91GEmhYJF8TaRP79pMBOYynRsyOByfVV0dU4=
github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/OpenPeeDeeP/depguard v1.0.1/go.mod h1:xsIw86fROiiwelg+jB2uM9PiKihMMmUx/1V+TNhjQvM=
github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
github.com/alecthomas/kingpin v2.2.6+incompatible/go.mod h1:59OFYbFVLKQKq+mqrL6Rw5bR0c3ACQaawgXx0QYndlE=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/apex/log v1.1.4/go.mod h1:AlpoD9aScyQfJDVHmLMEcx4oU6LqzkWp4Mg9GdAcEvQ=
github.com/apex/log v1.3.0/go.mod h1:jd8Vpsr46WAe3EZSQ/IUMs2qQD/GOycT5rPWCO1yGcs=
github.com/apex/logs v0.0.4/go.mod h1:XzxuLZ5myVHDy9SAmYpamKKRNApGj54PfYLcFrXqDwo=
github.com/aphistic/golf v0.0.0-20180712155816-02c07f170c5a/go.mod h1:3NqKYiepwy8kCu4PNA+aP7WUV72eXWJeP9/r3/K9aLE=
github.com/aphistic/sweet v0.2.0/go.mod h1:fWDlIh/isSE9n6EPsRmC0det+whmX6dJid3stzu0Xys=
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
github.com/aws/aws-sdk-go v1.15.27/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
github.com/aws/aws-sdk-go v1.15.90/go.mod h1:es1KtYUFs7le0xQ3rOihkuoVD90z7D0fR2Qm4S00/gU=
github.com/aws/aws-sdk-go v1.16.26/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.19.18/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.19.45/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.20.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.25.11/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.27.1/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.31.6/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=
github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I=
github.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA=
github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb/go.mod h1:PkYb9DJNAwrSvRx5DYA+gUcOIgTGVMNkfSCbZM8cWpI=
github.com/blang/semver v3.1.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
github.com/blang/semver v3.5.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/bombsimon/wsl/v2 v2.0.0/go.mod h1:mf25kr/SqFEPhhcxW1+7pxzGlW+hIl/hYTKY95VwV8U=
github.com/bombsimon/wsl/v2 v2.2.0/go.mod h1:Azh8c3XGEJl9LyX0/sFC+CKMc7Ssgua0g+6abzXN4Pg=
github.com/bombsimon/wsl/v3 v3.0.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc=
github.com/bombsimon/wsl/v3 v3.1.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc=
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk=
github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8=
github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50=
github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE=
github.com/caarlos0/ctrlc v1.0.0/go.mod h1:CdXpj4rmq0q/1Eb44M9zi2nKB0QraNKuRGYGrrHhcQw=
github.com/campoy/unique v0.0.0-20180121183637-88950e537e7e/go.mod h1:9IOqJGCPMSc6E5ydlp5NIonxObaeu/Iub/X03EKPVYo=
github.com/cavaliercoder/go-cpio v0.0.0-20180626203310-925f9528c45e/go.mod h1:oDpT4efm8tSYHXV5tHSdRvBet/b/QzxZ+XyyPehvm3A=
github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/cilium/ebpf v0.0.0-20200110133405-4032b1d8aae3/go.mod h1:MA5e5Lr8slmEg9bt0VpxxWqJlO4iwu3FBdHUzV7wQVg=
github.com/cilium/ebpf v0.0.0-20200702112145-1c8d4c9ef775/go.mod h1:7cR51M8ViRLIdUjrmSXlK9pkrsDlLHbO8jiB8X8JnOc=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
github.com/cockroachdb/apd/v2 v2.0.1 h1:y1Rh3tEU89D+7Tgbw+lp52T6p/GJLpDmNvr10UWqLTE=
github.com/cockroachdb/apd/v2 v2.0.1/go.mod h1:DDxRlzC2lo3/vSlmSoS7JkqbbrARPuFOGr0B9pvN3Gw=
github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8=
github.com/codahale/hdrhistogram v0.0.0-20160425231609-f8ad88b59a58/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=
github.com/containerd/cgroups v0.0.0-20190919134610-bf292b21730f/go.mod h1:OApqhQ4XNSNC13gXIwDjhOQxjWa/NxkwZXJ1EvqT0ko=
github.com/containerd/cgroups v0.0.0-20200531161412-0dbf7f05ba59/go.mod h1:pA0z1pT8KYB3TCXK/ocprsh7MAkoW8bZVzPdih9snmM=
github.com/containerd/cgroups v0.0.0-20200710171044-318312a37340 h1:9atoWyI9RtXFwf7UDbme/6M8Ud0rFrx+Q3ZWgSnsxtw=
github.com/containerd/cgroups v0.0.0-20200710171044-318312a37340/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo=
github.com/containerd/console v0.0.0-20180822173158-c12b1e7919c1/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw=
github.com/containerd/console v0.0.0-20191206165004-02ecf6a7291e/go.mod h1:8Pf4gM6VEbTNRIT26AyyU7hxdQU3MvAvxVI0sc00XBE=
github.com/containerd/console v1.0.0/go.mod h1:8Pf4gM6VEbTNRIT26AyyU7hxdQU3MvAvxVI0sc00XBE=
github.com/containerd/console v1.0.1 h1:u7SFAJyRqWcG6ogaMAx3KjSTy1e3hT9QxqX7Jco7dRc=
github.com/containerd/console v1.0.1/go.mod h1:XUsP6YE/mKtz6bxc+I8UiKKTP04qjQL4qcS3XoQ5xkw=
github.com/containerd/containerd v1.3.0-beta.2.0.20190828155532-0293cbd26c69/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/containerd v1.3.0/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/containerd v1.3.2/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/containerd v1.4.0/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/containerd v1.4.1-0.20201117152358-0edc412565dc h1:XbZ/DDsFDigeOQ9M3YXhvE6d1AEHdxKAzIgkswip7dI=
github.com/containerd/containerd v1.4.1-0.20201117152358-0edc412565dc/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y=
github.com/containerd/continuity v0.0.0-20200710164510-efbc4488d8fe h1:PEmIrUvwG9Yyv+0WKZqjXfSFDeZjs/q15g0m08BYS9k=
github.com/containerd/continuity v0.0.0-20200710164510-efbc4488d8fe/go.mod h1:cECdGN1O8G9bgKTlLhuPJimka6Xb/Gg7vYzCTNVxhvo=
github.com/containerd/fifo v0.0.0-20190226154929-a9fb20d87448/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI=
github.com/containerd/fifo v0.0.0-20200410184934-f15a3290365b h1:qUtCegLdOUVfVJOw+KDg6eJyE1TGvLlkGEd1091kSSQ=
github.com/containerd/fifo v0.0.0-20200410184934-f15a3290365b/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0=
github.com/containerd/go-cni v1.0.1/go.mod h1:+vUpYxKvAF72G9i1WoDOiPGRtQpqsNW/ZHtSlv++smU=
github.com/containerd/go-runc v0.0.0-20180907222934-5a6d9f37cfa3/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0=
github.com/containerd/go-runc v0.0.0-20200220073739-7016d3ce2328/go.mod h1:PpyHrqVs8FTi9vpyHwPwiNEGaACDxT/N/pLcvMSRA9g=
github.com/containerd/go-runc v0.0.0-20201020171139-16b287bc67d0/go.mod h1:cNU0ZbCgCQVZK4lgG3P+9tn9/PaJNmoDXPpoJhDR+Ok=
github.com/containerd/stargz-snapshotter v0.0.0-20201027054423-3a04e4c2c116/go.mod h1:o59b3PCKVAf9jjiKtCc/9hLAd+5p/rfhBfm6aBcTEr4=
github.com/containerd/ttrpc v0.0.0-20190828154514-0e0f228740de/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o=
github.com/containerd/ttrpc v1.0.1 h1:IfVOxKbjyBn9maoye2JN95pgGYOmPkQVqxtOu7rtNIc=
github.com/containerd/ttrpc v1.0.1/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y=
github.com/containerd/typeurl v0.0.0-20180627222232-a93fcdb778cd/go.mod h1:Cm3kwCdlkCfMSHURc+r6fwoGH6/F1hH3S4sg0rLFWPc=
github.com/containerd/typeurl v1.0.1 h1:PvuK4E3D5S5q6IqsPDCy928FhP0LUIGcmZ/Yhgp5Djw=
github.com/containerd/typeurl v1.0.1/go.mod h1:TB1hUtrpaiO88KEK56ijojHS1+NeF0izUACaJW2mdXg=
github.com/containernetworking/cni v0.8.0/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY=
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
github.com/coreos/go-oidc v2.1.0+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc=
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd/v22 v22.0.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk=
github.com/coreos/go-systemd/v22 v22.1.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk=
github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/coreos/pkg v0.0.0-20180108230652-97fdf19511ea/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4=
github.com/davecgh/go-spew v0.0.0-20151105211317-5215b55f46b2/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba/go.mod h1:dV8lFg6daOBZbT6/BDGIz6Y3WFGn8juu6G+CQ6LHtl0=
github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY=
github.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8=
github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E=
github.com/docker/cli v0.0.0-20190925022749-754388324470/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/cli v0.0.0-20191017083524-a8ff7f821017/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/cli v20.10.0-beta1.0.20201029214301-1d20b15adc38+incompatible h1:r99CiNpN5pxrSuSH36suYxrbLxFOhBvQ0sEH6624MHs=
github.com/docker/cli v20.10.0-beta1.0.20201029214301-1d20b15adc38+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY=
github.com/docker/distribution v2.6.0-rc.1.0.20180327202408-83389a148052+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/distribution v2.7.1+incompatible h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug=
github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/docker v0.0.0-20200511152416-a93e9eb0e95c/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v0.7.3-0.20190327010347-be7ac8be2ae0/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v1.4.2-0.20180531152204-71cd53e4a197/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v1.4.2-0.20190924003213-a8608b5b67c7/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v17.12.0-ce-rc1.0.20200730172259-9f28837c1d93+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v20.10.0-beta1.0.20201110211921-af34b94a78a1+incompatible h1:J2OhsbfqoBRRT048iD/tqXBvEQWQATQ8vew6LqQmDSU=
github.com/docker/docker v20.10.0-beta1.0.20201110211921-af34b94a78a1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y=
github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c h1:+pKlWGMw7gf6bQ+oDZB4KHQFypsfjYlq/C4rfL7D3g8=
github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA=
github.com/docker/go-metrics v0.0.0-20180209012529-399ea8c73916/go.mod h1:/u0gXw0Gay3ceNrsHubL3BtdOL2fHf93USgMTe0W5dI=
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=
github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docker/libnetwork v0.8.0-dev.2.0.20200917202933-d0951081b35f h1:jC/ZXgYdzCUuKFkKGNiekhnIkGfUrdelEqvg4Miv440=
github.com/docker/libnetwork v0.8.0-dev.2.0.20200917202933-d0951081b35f/go.mod h1:93m0aTqz6z+g32wla4l4WxTrdtvBRmVzYRkYvasA5Z8=
github.com/docker/libtrust v0.0.0-20150114040149-fa567046d9b1/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE=
github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
github.com/elazarl/goproxy v0.0.0-20170405201442-c4fc26588b6e/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emicklei/proto v1.6.15/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A=
github.com/emicklei/proto v1.9.0 h1:l0QiNT6Qs7Yj0Mb4X6dnWBQer4ebei2BFcgQLbGqUDc=
github.com/emicklei/proto v1.9.0/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/fortytw2/leaktest v1.2.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=
github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
github.com/go-critic/go-critic v0.4.1/go.mod h1:7/14rZGnZbY6E38VEGk2kVhoq6itzc1E68facVDK23g=
github.com/go-critic/go-critic v0.4.3/go.mod h1:j4O3D4RoIwRqlZw5jJpx0BNfXWWbpcJoKu5cYSe4YmQ=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-lintpack/lintpack v0.5.2/go.mod h1:NwZuYi2nUHho8XEIZ6SIxihrnPoqBTDqfpXvXAN0sXM=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=
github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU=
github.com/go-ole/go-ole v1.2.1/go.mod h1:7FAglXiTm7HKlQRDeOQ6ZNUHidzCWXuZWq/1dTyBNF8=
github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0=
github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg=
github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg=
github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc=
github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8=
github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc=
github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8Lj9mJglo=
github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I=
github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-toolsmith/astcast v1.0.0/go.mod h1:mt2OdQTeAQcY4DQgPSArJjHCcOwlX+Wl/kwN+LbLGQ4=
github.com/go-toolsmith/astcopy v1.0.0/go.mod h1:vrgyG+5Bxrnz4MZWPF+pI4R8h3qKRjjyvV/DSez4WVQ=
github.com/go-toolsmith/astequal v0.0.0-20180903214952-dcb477bfacd6/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY=
github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY=
github.com/go-toolsmith/astfmt v0.0.0-20180903215011-8f8ee99c3086/go.mod h1:mP93XdblcopXwlyN4X4uodxXQhldPGZbcEJIimQHrkg=
github.com/go-toolsmith/astfmt v1.0.0/go.mod h1:cnWmsOAuq4jJY6Ct5YWlVLmcmLMn1JUPuQIHCY7CJDw=
github.com/go-toolsmith/astinfo v0.0.0-20180906194353-9809ff7efb21/go.mod h1:dDStQCHtmZpYOmjRP/8gHHnCCch3Zz3oEgCdZVdtweU=
github.com/go-toolsmith/astp v0.0.0-20180903215135-0af7e3c24f30/go.mod h1:SV2ur98SGypH1UjcPpCatrV5hPazG6+IfNHbkDXBRrk=
github.com/go-toolsmith/astp v1.0.0/go.mod h1:RSyrtpVlfTFGDYRbrjyWP1pYu//tSFcvdYrA8meBmLI=
github.com/go-toolsmith/pkgload v0.0.0-20181119091011-e9e65178eee8/go.mod h1:WoMrjiy4zvdS+Bg6z9jZH82QXwkcgCBX6nOfnmdaHks=
github.com/go-toolsmith/pkgload v1.0.0/go.mod h1:5eFArkbO80v7Z0kdngIxsRXRMTaX4Ilcwuh3clNrQJc=
github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8=
github.com/go-toolsmith/typep v1.0.0/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU=
github.com/go-toolsmith/typep v1.0.2/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU=
github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/godbus/dbus v0.0.0-20190422162347-ade71ed3457e/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4=
github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gofrs/flock v0.0.0-20190320160742-5135e617513b/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
github.com/gofrs/flock v0.7.3 h1:I0EKY9l8HZCXTMYC4F80vwT6KNypV9uYKP3Alm/hjmQ=
github.com/gofrs/flock v0.7.3/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
github.com/gogo/googleapis v1.2.0/go.mod h1:Njal3psf3qN6dwBtQfUmBZh2ybovJ0tlu3o/AC7HYjU=
github.com/gogo/googleapis v1.3.2 h1:kX1es4djPJrsDhY7aZKJy7aZasdcB5oSOEphMjSB53c=
github.com/gogo/googleapis v1.3.2/go.mod h1:5YRNX2z1oM5gXdAkurHa942MDgEJyk02w4OecKY87+c=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/gogo/protobuf v1.3.1 h1:DqDEcV5aeaTmdFBePNpYsp3FlcVH/2ISVVM9Qf8PSls=
github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/protobuf v1.3.5 h1:F768QJ1E9tib+q5Sc8MkdJi1RxLTbRcTf8LJV56aRls=
github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4=
github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk=
github.com/golangci/errcheck v0.0.0-20181223084120-ef45e06d44b6/go.mod h1:DbHgvLiFKX1Sh2T1w8Q/h4NAI8MHIpzCdnBUDTXU3I0=
github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8=
github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3/go.mod h1:JXrF4TWy4tXYn62/9x8Wm/K/dm06p8tCKwFRDPZG/1o=
github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee/go.mod h1:ozx7R9SIwqmqf5pRP90DhR2Oay2UIjGuKheCBCNwAYU=
github.com/golangci/gocyclo v0.0.0-20180528144436-0a533e8fa43d/go.mod h1:ozx7R9SIwqmqf5pRP90DhR2Oay2UIjGuKheCBCNwAYU=
github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU=
github.com/golangci/golangci-lint v1.23.7/go.mod h1:g/38bxfhp4rI7zeWSxcdIeHTQGS58TCak8FYcyCmavQ=
github.com/golangci/golangci-lint v1.27.0/go.mod h1:+eZALfxIuthdrHPtfM7w/R3POJLjHDfJJw8XZl9xOng=
github.com/golangci/ineffassign v0.0.0-20190609212857-42439a7714cc/go.mod h1:e5tpTHCfVze+7EpLEozzMB3eafxo2KT5veNg1k6byQU=
github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg=
github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZqDe4LMs4ZHD0oMUlt9G2LWuDGoisJTBzLMV9o=
github.com/golangci/misspell v0.0.0-20180809174111-950f5d19e770/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA=
github.com/golangci/misspell v0.3.5/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA=
github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21/go.mod h1:tf5+bzsHdTM0bsB7+8mt0GUMvjCgwLpTapNZHU8AajI=
github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4=
github.com/golangci/revgrep v0.0.0-20180812185044-276a5c0a1039/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4=
github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ=
github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/crfs v0.0.0-20191108021818-71d77da419c9/go.mod h1:etGhoOqfwPkooV6aqoX3eBGQOJblqdoc9XvWOeuxpPw=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.4.1 h1:/exdXoGamhu5ONeUJH0deniYLWYvQwW66yvlfiiKTu0=
github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-containerregistry v0.0.0-20191010200024-a3d713f9b7f8/go.mod h1:KyKXa9ciM8+lgMXwOVsXi7UxGrsf9mM61Mzs+xKUrKE=
github.com/google/go-containerregistry v0.1.2/go.mod h1:GPivBPgdAyd2SU+vf6EpsgOtWDuPqjW0hJZt4rNdTZ4=
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
github.com/google/go-github/v28 v28.1.1/go.mod h1:bsqJWQX05omyWVmc00nEUql9mhQyv38lDZ8kPZcQVoM=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/google/go-replayers/grpcreplay v0.1.0/go.mod h1:8Ig2Idjpr6gifRd6pNVggX6TC1Zw6Jx74AKp7QNH2QE=
github.com/google/go-replayers/httpreplay v0.1.0/go.mod h1:YKZViNhiGgqdBlUbI2MwGpq4pXxNmhJLPHQ7cv2b5no=
github.com/google/gofuzz v0.0.0-20161122191042-44d81051d367/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI=
github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian v2.1.1-0.20190517191504-25dcb96d9e51+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/rpmpack v0.0.0-20191226140753-aa36bfddb3a0/go.mod h1:RaTPr0KUf2K7fnZYLNDrr8rxAamWs3iNywJLtQ2AzBg=
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
github.com/google/subcommands v1.0.1/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/wire v0.3.0/go.mod h1:i1DMg/Lu8Sz5yYl25iOdmc5CT5qusaa+zmRWs16741s=
github.com/google/wire v0.4.0/go.mod h1:ngWDr9Qvq3yZA10YrxfyGELY/AFWGVpy9c1LTRi1EoU=
github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
github.com/googleapis/gax-go v2.0.2+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY=
github.com/googleapis/gnostic v0.2.2/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY=
github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg=
github.com/gookit/color v1.2.4/go.mod h1:AhIE+pS6D4Ql0SQWbBeXPHw7gY0/sjHoA4s/n1KB7xg=
github.com/gophercloud/gophercloud v0.1.0/go.mod h1:vxM41WHh5uqHVBMZHzuwNOHh8XEoIEcSTewFxm1c5g8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/goreleaser/goreleaser v0.136.0/go.mod h1:wiKrPUeSNh6Wu8nUHxZydSOVQ/OZvOaO7DTtFqie904=
github.com/goreleaser/nfpm v1.2.1/go.mod h1:TtWrABZozuLOttX2uDlYyECfQX7x5XYkVxhjYcR6G9w=
github.com/goreleaser/nfpm v1.3.0/go.mod h1:w0p7Kc9TAUgWMyrub63ex3M2Mgw88M4GZXoTq5UCb40=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
github.com/gorilla/handlers v0.0.0-20150720190736-60c7bfde3e33/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ=
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.7.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE=
github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE=
github.com/gotestyourself/gotestyourself v2.2.0+incompatible/go.mod h1:zZKM6oeNM8k+FRljX1mnzVYeS8wiGgQyvST1/GafPbY=
github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0 h1:0IKlLyQ3Hs9nDaiK5cSHAGmcQEIC8l2Ts1u6x5Dfrqg=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0/go.mod h1:mJzapYve32yjrKlk9GbyCZHuPgZsrbyIbyKhSzOpg6s=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v1.5.0/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
github.com/grpc-ecosystem/grpc-gateway v1.6.2/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.9.2/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645 h1:MJG/KsmcqMwFAkh8mTnAwhyKoB+sTAnY4CACC110tbU=
github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645/go.mod h1:6iZfnjpejD4L/4DwD7NryNaJyCQdzwWwH2MWhCA90Kw=
github.com/hanwen/go-fuse v1.0.0/go.mod h1:unqXarDXqzAk0rt98O2tVndEPIpUgLD9+rwFisZH3Ok=
github.com/hanwen/go-fuse/v2 v2.0.3/go.mod h1:0EQM6aH2ctVpvZ6a+onrQ/vaykxh2GH7hy3e13vzTUY=
github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I=
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-retryablehttp v0.6.4/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY=
github.com/hashicorp/go-retryablehttp v0.6.6/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY=
github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.3/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
github.com/hashicorp/uuid v0.0.0-20160311170451-ebb0a03e909c/go.mod h1:fHzc09UnyJyqyW+bFuq864eh+wC7dj65aXmXLRe5to0=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/ishidawataru/sctp v0.0.0-20191218070446-00ab2ac2db07/go.mod h1:co9pwDoBCm1kGxawmb4sPq0cSIOOWNPT4KnHotMP1Zg=
github.com/jarcoal/httpmock v1.0.5/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik=
github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU=
github.com/jingyugao/rowserrcheck v0.0.0-20191204022205-72ab7603b68a/go.mod h1:xRskid8CManxVta/ALEhJha/pweKBaVG6fWgc0yH25s=
github.com/jirfag/go-printf-func-name v0.0.0-20191110105641-45db9963cdd3/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0=
github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0=
github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.0.0-20160803190731-bd40a432e4c7/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik=
github.com/jmoiron/sqlx v1.2.1-0.20190826204134-d7d95172beb5/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks=
github.com/joefitzgerald/rainbow-reporter v0.1.0/go.mod h1:481CNgqmVHQZzdIbN52CupLJyoVwB10FQ/IQlF1pdL8=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/jpillora/backoff v0.0.0-20180909062703-3050d21c67d7/go.mod h1:2iMrUgbbvHEiQClaW2NsSzMyGHqN+rDFqY705q49KG0=
github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v0.0.0-20180701071628-ab8a2e0c74be/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.4.0/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/cpuid v0.0.0-20180405133222-e7e905edc00e/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs=
github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.1.1/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4=
github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=
github.com/maratori/testpackage v1.0.1/go.mod h1:ddKdw+XG0Phzhx8BFDTKgpWP4i7MpApTE5fXSKAqwDU=
github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho=
github.com/matoous/godox v0.0.0-20190911065817-5d6d842e92eb/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-ieproxy v0.0.0-20190610004146-91bb50d98149/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc=
github.com/mattn/go-ieproxy v0.0.0-20190702010315-6dee0af9227d/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc=
github.com/mattn/go-ieproxy v0.0.1/go.mod h1:pYabZ6IHcRpFh7vIaLfK7rdcWgFEb3SFJ6/gNWuh88E=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-shellwords v1.0.10/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-zglob v0.0.1/go.mod h1:9fxibJccNxU2cnpIKLRRFA7zX7qhkJIQWBb449FYHOo=
github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/maxbrunsfeld/counterfeiter/v6 v6.2.2/go.mod h1:eD9eIE7cdwcMi9rYluz88Jz2VyhSmden33/aXg4oVIY=
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-ps v0.0.0-20190716172923-621e5597135b/go.mod h1:r1VsdOzOPt1ZSrGZWFoNhsAedKnEd6r9Np1+5blZCWk=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
github.com/mitchellh/hashstructure v1.0.0/go.mod h1:QjSHrPWS+BGUVBYkbTZWEnOh3G1DutKwClXU/ABz6AQ=
github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.3.1 h1:cCBH2gTD2K0OtLlv/Y5H01VQCqmlDxz30kS5Y5bqfLA=
github.com/mitchellh/mapstructure v1.3.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A=
github.com/moby/buildkit v0.8.1 h1:zrGxLwffKM8nVxBvaJa7H404eQLfqlg1GB6YVIzXVQ0=
github.com/moby/buildkit v0.8.1/go.mod h1:/kyU1hKy/aYCuP39GZA9MaKioovHku57N6cqlKZIaiQ=
github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg=
github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc=
github.com/moby/sys/mount v0.1.0/go.mod h1:FVQFLDRWwyBjDTBNQXDlWnSFREqOo3OKX9aqhmeoo74=
github.com/moby/sys/mount v0.1.1 h1:mdhBytJ1SMmMat0gtzWWjFX/87K5j6E/7Q5z7rR0cZY=
github.com/moby/sys/mount v0.1.1/go.mod h1:FVQFLDRWwyBjDTBNQXDlWnSFREqOo3OKX9aqhmeoo74=
github.com/moby/sys/mountinfo v0.1.0/go.mod h1:w2t2Avltqx8vE7gX5l+QiBKxODu2TX0+Syr3h52Tw4o=
github.com/moby/sys/mountinfo v0.1.3/go.mod h1:w2t2Avltqx8vE7gX5l+QiBKxODu2TX0+Syr3h52Tw4o=
github.com/moby/sys/mountinfo v0.4.0 h1:1KInV3Huv18akCu58V7lzNlt+jFmqlu1EaErnEHE/VM=
github.com/moby/sys/mountinfo v0.4.0/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A=
github.com/moby/term v0.0.0-20200915141129-7f0af18e79f2 h1:SPoLlS9qUUnXcIY4pvA4CTwYjk0Is5f4UPEkeESr53k=
github.com/moby/term v0.0.0-20200915141129-7f0af18e79f2/go.mod h1:TjQg8pa4iejrUrjiz0MCtMV38jdMNW4doKSiBrEvCQQ=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180320133207-05fbef0ca5da/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
github.com/mozilla/tls-observatory v0.0.0-20190404164649-a3c1b6cfecfd/go.mod h1:SrKMQvPiws7F7iqYp8/TX+IhxCYhzr6N/1yb8cwHsGk=
github.com/mozilla/tls-observatory v0.0.0-20200317151703-4fa42e1c2dee/go.mod h1:SrKMQvPiws7F7iqYp8/TX+IhxCYhzr6N/1yb8cwHsGk=
github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de h1:D5x39vF5KCwKQaw+OC9ZPiLVHXz3UFw2+psEX+gYcto=
github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de/go.mod h1:kJun4WP5gFuHZgRjZUWWuH1DTxCtxbHDOIJsudS8jzY=
github.com/mrunalp/fileutils v0.0.0-20200520151820-abd8a0e76976/go.mod h1:x8F1gnqOkIEiO4rqoeEEEqQbo7HjGMTvyoq3gej4iT0=
github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw=
github.com/nakabonne/nestif v0.3.0/go.mod h1:dI314BppzXjJ4HsCnbo7XzrJHPszZsjnk5wEBSYHI2c=
github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d/go.mod h1:o96djdrsSGy3AWPyBgZMAGfxZNfgntdJG+11KU4QvbU=
github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo=
github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg=
github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
github.com/onsi/gomega v1.8.1/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA=
github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA=
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.0.0/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
github.com/opencontainers/image-spec v1.0.1 h1:JMemWkRwHx4Zj+fVxWoMCFm/8sYGGrUVojFA6h/TRcI=
github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
github.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
github.com/opencontainers/runc v1.0.0-rc10/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
github.com/opencontainers/runc v1.0.0-rc92 h1:+IczUKCRzDzFDnw99O/PAqrcBBCoRp9xN3cB1SYSNS4=
github.com/opencontainers/runc v1.0.0-rc92/go.mod h1:X1zlU4p7wOlX4+WRCz+hvlRv8phdL7UqbYD+vQwNMmE=
github.com/opencontainers/runtime-spec v0.1.2-0.20190507144316-5b71a03e2700/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-spec v1.0.1/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-spec v1.0.3-0.20200728170252-4d89ac9fbff6 h1:NhsM2gc769rVWDqJvapK37r+7+CBXI8xHhnfnt8uQsg=
github.com/opencontainers/runtime-spec v1.0.3-0.20200728170252-4d89ac9fbff6/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-tools v0.0.0-20181011054405-1d69bd0f9c39/go.mod h1:r3f7wjNzSs2extwzU3Y+6pKfobzPh+kKFJ3ofN+3nfs=
github.com/opencontainers/selinux v1.6.0 h1:+bIAS/Za3q5FTwWym4fTB0vObnfCf3G/NC7K6Jx62mY=
github.com/opencontainers/selinux v1.6.0/go.mod h1:VVGKuOLlE7v4PJyT6h7mNWvq1rzqiriPsEqVhc+svHE=
github.com/opentracing-contrib/go-stdlib v1.0.0/go.mod h1:qtI1ogk+2JhVPIXVc6q+NHziSmy2W5GbdQZFUHADCBU=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs=
github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc=
github.com/openzipkin/zipkin-go v0.1.1/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8=
github.com/openzipkin/zipkin-go v0.1.3/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8=
github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pelletier/go-toml v1.8.0 h1:Keo9qb7iRJs2voHvunFtuuYFsbWeOBh8/P9v/kVMFtw=
github.com/pelletier/go-toml v1.8.0/go.mod h1:D6yutnOGMveHEPV7VQOuvI/gXY61bv+9bAOTRnLElKs=
github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.5.0/go.mod h1:qBsxPvzyUincmltOk6iyRVxHYg4adc0OFOv72ZdLa18=
github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA=
github.com/prometheus/client_golang v0.0.0-20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.8.0/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/common v0.0.0-20180110214958-89604d197083/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.0-20190522114515-bc1a522cf7b1/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.5 h1:3+auTFlqw+ZaQYJARz6ArODtkaIwtvBTx3N2NehQlL8=
github.com/prometheus/procfs v0.0.5/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ=
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
github.com/quasilyte/go-consistent v0.0.0-20190521200055-c6f3937de18c/go.mod h1:5STLWrekHfjyYwxBRVRXNOSewLJ3PWfDJd1VyTS21fI=
github.com/quasilyte/go-ruleguard v0.1.2-0.20200318202121-b00d7a75d3d8/go.mod h1:CGFX09Ci3pq9QZdj86B+VGIdNj4VyCo2iPOGS9esB/k=
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/remyoudompheng/bigfft v0.0.0-20170806203942-52369c62f446/go.mod h1:uYEyJGbgTkfkS4+E/PavXkNJcbFIpEtjt2B0KDQ5+9M=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rogpeppe/fastuuid v1.1.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.5.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.6.2-0.20200830194709-1115b6af0369 h1:wdCVGtPadWC/ZuuLC7Hv58VQ5UF7V98ewE71n5mJfrM=
github.com/rogpeppe/go-internal v1.6.2-0.20200830194709-1115b6af0369/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
github.com/rs/zerolog v1.20.0 h1:38k9hgtUBdxFwE34yS8rTHmHBa4eN16E4DJlv177LNs=
github.com/rs/zerolog v1.20.0/go.mod h1:IzD0RJ65iWH0w97OQQebJEvTZYvsCUm9WVLWBQrJRjo=
github.com/rubiojr/go-vhd v0.0.0-20160810183302-0bfd3b39853c/go.mod h1:DM5xW0nvfNNm2uytzsvhI3OnX8uzaRAg8UX/CnDqbto=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryancurrah/gomodguard v1.0.4/go.mod h1:9T/Cfuxs5StfsocWr4WzDL36HqnX0fVb9d5fSEaLhoE=
github.com/ryancurrah/gomodguard v1.1.0/go.mod h1:4O8tr7hBODaGE6VIhfJDHcwzh5GUccKSJBU0UMXJFVM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/sassoftware/go-rpmutils v0.0.0-20190420191620-a8f1baeba37b/go.mod h1:am+Fp8Bt506lA3Rk3QCmSqmYmLMnPDhdDUcosQCAx+I=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/sclevine/spec v1.2.0/go.mod h1:W4J29eT/Kzv7/b9IWLB055Z+qvVC9vt0Arko24q7p+U=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo=
github.com/securego/gosec v0.0.0-20200103095621-79fbf3af8d83/go.mod h1:vvbZ2Ae7AzSq3/kywjUDxSNq2SJ27RxCz2un0H3ePqE=
github.com/securego/gosec v0.0.0-20200401082031-e946c8c39989/go.mod h1:i9l/TNj+yDFh9SZXUTvspXTjbFXgZGP/UvhU1S65A4A=
github.com/securego/gosec/v2 v2.3.0/go.mod h1:UzeVyUXbxukhLeHKV3VVqo7HdoQR9MrRfFmZYotn8ME=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/serialx/hashring v0.0.0-20190422032157-8b2912629002/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc=
github.com/shirou/gopsutil v0.0.0-20190901111213-e4ec7b275ada/go.mod h1:WWnYX4lzhCH5h/3YBfyVA3VbLYjlMZZAQcW9ojMexNc=
github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4/go.mod h1:qsXQc7+bwAM3Q1u/4XEfrquwF8Lw7D7y5cD8CuHnfIc=
github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.0.4-0.20170822132746-89742aefa4b2/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
github.com/sirupsen/logrus v1.7.0 h1:ShrD1U9pZB12TX0cVy0DtePoCH97K8EtX+mg7ZARUtM=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/assertions v1.0.0 h1:UVQPSSmc3qtTi+zPPkCXvZX9VvW/xT/NsRvKfwY81a8=
github.com/smartystreets/assertions v1.0.0/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM=
github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9/go.mod h1:SnhjPscd9TpLiy1LpzGSKh3bXCfxxXuqd9xmQJy3slM=
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/smartystreets/gunit v1.0.0/go.mod h1:qwPWnhz6pn0NnRBP++URONOVyNkPyr4SauJk4cUOwJs=
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
github.com/sourcegraph/go-diff v0.5.1/go.mod h1:j2dHj3m8aZgQO8lMTcTnBcXkRRRqi34cd2MNlA9u1mE=
github.com/sourcegraph/go-diff v0.5.3/go.mod h1:v9JDtjCE4HHHCZGId75rg8gkKKa98RVjBcBGsVmMmak=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
github.com/spf13/afero v1.2.2 h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc=
github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cobra v0.0.2-0.20171109065643-2da4a54c5cee/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=
github.com/spf13/cobra v1.1.3 h1:xghbfqPkxzxP3C/f3n5DdpAbdKLj4ZE4BWQI362l53M=
github.com/spf13/cobra v1.1.3/go.mod h1:pGADOWyqRD/YMrPZigI/zbliZ2wVD/23d+is3pSWzOo=
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.1-0.20171106142849-4c012f6dcd95/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
github.com/spf13/viper v1.6.1/go.mod h1:t3iDnF5Jlj76alVNuyFBk5oUMCvsrkbvZK0WQdfDi5k=
github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg=
github.com/spf13/viper v1.7.1 h1:pM5oEahlgWv/WnHXpgbKz7iLIxRf65tye2Ci+XFK5sk=
github.com/spf13/viper v1.7.1/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg=
github.com/stretchr/objx v0.0.0-20180129172003-8a3f7159479f/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/testify v0.0.0-20151208002404-e3a8ff8ce365/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2 h1:b6uOv7YOFK0TYG7HtkIgExQo+2RdLuwRft63jn2HWj8=
github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA=
github.com/tdakkota/asciicheck v0.0.0-20200416190851-d7f85be797a2/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM=
github.com/tdakkota/asciicheck v0.0.0-20200416200610-e657995f937b/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM=
github.com/tetafro/godot v0.3.7/go.mod h1:/7NLHhv08H1+8DNj0MElpAACw1ajsCuf3TKNQxA5S+0=
github.com/tetafro/godot v0.4.2/go.mod h1:/7NLHhv08H1+8DNj0MElpAACw1ajsCuf3TKNQxA5S+0=
github.com/timakin/bodyclose v0.0.0-20190930140734-f7f2e9bca95e/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk=
github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk=
github.com/tj/assert v0.0.0-20171129193455-018094318fb0/go.mod h1:mZ9/Rh9oLWpLLDRpvE+3b7gP/C2YyLFYxNmcLnPTMe0=
github.com/tj/go-elastic v0.0.0-20171221160941-36157cbbebc2/go.mod h1:WjeM0Oo1eNAjXGDx2yma7uG2XoyRZTq1uv3M/o7imD0=
github.com/tj/go-kinesis v0.0.0-20171128231115-08b17f58cb1b/go.mod h1:/yhzCV0xPfx6jb1bBgRFjl5lytqVqZXEaeqWP8lTEao=
github.com/tj/go-spin v1.1.0/go.mod h1:Mg1mzmePZm4dva8Qz60H2lHwmJ2loum4VIrLgVnKwh4=
github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tommy-muehle/go-mnd v1.1.1/go.mod h1:dSUh0FtTP8VhvkL1S+gUR1OKd9ZnSaozuI6r3m6wOig=
github.com/tommy-muehle/go-mnd v1.3.1-0.20200224220436-e6f9a994e8fa/go.mod h1:dSUh0FtTP8VhvkL1S+gUR1OKd9ZnSaozuI6r3m6wOig=
github.com/tonistiigi/fsutil v0.0.0-20201103201449-0834f99b7b85 h1:014iQD8i8EabPWK2XgUuOTxg5s2nhfDmq6GupskfUO8=
github.com/tonistiigi/fsutil v0.0.0-20201103201449-0834f99b7b85/go.mod h1:a7cilN64dG941IOXfhJhlH0qB92hxJ9A1ewrdUmJ6xo=
github.com/tonistiigi/go-immutable-radix v0.0.0-20170803185627-826af9ccf0fe/go.mod h1:/+MCh11CJf2oz0BXmlmqyopK/ad1rKkcOXPoYuPCJYU=
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0=
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk=
github.com/tonistiigi/vt100 v0.0.0-20190402012908-ad4c4a574305 h1:y/1cL5AL2oRcfzz8CAHHhR6kDDfIOT0WEyH5k40sccM=
github.com/tonistiigi/vt100 v0.0.0-20190402012908-ad4c4a574305/go.mod h1:gXOLibKqQTRAVuVZ9gX7G9Ykky8ll8yb4slxsEMoY0c=
github.com/uber/jaeger-client-go v2.25.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk=
github.com/uber/jaeger-lib v2.2.0+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U=
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8=
github.com/ulikunitz/xz v0.5.7/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/ultraware/funlen v0.0.2/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA=
github.com/ultraware/whitespace v0.0.4/go.mod h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA=
github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/uudashr/gocognit v1.0.1/go.mod h1:j44Ayx2KW4+oB6SWMv8KsmHzZrOInQav7D3cQMJ5JUM=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasthttp v1.2.0/go.mod h1:4vX61m6KN+xDduDNwXrhIAVZaZaZiQ1luJk8LWSxF3s=
github.com/valyala/quicktemplate v1.2.0/go.mod h1:EH+4AkTd43SvgIbQHYu59/cJyxDoOVRUAfrukLPuGJ4=
github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio=
github.com/vdemeester/k8s-pkg-credentialprovider v1.17.4/go.mod h1:inCTmtUdr5KJbreVojo06krnTgaeAz/Z7lynpPk/Q2c=
github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE=
github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU=
github.com/vmware/govmomi v0.20.3/go.mod h1:URlwyTFZX72RmxtxuaFL2Uj3fD1JTvZdx59bHWk6aFU=
github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243 h1:R43TdZy32XXSXjJn7M/HhALJ9imq6ztLnChfYJpVDnM=
github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
github.com/xanzy/go-gitlab v0.31.0/go.mod h1:sPLojNBn68fMUWSxIJtdVVIP8uSBYqesTfDUseX11Ug=
github.com/xanzy/go-gitlab v0.32.0/go.mod h1:sPLojNBn68fMUWSxIJtdVVIP8uSBYqesTfDUseX11Ug=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v0.0.0-20180618132009-1d523034197f/go.mod h1:5yf86TLmAcydyeJq5YvxkGPE2fm/u4myDekKRoLuqhs=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs=
github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA=
github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg=
go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg=
go.opencensus.io v0.15.0/go.mod h1:UffZAU+4sDEINUGP/B7UfBBkq4fqLu9zXAX7ke6CHW0=
go.opencensus.io v0.18.0/go.mod h1:vKdFvxhtzZ9onBp9VKHK8z/sRpBMnKAsufL7wlDrCOA=
go.opencensus.io v0.19.1/go.mod h1:gug0GbSHa8Pafr0d2urOSgoXHZ6x/RUlaiT0d9pqb4A=
go.opencensus.io v0.19.2/go.mod h1:NO/8qkisMZLZ1FCsKNqtJPwc8/TaclWyY0B6wcYNg9M=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.3 h1:8sGtKOrtQqkN1bp2AtX+misvLIlOmsEsNd+9NIcPEm8=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
go4.org v0.0.0-20180809161055-417644f6feb5/go.mod h1:MkTOUMDaeVYJUOUsaDXIhWPZYa1yOyC1qaOBpL57BhE=
gocloud.dev v0.19.0/go.mod h1:SmKwiR8YwIMMJvQBKLsC3fHNyMwXLw3PMDO+VVteJMI=
golang.org/x/build v0.0.0-20190314133821-5284462c4bec/go.mod h1:atTaCNAy0f16Ah5aV1gMSwgiKVHwu/JncqDpuRr7lS4=
golang.org/x/crypto v0.0.0-20171113213409-9f005a07e0d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181030102418-4d3f4d9ffa16/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191002192127-34f69633bfdc/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201117144127-c1f2f97bffc9 h1:phUcVbl53swtrUN8kQEXFhUxPlIlWyBfKmidCu7P95o=
golang.org/x/crypto v0.0.0-20201117144127-c1f2f97bffc9/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190312203227-4b39c73a6495/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190731235908-ec7cb31e5a56/go.mod h1:JhuoJpWY28nO4Vef9tZUw9qufEGTyX1+7lmHxV5q5G4=
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
golang.org/x/exp v0.0.0-20210126221216-84987778548c/go.mod h1:I6l2HNBLBZEcrOoCpyKLdY2lHoRZ8lI4x60KMCQDft4=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20181217174547-8f45f776aaf1/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
golang.org/x/mobile v0.0.0-20201217150744-e6ae53a27f4f/go.mod h1:skQtrUTUwhdJvXM/2KKJzY8pDgNr9I/FOMqDVRPBUS4=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.1.1-0.20191209134235-331c550502dd/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.1-0.20200828183125-ce943fd02449/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180911220305-26e67e76b6c3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181029044818-c44066c5c816/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181108082009-03003ca0c849/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190619014844-b5b0513f8c1b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191112182307-2180aed22343/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201021035429-f5854403a974 h1:IX6qOQeG5uLjB/hjjwjedwfjND0hgjPMMyO1RoIXQNI=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/oauth2 v0.0.0-20180724155351-3d292e4d0cdc/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190402181905-9f3314589c9a/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/perf v0.0.0-20180704124530-6e6d33e29852/go.mod h1:JLpeXjPJfIyPr5TlbXLkXWLhP8nz10XfvxElABhCtcw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9 h1:SQFwaSi55rU7vdNs9Yr0Z324VNlrF+0wMqRXT4St8ck=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181029174526-d69651ed3497/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181218192612-074acd46bca6/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190209173611-3b5209105503/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190514135907-3a4b5fb9f71f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190602015325-4c4f7f33c9ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190620070143-6f217b454f45/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191112214154-59a1497f0cea/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191210023423-ac6580df4449/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200120151820-655fe14d7479/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200622214017-ed371f2e16b4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200728102440-3e129f6d46b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200909081042-eff7692f9009/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200916030750-2334cc1a136f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200917073148-efd3b9a0ff20/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201013081832-0aaa2718063a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c h1:VwygUrnw9jn88c4u8GD3rZQbqrP/tgas88tPUbBxQrk=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221 h1:/ZHdbVpdR/jk3g30/d4yUL0JU9kksj8+F/bnQUVLGDM=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 h1:NusfzzA6yGQ+ua51ck7E3omNUX/JuqbFSaRGqU8CcLI=
golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181219222714-6e267b5cc78e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190221204921-83362c3779f5/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190706070813-72ffa07ba3db/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
golang.org/x/tools v0.0.0-20190719005602-e377ae9d6386/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190828213141-aed303cbaa74/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113232020-e2727e816f5a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200102140908-9497f49d5709/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200117012304-6edc0a871e69/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200204192400-7124308813f3/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
golang.org/x/tools v0.0.0-20200331202046-9d5940d49312/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200414032229-332987a829c3/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200426102838-f3a5411a4c3b/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200502202811-ed308ab3e770/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200612220849-54c614fe050c/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gonum.org/v1/gonum v0.0.0-20190331200053-3d26580ed485/go.mod h1:2ltnJ7xHfj0zHS40VVPYEAAMTa3ZGguvHGBSJeRWqE0=
gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
gonum.org/v1/netlib v0.0.0-20190331212654-76723241ea4e/go.mod h1:kS+toOQn6AQKjmKJ7gzohV1XkqsFehRA2FbsbkopSuQ=
google.golang.org/api v0.0.0-20160322025152-9bf6e6e569ff/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20181220000619-583d854617af/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.2.0/go.mod h1:IfRCZScioGtypHNTlz3gFk67J8uePVW7uDTBzXuIkhU=
google.golang.org/api v0.3.0/go.mod h1:IuvZyQh8jgscv8qWfQ4ABd8m7hEudgBFM/EdhA3BnXw=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.5.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.6.0/go.mod h1:btoxGiFvQNVUZQ8W08zLtrVS08CNpINPEfxXxgJL1Q4=
google.golang.org/api v0.6.1-0.20190607001116-5213b8090861/go.mod h1:btoxGiFvQNVUZQ8W08zLtrVS08CNpINPEfxXxgJL1Q4=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
google.golang.org/api v0.25.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/cloud v0.0.0-20151119220103-975617b05ea8/go.mod h1:0H1ncTHf11KCFhTc/+EFRbzSCOZx+VUbRMk55Yv5MYk=
google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63 h1:YzfoEYWbODU5Fbt37+h7X16BWQbad7Q4S6gclTKFXM8=
google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
google.golang.org/grpc v1.29.1 h1:EC2SB8S04d2r73uptxphDSUG+kTKVgjRPF+N3xpxRB4=
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20141024133853-64131543e789/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/gcfg.v1 v1.2.0/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o=
gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo=
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.56.0 h1:DPMeDvGTM54DXbPkVIZsp19fp/I2K7zwA/itHYHKo8Y=
gopkg.in/ini.v1 v1.56.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/warnings.v0 v0.1.1/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200121175148-a6ecf24a6d71/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200506231410-2ff61e1afc86 h1:OfFoIUYv/me30yv7XlMy4F9RJw8DEm8WQ6QG1Ph4bH0=
gopkg.in/yaml.v3 v3.0.0-20200506231410-2ff61e1afc86/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=
gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
gotest.tools/v3 v3.0.2 h1:kG1BFyqVHuQoVQiR1bWGnfz/fmHvvuiSPIV7rvl360E=
gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=
grpc.go4.org v0.0.0-20170609214715-11d0a25b4919/go.mod h1:77eQGdRu53HpSqPFJFmuJdjuHRquDANNeA4x7B8WQ9o=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20180920025451-e3ad64cb4ed3/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.5/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
k8s.io/api v0.0.0-20180904230853-4e7be11eab3f/go.mod h1:iuAfoD4hCxJ8Onx9kaTIt30j7jUFS00AXQi6QMi99vA=
k8s.io/api v0.17.4/go.mod h1:5qxx6vjmwUVG2nHQTKGlLts8Tbok8PzHl4vHtVFuZCA=
k8s.io/api v0.19.0/go.mod h1:I1K45XlvTrDjmj5LoM5LuP/KYrhWbjUKT/SoPG0qTjw=
k8s.io/apimachinery v0.0.0-20180904193909-def12e63c512/go.mod h1:ccL7Eh7zubPUSh9A3USN90/OzHNSVN6zxzde07TDCL0=
k8s.io/apimachinery v0.17.4/go.mod h1:gxLnyZcGNdZTCLnq3fgzyg2A5BVCHTNDFrw8AmuJ+0g=
k8s.io/apimachinery v0.19.0/go.mod h1:DnPGDnARWFvYa3pMHgSxtbZb7gpzzAZ1pTfaUNDVlmA=
k8s.io/apiserver v0.17.4/go.mod h1:5ZDQ6Xr5MNBxyi3iUZXS84QOhZl+W7Oq2us/29c0j9I=
k8s.io/client-go v0.0.0-20180910083459-2cefa64ff137/go.mod h1:7vJpHMYJwNQCWgzmNV+VYUl1zCObLyodBc8nIyt8L5s=
k8s.io/client-go v0.17.4/go.mod h1:ouF6o5pz3is8qU0/qYL2RnoxOPqgfuidYLowytyLJmc=
k8s.io/client-go v0.19.0/go.mod h1:H9E/VT95blcFQnlyShFgnFT9ZnJOAceiUHM3MlRC+mU=
k8s.io/cloud-provider v0.17.4/go.mod h1:XEjKDzfD+b9MTLXQFlDGkk6Ho8SGMpaU8Uugx/KNK9U=
k8s.io/code-generator v0.17.2/go.mod h1:DVmfPQgxQENqDIzVR2ddLXMH34qeszkKSdH/N+s+38s=
k8s.io/component-base v0.17.4/go.mod h1:5BRqHMbbQPm2kKu35v3G+CpVq4K0RJKC7TRioF0I9lE=
k8s.io/cri-api v0.17.3/go.mod h1:X1sbHmuXhwaHs9xxYffLqJogVsnI+f6cPRcgPel7ywM=
k8s.io/csi-translation-lib v0.17.4/go.mod h1:CsxmjwxEI0tTNMzffIAcgR9lX4wOh6AKHdxQrT7L0oo=
k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=
k8s.io/gengo v0.0.0-20190822140433-26a664648505/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=
k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=
k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I=
k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE=
k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y=
k8s.io/kube-openapi v0.0.0-20180731170545-e3762e86a74c/go.mod h1:BXM9ceUBTj2QnfH2MK1odQs778ajze1RxcmP6S8RVVc=
k8s.io/kube-openapi v0.0.0-20191107075043-30be4d16710a/go.mod h1:1TqjTSzOxsLGIKfj0lK8EeCP7K1iUG65v09OM0/WG5E=
k8s.io/kube-openapi v0.0.0-20200805222855-6aeccd4b50c6/go.mod h1:UuqjUnNftUyPE5H64/qeyjQoUZhGpeFDVdxjTeEVN2o=
k8s.io/kubernetes v1.11.10/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk=
k8s.io/kubernetes v1.13.0/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk=
k8s.io/legacy-cloud-providers v0.17.4/go.mod h1:FikRNoD64ECjkxO36gkDgJeiQWwyZTuBkhu+yxOc1Js=
k8s.io/utils v0.0.0-20191114184206-e782cd3c129f/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew=
k8s.io/utils v0.0.0-20200729134348-d5654de09c73/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA=
modernc.org/cc v1.0.0/go.mod h1:1Sk4//wdnYJiUIxnW8ddKpaOJCF37yAdqYnkxUpaYxw=
modernc.org/golex v1.0.0/go.mod h1:b/QX9oBD/LhixY6NDh+IdGv17hgB+51fET1i2kPSmvk=
modernc.org/mathutil v1.0.0/go.mod h1:wU0vUrJsVWBZ4P6e7xtFJEhFSNsfRLJ8H458uRjg03k=
modernc.org/strutil v1.0.0/go.mod h1:lstksw84oURvj9y3tn8lGvRxyRC1S2+g5uuIzNfIOBs=
modernc.org/xc v1.0.0/go.mod h1:mRNCo0bvLjGhHO9WsyuKVU4q0ceiDDDoEeWDJHrNx8I=
mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc=
mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4=
mvdan.cc/unparam v0.0.0-20190720180237-d51796306d8f/go.mod h1:4G1h5nDURzA3bwVMZIVpwbkw+04kSxk3rAtzlimaUJw=
mvdan.cc/unparam v0.0.0-20200501210554-b37ab49443f7/go.mod h1:HGC5lll35J70Y5v7vCGb9oLhHoScFwkHDJm/05RdSTc=
pack.ag/amqp v0.11.2/go.mod h1:4/cbmt4EJXSKlG6LCfWHoqmN0uFdy5i/+YFz+fTfhV4=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e/go.mod h1:wWxsB5ozmmv/SG7nM11ayaAW51xMvak/t1r0CSlcokI=
sigs.k8s.io/structured-merge-diff v1.0.1-0.20191108220359-b1b620dd3f06/go.mod h1:/ULNhyfzRopfcjskuui0cTITekDduZ7ycKN3oUT9R18=
sigs.k8s.io/structured-merge-diff/v4 v4.0.1/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw=
sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=
sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc=
sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4/go.mod h1:ketZ/q3QxT9HOBeFhu6RdvsftgpsbFHBF5Cas6cDKZ0=
sourcegraph.com/sqs/pbtypes v1.0.0/go.mod h1:3AciMUv4qUuRHRHhOG4TZOB+72GdPVz5k+c648qsFS4=
|
closed
|
dagger/dagger
|
https://github.com/dagger/dagger
| 276 |
Multiple deployments with the same name?
|
It appears multiple deployments can have the same name, which is confusing.
```
$ dagger up
7:50PM FTL system | multiple deployments match the current directory, select one with `--deployment` deploymentPath=/home/shykes/dagger deployments=[
"dagger",
"dagger-dev",
"dagger-dev"
]
```
```
$ ls ~/.dagger/store/
dagger dagger-dev react
```
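For reference, a minimal sketch of the kind of guard that prevents this: keep a name-keyed index and refuse to create a second deployment under an existing name. The `DeploymentState`, `deploymentsByName`, and `ErrDeploymentExist` names mirror the `dagger/store.go` content recorded later in this row, but the snippet itself is illustrative only and is not the actual fix.
```
package main

import (
	"errors"
	"fmt"
)

var ErrDeploymentExist = errors.New("deployment already exists")

// DeploymentState is reduced to a name for this sketch.
type DeploymentState struct {
	Name string
}

// Store keeps a name index so duplicate names can be rejected up front.
type Store struct {
	deploymentsByName map[string]*DeploymentState
}

// CreateDeployment refuses a name that is already taken.
func (s *Store) CreateDeployment(st *DeploymentState) error {
	if _, ok := s.deploymentsByName[st.Name]; ok {
		return fmt.Errorf("%s: %w", st.Name, ErrDeploymentExist)
	}
	s.deploymentsByName[st.Name] = st
	return nil
}

func main() {
	s := &Store{deploymentsByName: map[string]*DeploymentState{}}
	fmt.Println(s.CreateDeployment(&DeploymentState{Name: "dagger-dev"})) // <nil>
	fmt.Println(s.CreateDeployment(&DeploymentState{Name: "dagger-dev"})) // dagger-dev: deployment already exists
}
```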
|
https://github.com/dagger/dagger/issues/276
|
https://github.com/dagger/dagger/pull/279
|
1ae0ce65e90e4ff3b67317c957636b64058e1f35
|
7cf1163a2b7055a1dae6cdf36aad295398e2487a
| 2021-04-05T19:52:47Z |
go
| 2021-04-06T00:09:35Z |
dagger/store.go
|
package dagger
import (
"context"
"encoding/json"
"errors"
"fmt"
"os"
"path"
"sync"
"github.com/google/uuid"
)
var (
ErrDeploymentExist = errors.New("deployment already exists")
ErrDeploymentNotExist = errors.New("deployment doesn't exist")
)
const (
defaultStoreRoot = "$HOME/.dagger/store"
)
type Store struct {
root string
l sync.RWMutex
deployments map[string]*DeploymentState
// Various indices for fast lookups
deploymentsByName map[string]*DeploymentState
deploymentsByPath map[string][]*DeploymentState
pathsByDeploymentID map[string][]string
}
func NewStore(root string) (*Store, error) {
store := &Store{
root: root,
deployments: make(map[string]*DeploymentState),
deploymentsByName: make(map[string]*DeploymentState),
deploymentsByPath: make(map[string][]*DeploymentState),
pathsByDeploymentID: make(map[string][]string),
}
return store, store.loadAll()
}
func DefaultStore() (*Store, error) {
if root := os.Getenv("DAGGER_STORE"); root != "" {
return NewStore(root)
}
return NewStore(os.ExpandEnv(defaultStoreRoot))
}
func (s *Store) deploymentPath(name string) string {
return path.Join(s.root, name, "deployment.json")
}
func (s *Store) loadAll() error {
files, err := os.ReadDir(s.root)
if err != nil {
if errors.Is(err, os.ErrNotExist) {
return nil
}
return err
}
for _, f := range files {
if !f.IsDir() {
continue
}
if err := s.loadDeployment(f.Name()); err != nil {
return err
}
}
return nil
}
func (s *Store) loadDeployment(name string) error {
data, err := os.ReadFile(s.deploymentPath(name))
if err != nil {
return err
}
var st DeploymentState
if err := json.Unmarshal(data, &st); err != nil {
return err
}
s.indexDeployment(&st)
return nil
}
func (s *Store) syncDeployment(r *DeploymentState) error {
p := s.deploymentPath(r.Name)
if err := os.MkdirAll(path.Dir(p), 0755); err != nil {
return err
}
data, err := json.MarshalIndent(r, "", " ")
if err != nil {
return err
}
if err := os.WriteFile(p, data, 0600); err != nil {
return err
}
s.reindexDeployment(r)
return nil
}
func (s *Store) indexDeployment(r *DeploymentState) {
s.deployments[r.ID] = r
s.deploymentsByName[r.Name] = r
mapPath := func(i Input) {
if i.Type != InputTypeDir {
return
}
s.deploymentsByPath[i.Dir.Path] = append(s.deploymentsByPath[i.Dir.Path], r)
s.pathsByDeploymentID[r.ID] = append(s.pathsByDeploymentID[r.ID], i.Dir.Path)
}
mapPath(r.PlanSource)
for _, i := range r.Inputs {
mapPath(i.Value)
}
}
func (s *Store) deindexDeployment(id string) {
r, ok := s.deployments[id]
if !ok {
return
}
delete(s.deployments, r.ID)
delete(s.deploymentsByName, r.Name)
for _, p := range s.pathsByDeploymentID[r.ID] {
// Remove this deployment from the path->deployment mapping
deployments := []*DeploymentState{}
for _, d := range s.deploymentsByPath[p] {
if d.ID == r.ID {
continue
}
deployments = append(deployments, d)
}
s.deploymentsByPath[p] = deployments
}
delete(s.pathsByDeploymentID, r.ID)
}
func (s *Store) reindexDeployment(r *DeploymentState) {
s.deindexDeployment(r.ID)
s.indexDeployment(r)
}
func (s *Store) CreateDeployment(ctx context.Context, st *DeploymentState) error {
s.l.Lock()
defer s.l.Unlock()
if _, ok := s.deploymentsByName[st.Name]; ok {
return fmt.Errorf("%s: %w", st.Name, ErrDeploymentExist)
}
st.ID = uuid.New().String()
return s.syncDeployment(st)
}
type UpdateOpts struct{}
func (s *Store) UpdateDeployment(ctx context.Context, r *DeploymentState, o *UpdateOpts) error {
s.l.Lock()
defer s.l.Unlock()
return s.syncDeployment(r)
}
type DeleteOpts struct{}
func (s *Store) DeleteDeployment(ctx context.Context, r *DeploymentState, o *DeleteOpts) error {
s.l.Lock()
defer s.l.Unlock()
if err := os.Remove(s.deploymentPath(r.Name)); err != nil {
return err
}
s.deindexDeployment(r.ID)
return nil
}
func (s *Store) LookupDeploymentByID(ctx context.Context, id string) (*DeploymentState, error) {
s.l.RLock()
defer s.l.RUnlock()
st, ok := s.deployments[id]
if !ok {
return nil, fmt.Errorf("%s: %w", id, ErrDeploymentNotExist)
}
return st, nil
}
func (s *Store) LookupDeploymentByName(ctx context.Context, name string) (*DeploymentState, error) {
s.l.RLock()
defer s.l.RUnlock()
st, ok := s.deploymentsByName[name]
if !ok {
return nil, fmt.Errorf("%s: %w", name, ErrDeploymentNotExist)
}
return st, nil
}
func (s *Store) LookupDeploymentByPath(ctx context.Context, path string) ([]*DeploymentState, error) {
s.l.RLock()
defer s.l.RUnlock()
st, ok := s.deploymentsByPath[path]
if !ok {
return []*DeploymentState{}, nil
}
return st, nil
}
func (s *Store) ListDeployments(ctx context.Context) ([]*DeploymentState, error) {
s.l.RLock()
defer s.l.RUnlock()
deployments := make([]*DeploymentState, 0, len(s.deployments))
for _, st := range s.deployments {
deployments = append(deployments, st)
}
return deployments, nil
}
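A minimal, self-contained sketch of how the duplicated "dagger-dev" entry from issue 276 could arise from the `indexDeployment` logic above: if a deployment's plan source and one of its inputs resolve to the same directory, the same deployment is appended to `deploymentsByPath` twice, so a lookup by path reports it twice. This is an assumption about the cause, not a confirmed diagnosis, and the `deployment` type and `index` helper below are hypothetical simplifications rather than dagger code.
```go
package main

import "fmt"

type deployment struct{ Name string }

func main() {
	byPath := map[string][]*deployment{}

	// index mimics the append-only path index: no de-duplication, so the
	// same deployment can be recorded more than once under the same path.
	index := func(d *deployment, dirs ...string) {
		for _, dir := range dirs {
			byPath[dir] = append(byPath[dir], d)
		}
	}

	dagger := &deployment{Name: "dagger"}
	daggerDev := &deployment{Name: "dagger-dev"}

	// "dagger" is indexed once, for its plan source.
	index(dagger, "/home/shykes/dagger")
	// "dagger-dev" is indexed for its plan source AND for an input that
	// points at the same directory.
	index(daggerDev, "/home/shykes/dagger", "/home/shykes/dagger")

	for _, d := range byPath["/home/shykes/dagger"] {
		fmt.Println(d.Name) // dagger, dagger-dev, dagger-dev
	}
}
```
Run as-is, this prints `dagger`, `dagger-dev`, `dagger-dev`, mirroring the `deployments=[...]` list in the error output; de-duplicating appends per path (or keying the index by deployment ID) would avoid the repeat.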
|
closed
|
dagger/dagger
|
https://github.com/dagger/dagger
| 848 |
`os.#JSON` package implementation to reinject variables inside cue tree
|
While working with dagger, I felt the need to process some variables inside a shell script and reinject them into the cue tree.
As of now, this is only possible with the low-level `op` operations, as `os.#File` only retrieves strings. An `os.#Json` or `os.#Export` might be necessary if we want `os.#Container` to become mainstream.
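A rough sketch of the pattern this request describes, written in plain Go rather than dagger's CUE packages (so none of the identifiers below are dagger APIs): run a shell snippet, capture the JSON it prints, and decode it back into structured values instead of a single opaque string, which is roughly what an `os.#JSON`-style package would expose inside the cue tree. The command and field names are made up for illustration.
```go
package main

import (
	"encoding/json"
	"fmt"
	"os/exec"
)

func main() {
	// The script computes some values and emits them as JSON on stdout.
	out, err := exec.Command("sh", "-c",
		`echo '{"version":"1.2.3","arch":"amd64"}'`).Output()
	if err != nil {
		panic(err)
	}

	// Decode the output back into structured data instead of keeping it
	// as a single string, as os.#File would today.
	var vars map[string]string
	if err := json.Unmarshal(out, &vars); err != nil {
		panic(err)
	}

	fmt.Println(vars["version"], vars["arch"])
}
```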
|
https://github.com/dagger/dagger/issues/848
|
https://github.com/dagger/dagger/pull/306
|
12302f7aa13089fe583b58a3bfa3af2418228c76
|
308ade0a794d141ddf0143979bc94cd6af5263af
| 2021-07-27T08:08:35Z |
go
| 2021-04-09T21:03:56Z |
go.mod
|
module dagger.io/go
go 1.16
require (
cuelang.org/go v0.3.0
github.com/KromDaniel/jonson v0.0.0-20180630143114-d2f9c3c389db
github.com/containerd/console v1.0.1
github.com/docker/distribution v2.7.1+incompatible
github.com/emicklei/proto v1.9.0 // indirect
github.com/google/uuid v1.2.0
github.com/jaguilar/vt100 v0.0.0-20150826170717-2703a27b14ea
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db
github.com/moby/buildkit v0.8.2
github.com/morikuni/aec v1.0.0
github.com/opencontainers/go-digest v1.0.0
github.com/opentracing/opentracing-go v1.2.0
github.com/rs/zerolog v1.21.0
github.com/spf13/cobra v1.1.3
github.com/spf13/viper v1.7.1
github.com/stretchr/testify v1.7.0
github.com/tonistiigi/fsutil v0.0.0-20201103201449-0834f99b7b85
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea
github.com/uber/jaeger-client-go v2.25.0+incompatible
go.mozilla.org/sops/v3 v3.7.0
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221
golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1
gopkg.in/yaml.v3 v3.0.0-20210107172259-749611fa9fcc
)
replace (
github.com/hashicorp/go-immutable-radix => github.com/tonistiigi/go-immutable-radix v0.0.0-20170803185627-826af9ccf0fe
github.com/jaguilar/vt100 => github.com/tonistiigi/vt100 v0.0.0-20190402012908-ad4c4a574305
// genproto: corresponds to containerd
google.golang.org/genproto => google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63
)
|
closed
|
dagger/dagger
|
https://github.com/dagger/dagger
| 848 |
`os.#JSON` package implementation to reinject variables inside cue tree
|
While working with dagger, I felt the need to process some variables inside a shell script and reinject them into the cue tree.
As of now, this is only possible with the low-level `op` operations, as `os.#File` only retrieves strings. An `os.#Json` or `os.#Export` might be necessary if we want `os.#Container` to become mainstream.
|
https://github.com/dagger/dagger/issues/848
|
https://github.com/dagger/dagger/pull/306
|
12302f7aa13089fe583b58a3bfa3af2418228c76
|
308ade0a794d141ddf0143979bc94cd6af5263af
| 2021-07-27T08:08:35Z |
go
| 2021-04-09T21:03:56Z |
go.sum
|
bazil.org/fuse v0.0.0-20160811212531-371fbbdaa898/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8=
bazil.org/fuse v0.0.0-20180421153158-65cc252bf669/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8=
cloud.google.com/go v0.25.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.31.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.37.2/go.mod h1:H8IAquKe2L30IxoupDgqTaQvKSwF/c8prYHynGIWQbA=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
cloud.google.com/go v0.39.0/go.mod h1:rVLT6fkc8chs9sfPtFc1SBH6em7n+ZoXaG+87tDISts=
cloud.google.com/go v0.43.0/go.mod h1:BOSR3VbTLkk6FDC/TcffxP4NF/FFBGA5ku+jvKOP7pg=
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
cloud.google.com/go v0.51.0/go.mod h1:hWtGJ6gnXH+KgDv+V0zFGDvpi07n3z8ZNj3T1RW0Gcw=
cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=
cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
cloud.google.com/go v0.57.0 h1:EpMNVUorLiZIELdMZbCYX/ByTFCdoYopYAGxaGVz9ms=
cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
code.gitea.io/sdk/gitea v0.12.0/go.mod h1:z3uwDV/b9Ls47NGukYM9XhnHtqPh/J+t40lsUrR6JDY=
contrib.go.opencensus.io/exporter/aws v0.0.0-20181029163544-2befc13012d0/go.mod h1:uu1P0UCM/6RbsMrgPa98ll8ZcHM858i/AD06a9aLRCA=
contrib.go.opencensus.io/exporter/ocagent v0.4.12/go.mod h1:450APlNTSR6FrvC3CTRqYosuDstRB9un7SOx2k/9ckA=
contrib.go.opencensus.io/exporter/ocagent v0.5.0/go.mod h1:ImxhfLRpxoYiSq891pBrLVhN+qmP8BTVvdH2YLs7Gl0=
contrib.go.opencensus.io/exporter/stackdriver v0.12.1/go.mod h1:iwB6wGarfphGGe/e5CWqyUk/cLzKnWsOKPVW3no6OTw=
contrib.go.opencensus.io/integrations/ocsql v0.1.4/go.mod h1:8DsSdjz3F+APR+0z0WkU1aRorQCFfRxvqjUUPMbF3fE=
contrib.go.opencensus.io/resource v0.1.1/go.mod h1:F361eGI91LCmW1I/Saf+rX0+OFcigGlFvXwEGEnkRLA=
cuelang.org/go v0.3.0 h1:4+Ugh8fshGfo6x1NoRCZSsLZxMdtUTU+fje+hzU1bvw=
cuelang.org/go v0.3.0/go.mod h1:jvMO35Q4D2D3m2ujAmKESICaYkjMbu5+D+2zIGuWTpQ=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20201218220906-28db891af037/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
filippo.io/age v1.0.0-beta7 h1:RZiSK+N3KL2UwT82xiCavjYw8jJHzWMEUYePAukTpk0=
filippo.io/age v1.0.0-beta7/go.mod h1:chAuTrTb0FTTmKtvs6fQTGhYTvH9AigjN1uEUsvLdZ0=
filippo.io/edwards25519 v1.0.0-alpha.2/go.mod h1:X+pm78QAUPtFLi1z9PYIlS/bdDnvbCOGKtZ+ACWEf7o=
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
git.apache.org/thrift.git v0.12.0/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
github.com/AkihiroSuda/containerd-fuse-overlayfs v1.0.0/go.mod h1:0mMDvQFeLbbn1Wy8P2j3hwFhqBq+FKn8OZPno8WLmp8=
github.com/Azure/azure-amqp-common-go/v2 v2.1.0/go.mod h1:R8rea+gJRuJR6QxTir/XuEd+YuKoUiazDC/N96FiDEU=
github.com/Azure/azure-pipeline-go v0.2.1/go.mod h1:UGSo8XybXnIGZ3epmeBw7Jdz+HiUVpqIlpz/HKHylF4=
github.com/Azure/azure-pipeline-go v0.2.2/go.mod h1:4rQ/NZncSvGqNkkOsNpOU1tgoNuIlp9AfUH5G1tvCHc=
github.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v19.1.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v29.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v30.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v31.2.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v35.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v38.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v42.3.0+incompatible h1:PAHkmPqd/vQV4LJcqzEUM1elCyTMWjbrO8oFMl0dvBE=
github.com/Azure/azure-sdk-for-go v42.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-service-bus-go v0.9.1/go.mod h1:yzBx6/BUGfjfeqbRZny9AQIbIe3AcV9WZbAdpkoXOa0=
github.com/Azure/azure-storage-blob-go v0.8.0/go.mod h1:lPI3aLPpuLTeUwh1sViKXFxwl2B6teiRqI0deQUvsw0=
github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78 h1:w+iIsaOQNcT7OZ575w+acHgRric5iCyQh+xv+KJ4HB8=
github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8=
github.com/Azure/go-autorest v10.8.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest v10.15.5+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest v12.0.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest v14.1.1+incompatible h1:m2F62e1Zk5DV3HENGdH/wEuzvJZIynHG4fHF7oiQwgE=
github.com/Azure/go-autorest v14.1.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest/autorest v0.1.0/go.mod h1:AKyIcETwSUFxIcs/Wnq/C+kwCtlEYGUVd7FPNb2slmg=
github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI=
github.com/Azure/go-autorest/autorest v0.9.3/go.mod h1:GsRuLYvwzLjjjRoWEIyMUaYq8GNUx2nRB378IPt/1p0=
github.com/Azure/go-autorest/autorest v0.9.6/go.mod h1:/FALq9T/kS7b5J5qsQ+RSTUdAmGFqi0vUdVNNx8q630=
github.com/Azure/go-autorest/autorest v0.10.2 h1:NuSF3gXetiHyUbVdneJMEVyPUYAe5wh+aN08JYAf1tI=
github.com/Azure/go-autorest/autorest v0.10.2/go.mod h1:/FALq9T/kS7b5J5qsQ+RSTUdAmGFqi0vUdVNNx8q630=
github.com/Azure/go-autorest/autorest/adal v0.1.0/go.mod h1:MeS4XhScH55IST095THyTxElntu7WqB7pNbZo8Q5G3E=
github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0=
github.com/Azure/go-autorest/autorest/adal v0.8.0/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc=
github.com/Azure/go-autorest/autorest/adal v0.8.1/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q=
github.com/Azure/go-autorest/autorest/adal v0.8.2/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q=
github.com/Azure/go-autorest/autorest/adal v0.8.3 h1:O1AGG9Xig71FxdX9HO5pGNyZ7TbSyHaVg+5eJO/jSGw=
github.com/Azure/go-autorest/autorest/adal v0.8.3/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q=
github.com/Azure/go-autorest/autorest/azure/auth v0.1.0/go.mod h1:Gf7/i2FUpyb/sGBLIFxTBzrNzBo7aPXXE3ZVeDRwdpM=
github.com/Azure/go-autorest/autorest/azure/auth v0.4.2 h1:iM6UAvjR97ZIeR93qTcwpKNMpV+/FTWjwEbuPD495Tk=
github.com/Azure/go-autorest/autorest/azure/auth v0.4.2/go.mod h1:90gmfKdlmKgfjUpnCEpOJzsUEjrWDSLwHIG73tSXddM=
github.com/Azure/go-autorest/autorest/azure/cli v0.1.0/go.mod h1:Dk8CUAt/b/PzkfeRsWzVG9Yj3ps8mS8ECztu43rdU8U=
github.com/Azure/go-autorest/autorest/azure/cli v0.3.1 h1:LXl088ZQlP0SBppGFsRZonW6hSvwgL5gRByMbvUbx8U=
github.com/Azure/go-autorest/autorest/azure/cli v0.3.1/go.mod h1:ZG5p860J94/0kI9mNJVoIoLgXcirM2gF5i2kWloofxw=
github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA=
github.com/Azure/go-autorest/autorest/date v0.2.0 h1:yW+Zlqf26583pE43KhfnhFcdmSWlm5Ew6bxipnr/tbM=
github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g=
github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
github.com/Azure/go-autorest/autorest/mocks v0.3.0 h1:qJumjCaCudz+OcqE9/XtEPfvtOjOmKaui4EOpFI6zZc=
github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM=
github.com/Azure/go-autorest/autorest/to v0.2.0/go.mod h1:GunWKJp1AEqgMaGLV+iocmRAJWqST1wQYhyyjXJ3SJc=
github.com/Azure/go-autorest/autorest/to v0.3.0 h1:zebkZaadz7+wIQYgC7GXaz3Wb28yKYfVkkBKwc38VF8=
github.com/Azure/go-autorest/autorest/to v0.3.0/go.mod h1:MgwOyqaIuKdG4TL/2ywSsIWKAfJfgHDo8ObuUk3t5sA=
github.com/Azure/go-autorest/autorest/validation v0.1.0/go.mod h1:Ha3z/SqBeaalWQvokg3NZAlQTalVMtOIAs1aGK7G6u8=
github.com/Azure/go-autorest/autorest/validation v0.2.0 h1:15vMO4y76dehZSq7pAaOLQxC6dZYsSrj2GQpflyM/L4=
github.com/Azure/go-autorest/autorest/validation v0.2.0/go.mod h1:3EEqHnBxQGHXRYq3HT1WyXAvT7LLY3tl70hw6tQIbjI=
github.com/Azure/go-autorest/logger v0.1.0 h1:ruG4BSDXONFRrZZJ2GUXDiUyVpayPmb1GnWeHDdaNKY=
github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc=
github.com/Azure/go-autorest/tracing v0.1.0/go.mod h1:ROEEAFwXycQw7Sn3DXNtEedEvdeRAgDr0izn4z5Ij88=
github.com/Azure/go-autorest/tracing v0.5.0 h1:TRn4WjSnkcSy5AEG3pnbtFSwNtwzjr4VYyQflFE619k=
github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/Djarvur/go-err113 v0.0.0-20200410182137-af658d038157/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs=
github.com/Djarvur/go-err113 v0.1.0/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs=
github.com/GoogleCloudPlatform/cloudsql-proxy v0.0.0-20191009163259-e802c2cb94ae/go.mod h1:mjwGPas4yKduTyubHvD1Atl9r1rUq8DfVy+gkVvZ+oo=
github.com/GoogleCloudPlatform/k8s-cloud-provider v0.0.0-20190822182118-27a4ced34534/go.mod h1:iroGtC8B3tQiqtds1l+mgk/BBOrxbqjH+eUfFQYRc14=
github.com/KromDaniel/jonson v0.0.0-20180630143114-d2f9c3c389db h1:Zkf5kwhxdW0xV7WM/crqIcOP5LCFGnAmumWSFAewJ74=
github.com/KromDaniel/jonson v0.0.0-20180630143114-d2f9c3c389db/go.mod h1:RU+6d0CNIRSp6yo1mXLIIrnFa/3LHhvcDVLVJyovptM=
github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/Masterminds/semver/v3 v3.0.3/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
github.com/Masterminds/semver/v3 v3.1.0/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA=
github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=
github.com/Microsoft/go-winio v0.4.15-0.20200908182639-5b44b70ab3ab/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=
github.com/Microsoft/go-winio v0.4.15 h1:qkLXKzb1QoVatRyd/YlXZ/Kg0m5K3SPuoD82jjSOaBc=
github.com/Microsoft/go-winio v0.4.15/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=
github.com/Microsoft/hcsshim v0.8.7/go.mod h1:OHd7sQqRFrYd3RmSgbgji+ctCwkbq2wbEYNSzOYtcBQ=
github.com/Microsoft/hcsshim v0.8.9/go.mod h1:5692vkUqntj1idxauYlpoINNKeqCiG6Sg38RRsjT5y8=
github.com/Microsoft/hcsshim v0.8.10 h1:k5wTrpnVU2/xv8ZuzGkbXVd3js5zJ8RnumPo5RxiIxU=
github.com/Microsoft/hcsshim v0.8.10/go.mod h1:g5uw8EV2mAlzqe94tfNBNdr89fnbD/n3HV0OhsddkmM=
github.com/Microsoft/hcsshim/test v0.0.0-20200826032352-301c83a30e7c/go.mod h1:30A5igQ91GEmhYJF8TaRP79pMBOYynRsyOByfVV0dU4=
github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ=
github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw=
github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/OpenPeeDeeP/depguard v1.0.1/go.mod h1:xsIw86fROiiwelg+jB2uM9PiKihMMmUx/1V+TNhjQvM=
github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
github.com/alecthomas/kingpin v2.2.6+incompatible/go.mod h1:59OFYbFVLKQKq+mqrL6Rw5bR0c3ACQaawgXx0QYndlE=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/apex/log v1.1.4/go.mod h1:AlpoD9aScyQfJDVHmLMEcx4oU6LqzkWp4Mg9GdAcEvQ=
github.com/apex/log v1.3.0/go.mod h1:jd8Vpsr46WAe3EZSQ/IUMs2qQD/GOycT5rPWCO1yGcs=
github.com/apex/logs v0.0.4/go.mod h1:XzxuLZ5myVHDy9SAmYpamKKRNApGj54PfYLcFrXqDwo=
github.com/aphistic/golf v0.0.0-20180712155816-02c07f170c5a/go.mod h1:3NqKYiepwy8kCu4PNA+aP7WUV72eXWJeP9/r3/K9aLE=
github.com/aphistic/sweet v0.2.0/go.mod h1:fWDlIh/isSE9n6EPsRmC0det+whmX6dJid3stzu0Xys=
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
github.com/aws/aws-sdk-go v1.15.27/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
github.com/aws/aws-sdk-go v1.15.90/go.mod h1:es1KtYUFs7le0xQ3rOihkuoVD90z7D0fR2Qm4S00/gU=
github.com/aws/aws-sdk-go v1.16.26/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.19.18/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.19.45/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.20.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.25.11/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.27.1/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.31.6/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=
github.com/aws/aws-sdk-go v1.37.18 h1:SRdWLg+DqMFWX8HB3UvXyAoZpw9IDIUYnSTwgzOYbqg=
github.com/aws/aws-sdk-go v1.37.18/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I=
github.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA=
github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb/go.mod h1:PkYb9DJNAwrSvRx5DYA+gUcOIgTGVMNkfSCbZM8cWpI=
github.com/blang/semver v3.1.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
github.com/blang/semver v3.5.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ=
github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/bombsimon/wsl/v2 v2.0.0/go.mod h1:mf25kr/SqFEPhhcxW1+7pxzGlW+hIl/hYTKY95VwV8U=
github.com/bombsimon/wsl/v2 v2.2.0/go.mod h1:Azh8c3XGEJl9LyX0/sFC+CKMc7Ssgua0g+6abzXN4Pg=
github.com/bombsimon/wsl/v3 v3.0.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc=
github.com/bombsimon/wsl/v3 v3.1.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc=
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk=
github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8=
github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50=
github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE=
github.com/caarlos0/ctrlc v1.0.0/go.mod h1:CdXpj4rmq0q/1Eb44M9zi2nKB0QraNKuRGYGrrHhcQw=
github.com/campoy/unique v0.0.0-20180121183637-88950e537e7e/go.mod h1:9IOqJGCPMSc6E5ydlp5NIonxObaeu/Iub/X03EKPVYo=
github.com/cavaliercoder/go-cpio v0.0.0-20180626203310-925f9528c45e/go.mod h1:oDpT4efm8tSYHXV5tHSdRvBet/b/QzxZ+XyyPehvm3A=
github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4=
github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/cilium/ebpf v0.0.0-20200110133405-4032b1d8aae3/go.mod h1:MA5e5Lr8slmEg9bt0VpxxWqJlO4iwu3FBdHUzV7wQVg=
github.com/cilium/ebpf v0.0.0-20200702112145-1c8d4c9ef775/go.mod h1:7cR51M8ViRLIdUjrmSXlK9pkrsDlLHbO8jiB8X8JnOc=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
github.com/cockroachdb/apd/v2 v2.0.1 h1:y1Rh3tEU89D+7Tgbw+lp52T6p/GJLpDmNvr10UWqLTE=
github.com/cockroachdb/apd/v2 v2.0.1/go.mod h1:DDxRlzC2lo3/vSlmSoS7JkqbbrARPuFOGr0B9pvN3Gw=
github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8=
github.com/codahale/hdrhistogram v0.0.0-20160425231609-f8ad88b59a58 h1:hHWif/4GirK3P5uvCyyj941XSVIQDzuJhbEguCICdPE=
github.com/codahale/hdrhistogram v0.0.0-20160425231609-f8ad88b59a58/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=
github.com/containerd/cgroups v0.0.0-20190919134610-bf292b21730f/go.mod h1:OApqhQ4XNSNC13gXIwDjhOQxjWa/NxkwZXJ1EvqT0ko=
github.com/containerd/cgroups v0.0.0-20200531161412-0dbf7f05ba59/go.mod h1:pA0z1pT8KYB3TCXK/ocprsh7MAkoW8bZVzPdih9snmM=
github.com/containerd/cgroups v0.0.0-20200710171044-318312a37340 h1:9atoWyI9RtXFwf7UDbme/6M8Ud0rFrx+Q3ZWgSnsxtw=
github.com/containerd/cgroups v0.0.0-20200710171044-318312a37340/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo=
github.com/containerd/console v0.0.0-20180822173158-c12b1e7919c1/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw=
github.com/containerd/console v0.0.0-20191206165004-02ecf6a7291e/go.mod h1:8Pf4gM6VEbTNRIT26AyyU7hxdQU3MvAvxVI0sc00XBE=
github.com/containerd/console v1.0.0/go.mod h1:8Pf4gM6VEbTNRIT26AyyU7hxdQU3MvAvxVI0sc00XBE=
github.com/containerd/console v1.0.1 h1:u7SFAJyRqWcG6ogaMAx3KjSTy1e3hT9QxqX7Jco7dRc=
github.com/containerd/console v1.0.1/go.mod h1:XUsP6YE/mKtz6bxc+I8UiKKTP04qjQL4qcS3XoQ5xkw=
github.com/containerd/containerd v1.3.0-beta.2.0.20190828155532-0293cbd26c69/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/containerd v1.3.0/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/containerd v1.3.2/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/containerd v1.4.0/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/containerd v1.4.1-0.20201117152358-0edc412565dc h1:XbZ/DDsFDigeOQ9M3YXhvE6d1AEHdxKAzIgkswip7dI=
github.com/containerd/containerd v1.4.1-0.20201117152358-0edc412565dc/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=
github.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y=
github.com/containerd/continuity v0.0.0-20200710164510-efbc4488d8fe h1:PEmIrUvwG9Yyv+0WKZqjXfSFDeZjs/q15g0m08BYS9k=
github.com/containerd/continuity v0.0.0-20200710164510-efbc4488d8fe/go.mod h1:cECdGN1O8G9bgKTlLhuPJimka6Xb/Gg7vYzCTNVxhvo=
github.com/containerd/fifo v0.0.0-20190226154929-a9fb20d87448/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI=
github.com/containerd/fifo v0.0.0-20200410184934-f15a3290365b h1:qUtCegLdOUVfVJOw+KDg6eJyE1TGvLlkGEd1091kSSQ=
github.com/containerd/fifo v0.0.0-20200410184934-f15a3290365b/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0=
github.com/containerd/go-cni v1.0.1/go.mod h1:+vUpYxKvAF72G9i1WoDOiPGRtQpqsNW/ZHtSlv++smU=
github.com/containerd/go-runc v0.0.0-20180907222934-5a6d9f37cfa3/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0=
github.com/containerd/go-runc v0.0.0-20200220073739-7016d3ce2328/go.mod h1:PpyHrqVs8FTi9vpyHwPwiNEGaACDxT/N/pLcvMSRA9g=
github.com/containerd/go-runc v0.0.0-20201020171139-16b287bc67d0/go.mod h1:cNU0ZbCgCQVZK4lgG3P+9tn9/PaJNmoDXPpoJhDR+Ok=
github.com/containerd/stargz-snapshotter v0.0.0-20201027054423-3a04e4c2c116/go.mod h1:o59b3PCKVAf9jjiKtCc/9hLAd+5p/rfhBfm6aBcTEr4=
github.com/containerd/ttrpc v0.0.0-20190828154514-0e0f228740de/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o=
github.com/containerd/ttrpc v1.0.1 h1:IfVOxKbjyBn9maoye2JN95pgGYOmPkQVqxtOu7rtNIc=
github.com/containerd/ttrpc v1.0.1/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y=
github.com/containerd/typeurl v0.0.0-20180627222232-a93fcdb778cd/go.mod h1:Cm3kwCdlkCfMSHURc+r6fwoGH6/F1hH3S4sg0rLFWPc=
github.com/containerd/typeurl v1.0.1 h1:PvuK4E3D5S5q6IqsPDCy928FhP0LUIGcmZ/Yhgp5Djw=
github.com/containerd/typeurl v1.0.1/go.mod h1:TB1hUtrpaiO88KEK56ijojHS1+NeF0izUACaJW2mdXg=
github.com/containernetworking/cni v0.8.0/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY=
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
github.com/coreos/go-oidc v2.1.0+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc=
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd/v22 v22.0.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk=
github.com/coreos/go-systemd/v22 v22.1.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk=
github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/coreos/pkg v0.0.0-20180108230652-97fdf19511ea/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4=
github.com/davecgh/go-spew v0.0.0-20151105211317-5215b55f46b2/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba/go.mod h1:dV8lFg6daOBZbT6/BDGIz6Y3WFGn8juu6G+CQ6LHtl0=
github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY=
github.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/dimchansky/utfbom v1.1.0 h1:FcM3g+nofKgUteL8dm/UpdRXNC9KmADgTpLKsu0TRo4=
github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8=
github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E=
github.com/docker/cli v0.0.0-20190925022749-754388324470/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/cli v0.0.0-20191017083524-a8ff7f821017/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/cli v20.10.0-beta1.0.20201029214301-1d20b15adc38+incompatible h1:r99CiNpN5pxrSuSH36suYxrbLxFOhBvQ0sEH6624MHs=
github.com/docker/cli v20.10.0-beta1.0.20201029214301-1d20b15adc38+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY=
github.com/docker/distribution v2.6.0-rc.1.0.20180327202408-83389a148052+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/distribution v2.7.1+incompatible h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug=
github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/docker v0.0.0-20200511152416-a93e9eb0e95c/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v0.7.3-0.20190327010347-be7ac8be2ae0/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v1.4.2-0.20180531152204-71cd53e4a197/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v1.4.2-0.20190924003213-a8608b5b67c7/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v17.12.0-ce-rc1.0.20200730172259-9f28837c1d93+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v20.10.0-beta1.0.20201110211921-af34b94a78a1+incompatible h1:J2OhsbfqoBRRT048iD/tqXBvEQWQATQ8vew6LqQmDSU=
github.com/docker/docker v20.10.0-beta1.0.20201110211921-af34b94a78a1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker-credential-helpers v0.6.3 h1:zI2p9+1NQYdnG6sMU26EX4aVGlqbInSQxQXLvzJ4RPQ=
github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y=
github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c h1:+pKlWGMw7gf6bQ+oDZB4KHQFypsfjYlq/C4rfL7D3g8=
github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA=
github.com/docker/go-metrics v0.0.0-20180209012529-399ea8c73916/go.mod h1:/u0gXw0Gay3ceNrsHubL3BtdOL2fHf93USgMTe0W5dI=
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=
github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docker/libnetwork v0.8.0-dev.2.0.20200917202933-d0951081b35f h1:jC/ZXgYdzCUuKFkKGNiekhnIkGfUrdelEqvg4Miv440=
github.com/docker/libnetwork v0.8.0-dev.2.0.20200917202933-d0951081b35f/go.mod h1:93m0aTqz6z+g32wla4l4WxTrdtvBRmVzYRkYvasA5Z8=
github.com/docker/libtrust v0.0.0-20150114040149-fa567046d9b1/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE=
github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
github.com/elazarl/goproxy v0.0.0-20170405201442-c4fc26588b6e/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emicklei/proto v1.6.15/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A=
github.com/emicklei/proto v1.9.0 h1:l0QiNT6Qs7Yj0Mb4X6dnWBQer4ebei2BFcgQLbGqUDc=
github.com/emicklei/proto v1.9.0/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/fortytw2/leaktest v1.2.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=
github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
github.com/go-critic/go-critic v0.4.1/go.mod h1:7/14rZGnZbY6E38VEGk2kVhoq6itzc1E68facVDK23g=
github.com/go-critic/go-critic v0.4.3/go.mod h1:j4O3D4RoIwRqlZw5jJpx0BNfXWWbpcJoKu5cYSe4YmQ=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-ldap/ldap v3.0.2+incompatible/go.mod h1:qfd9rJvER9Q0/D/Sqn1DfHRoBp40uXYvFoEVrNEPqRc=
github.com/go-lintpack/lintpack v0.5.2/go.mod h1:NwZuYi2nUHho8XEIZ6SIxihrnPoqBTDqfpXvXAN0sXM=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=
github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU=
github.com/go-ole/go-ole v1.2.1/go.mod h1:7FAglXiTm7HKlQRDeOQ6ZNUHidzCWXuZWq/1dTyBNF8=
github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0=
github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg=
github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg=
github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc=
github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8=
github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc=
github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8Lj9mJglo=
github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I=
github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-test/deep v1.0.2-0.20181118220953-042da051cf31/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/go-toolsmith/astcast v1.0.0/go.mod h1:mt2OdQTeAQcY4DQgPSArJjHCcOwlX+Wl/kwN+LbLGQ4=
github.com/go-toolsmith/astcopy v1.0.0/go.mod h1:vrgyG+5Bxrnz4MZWPF+pI4R8h3qKRjjyvV/DSez4WVQ=
github.com/go-toolsmith/astequal v0.0.0-20180903214952-dcb477bfacd6/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY=
github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY=
github.com/go-toolsmith/astfmt v0.0.0-20180903215011-8f8ee99c3086/go.mod h1:mP93XdblcopXwlyN4X4uodxXQhldPGZbcEJIimQHrkg=
github.com/go-toolsmith/astfmt v1.0.0/go.mod h1:cnWmsOAuq4jJY6Ct5YWlVLmcmLMn1JUPuQIHCY7CJDw=
github.com/go-toolsmith/astinfo v0.0.0-20180906194353-9809ff7efb21/go.mod h1:dDStQCHtmZpYOmjRP/8gHHnCCch3Zz3oEgCdZVdtweU=
github.com/go-toolsmith/astp v0.0.0-20180903215135-0af7e3c24f30/go.mod h1:SV2ur98SGypH1UjcPpCatrV5hPazG6+IfNHbkDXBRrk=
github.com/go-toolsmith/astp v1.0.0/go.mod h1:RSyrtpVlfTFGDYRbrjyWP1pYu//tSFcvdYrA8meBmLI=
github.com/go-toolsmith/pkgload v0.0.0-20181119091011-e9e65178eee8/go.mod h1:WoMrjiy4zvdS+Bg6z9jZH82QXwkcgCBX6nOfnmdaHks=
github.com/go-toolsmith/pkgload v1.0.0/go.mod h1:5eFArkbO80v7Z0kdngIxsRXRMTaX4Ilcwuh3clNrQJc=
github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8=
github.com/go-toolsmith/typep v1.0.0/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU=
github.com/go-toolsmith/typep v1.0.2/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU=
github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/godbus/dbus v0.0.0-20190422162347-ade71ed3457e/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4=
github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gofrs/flock v0.0.0-20190320160742-5135e617513b/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
github.com/gofrs/flock v0.7.3 h1:I0EKY9l8HZCXTMYC4F80vwT6KNypV9uYKP3Alm/hjmQ=
github.com/gofrs/flock v0.7.3/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
github.com/gogo/googleapis v1.2.0/go.mod h1:Njal3psf3qN6dwBtQfUmBZh2ybovJ0tlu3o/AC7HYjU=
github.com/gogo/googleapis v1.3.2 h1:kX1es4djPJrsDhY7aZKJy7aZasdcB5oSOEphMjSB53c=
github.com/gogo/googleapis v1.3.2/go.mod h1:5YRNX2z1oM5gXdAkurHa942MDgEJyk02w4OecKY87+c=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/gogo/protobuf v1.3.1 h1:DqDEcV5aeaTmdFBePNpYsp3FlcVH/2ISVVM9Qf8PSls=
github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/protobuf v0.0.0-20161109072736-4bd1920723d7/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0=
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4=
github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk=
github.com/golangci/errcheck v0.0.0-20181223084120-ef45e06d44b6/go.mod h1:DbHgvLiFKX1Sh2T1w8Q/h4NAI8MHIpzCdnBUDTXU3I0=
github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8=
github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3/go.mod h1:JXrF4TWy4tXYn62/9x8Wm/K/dm06p8tCKwFRDPZG/1o=
github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee/go.mod h1:ozx7R9SIwqmqf5pRP90DhR2Oay2UIjGuKheCBCNwAYU=
github.com/golangci/gocyclo v0.0.0-20180528144436-0a533e8fa43d/go.mod h1:ozx7R9SIwqmqf5pRP90DhR2Oay2UIjGuKheCBCNwAYU=
github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU=
github.com/golangci/golangci-lint v1.23.7/go.mod h1:g/38bxfhp4rI7zeWSxcdIeHTQGS58TCak8FYcyCmavQ=
github.com/golangci/golangci-lint v1.27.0/go.mod h1:+eZALfxIuthdrHPtfM7w/R3POJLjHDfJJw8XZl9xOng=
github.com/golangci/ineffassign v0.0.0-20190609212857-42439a7714cc/go.mod h1:e5tpTHCfVze+7EpLEozzMB3eafxo2KT5veNg1k6byQU=
github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg=
github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZqDe4LMs4ZHD0oMUlt9G2LWuDGoisJTBzLMV9o=
github.com/golangci/misspell v0.0.0-20180809174111-950f5d19e770/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA=
github.com/golangci/misspell v0.3.5/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA=
github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21/go.mod h1:tf5+bzsHdTM0bsB7+8mt0GUMvjCgwLpTapNZHU8AajI=
github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4=
github.com/golangci/revgrep v0.0.0-20180812185044-276a5c0a1039/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4=
github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ=
github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/crfs v0.0.0-20191108021818-71d77da419c9/go.mod h1:etGhoOqfwPkooV6aqoX3eBGQOJblqdoc9XvWOeuxpPw=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0 h1:/QaMHBdZ26BB3SSst0Iwl10Epc+xhTquomWX0oZEB6w=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-containerregistry v0.0.0-20191010200024-a3d713f9b7f8/go.mod h1:KyKXa9ciM8+lgMXwOVsXi7UxGrsf9mM61Mzs+xKUrKE=
github.com/google/go-containerregistry v0.1.2/go.mod h1:GPivBPgdAyd2SU+vf6EpsgOtWDuPqjW0hJZt4rNdTZ4=
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
github.com/google/go-github/v28 v28.1.1/go.mod h1:bsqJWQX05omyWVmc00nEUql9mhQyv38lDZ8kPZcQVoM=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/google/go-replayers/grpcreplay v0.1.0/go.mod h1:8Ig2Idjpr6gifRd6pNVggX6TC1Zw6Jx74AKp7QNH2QE=
github.com/google/go-replayers/httpreplay v0.1.0/go.mod h1:YKZViNhiGgqdBlUbI2MwGpq4pXxNmhJLPHQ7cv2b5no=
github.com/google/gofuzz v0.0.0-20161122191042-44d81051d367/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI=
github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian v2.1.1-0.20190517191504-25dcb96d9e51+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/rpmpack v0.0.0-20191226140753-aa36bfddb3a0/go.mod h1:RaTPr0KUf2K7fnZYLNDrr8rxAamWs3iNywJLtQ2AzBg=
github.com/google/shlex v0.0.0-20181106134648-c34317bd91bf/go.mod h1:RpwtwJQFrIEPstU94h88MWPXP2ektJZ8cZ0YntAmXiE=
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
github.com/google/subcommands v1.0.1/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.2.0 h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs=
github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/wire v0.3.0/go.mod h1:i1DMg/Lu8Sz5yYl25iOdmc5CT5qusaa+zmRWs16741s=
github.com/google/wire v0.4.0/go.mod h1:ngWDr9Qvq3yZA10YrxfyGELY/AFWGVpy9c1LTRi1EoU=
github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
github.com/googleapis/gax-go v2.0.2+incompatible h1:silFMLAnr330+NRuag/VjIGF7TLp/LBrV2CJKFLWEww=
github.com/googleapis/gax-go v2.0.2+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY=
github.com/googleapis/gnostic v0.2.2/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY=
github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg=
github.com/gookit/color v1.2.4/go.mod h1:AhIE+pS6D4Ql0SQWbBeXPHw7gY0/sjHoA4s/n1KB7xg=
github.com/gophercloud/gophercloud v0.1.0/go.mod h1:vxM41WHh5uqHVBMZHzuwNOHh8XEoIEcSTewFxm1c5g8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/goreleaser/goreleaser v0.136.0/go.mod h1:wiKrPUeSNh6Wu8nUHxZydSOVQ/OZvOaO7DTtFqie904=
github.com/goreleaser/nfpm v1.2.1/go.mod h1:TtWrABZozuLOttX2uDlYyECfQX7x5XYkVxhjYcR6G9w=
github.com/goreleaser/nfpm v1.3.0/go.mod h1:w0p7Kc9TAUgWMyrub63ex3M2Mgw88M4GZXoTq5UCb40=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
github.com/gorilla/handlers v0.0.0-20150720190736-60c7bfde3e33/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ=
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.7.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE=
github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE=
github.com/gotestyourself/gotestyourself v2.2.0+incompatible/go.mod h1:zZKM6oeNM8k+FRljX1mnzVYeS8wiGgQyvST1/GafPbY=
github.com/goware/prefixer v0.0.0-20160118172347-395022866408 h1:Y9iQJfEqnN3/Nce9cOegemcy/9Ai5k3huT6E80F3zaw=
github.com/goware/prefixer v0.0.0-20160118172347-395022866408/go.mod h1:PE1ycukgRPJ7bJ9a1fdfQ9j8i/cEcRAoLZzbxYpNB/s=
github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0 h1:0IKlLyQ3Hs9nDaiK5cSHAGmcQEIC8l2Ts1u6x5Dfrqg=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0/go.mod h1:mJzapYve32yjrKlk9GbyCZHuPgZsrbyIbyKhSzOpg6s=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v1.5.0/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
github.com/grpc-ecosystem/grpc-gateway v1.6.2/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.9.2/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645 h1:MJG/KsmcqMwFAkh8mTnAwhyKoB+sTAnY4CACC110tbU=
github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645/go.mod h1:6iZfnjpejD4L/4DwD7NryNaJyCQdzwWwH2MWhCA90Kw=
github.com/hanwen/go-fuse v1.0.0/go.mod h1:unqXarDXqzAk0rt98O2tVndEPIpUgLD9+rwFisZH3Ok=
github.com/hanwen/go-fuse/v2 v2.0.3/go.mod h1:0EQM6aH2ctVpvZ6a+onrQ/vaykxh2GH7hy3e13vzTUY=
github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI=
github.com/hashicorp/go-hclog v0.8.0/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
github.com/hashicorp/go-hclog v0.9.2 h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI=
github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I=
github.com/hashicorp/go-multierror v1.0.0 h1:iVjPR7a6H0tWELX5NxNe7bYopibicUzc7uPribsnS6o=
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-plugin v1.0.1/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY=
github.com/hashicorp/go-retryablehttp v0.5.4/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
github.com/hashicorp/go-retryablehttp v0.6.4/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY=
github.com/hashicorp/go-retryablehttp v0.6.6 h1:HJunrbHTDDbBb/ay4kxa1n+dLmttUlnP3V9oNE4hmsM=
github.com/hashicorp/go-retryablehttp v0.6.6/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY=
github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
github.com/hashicorp/go-rootcerts v1.0.1 h1:DMo4fmknnz0E0evoNYnV48RjWndOsmd6OW+09R3cEP8=
github.com/hashicorp/go-rootcerts v1.0.1/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc=
github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A=
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.3/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
github.com/hashicorp/uuid v0.0.0-20160311170451-ebb0a03e909c/go.mod h1:fHzc09UnyJyqyW+bFuq864eh+wC7dj65aXmXLRe5to0=
github.com/hashicorp/vault/api v1.0.4 h1:j08Or/wryXT4AcHj1oCbMd7IijXcKzYUGw59LGu9onU=
github.com/hashicorp/vault/api v1.0.4/go.mod h1:gDcqh3WGcR1cpF5AJz/B1UFheUEneMoIospckxBxk6Q=
github.com/hashicorp/vault/sdk v0.1.13 h1:mOEPeOhT7jl0J4AMl1E705+BcmeRs1VmKNb9F0sMLy8=
github.com/hashicorp/vault/sdk v0.1.13/go.mod h1:B+hVj7TpuQY1Y/GPbCpffmgd+tSEwvhkWnjtSYCaS2M=
github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
github.com/howeyc/gopass v0.0.0-20170109162249-bf9dde6d0d2c h1:kQWxfPIHVLbgLzphqk3QUflDy9QdksZR4ygR807bpy0=
github.com/howeyc/gopass v0.0.0-20170109162249-bf9dde6d0d2c/go.mod h1:lADxMC39cJJqL93Duh1xhAs4I2Zs8mKS89XWXFGp9cs=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/ishidawataru/sctp v0.0.0-20191218070446-00ab2ac2db07/go.mod h1:co9pwDoBCm1kGxawmb4sPq0cSIOOWNPT4KnHotMP1Zg=
github.com/jarcoal/httpmock v1.0.5/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik=
github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU=
github.com/jingyugao/rowserrcheck v0.0.0-20191204022205-72ab7603b68a/go.mod h1:xRskid8CManxVta/ALEhJha/pweKBaVG6fWgc0yH25s=
github.com/jirfag/go-printf-func-name v0.0.0-20191110105641-45db9963cdd3/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0=
github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0=
github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.0.0-20160803190731-bd40a432e4c7/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
github.com/jmoiron/sqlx v1.2.1-0.20190826204134-d7d95172beb5/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks=
github.com/joefitzgerald/rainbow-reporter v0.1.0/go.mod h1:481CNgqmVHQZzdIbN52CupLJyoVwB10FQ/IQlF1pdL8=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/jpillora/backoff v0.0.0-20180909062703-3050d21c67d7/go.mod h1:2iMrUgbbvHEiQClaW2NsSzMyGHqN+rDFqY705q49KG0=
github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v0.0.0-20180701071628-ab8a2e0c74be/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.4.0/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/cpuid v0.0.0-20180405133222-e7e905edc00e/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs=
github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.1.1/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4=
github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=
github.com/maratori/testpackage v1.0.1/go.mod h1:ddKdw+XG0Phzhx8BFDTKgpWP4i7MpApTE5fXSKAqwDU=
github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho=
github.com/matoous/godox v0.0.0-20190911065817-5d6d842e92eb/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-ieproxy v0.0.0-20190610004146-91bb50d98149/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc=
github.com/mattn/go-ieproxy v0.0.0-20190702010315-6dee0af9227d/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc=
github.com/mattn/go-ieproxy v0.0.1/go.mod h1:pYabZ6IHcRpFh7vIaLfK7rdcWgFEb3SFJ6/gNWuh88E=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-shellwords v1.0.10/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-zglob v0.0.1/go.mod h1:9fxibJccNxU2cnpIKLRRFA7zX7qhkJIQWBb449FYHOo=
github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/maxbrunsfeld/counterfeiter/v6 v6.2.2/go.mod h1:eD9eIE7cdwcMi9rYluz88Jz2VyhSmden33/aXg4oVIY=
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-ps v0.0.0-20190716172923-621e5597135b/go.mod h1:r1VsdOzOPt1ZSrGZWFoNhsAedKnEd6r9Np1+5blZCWk=
github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/go-wordwrap v1.0.0 h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4=
github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
github.com/mitchellh/hashstructure v1.0.0/go.mod h1:QjSHrPWS+BGUVBYkbTZWEnOh3G1DutKwClXU/ABz6AQ=
github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.3.1 h1:cCBH2gTD2K0OtLlv/Y5H01VQCqmlDxz30kS5Y5bqfLA=
github.com/mitchellh/mapstructure v1.3.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A=
github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/moby/buildkit v0.8.2 h1:kvb0cLWss4mOhCxcXSTENzzA+t1JR1eIyXFhDrI+73g=
github.com/moby/buildkit v0.8.2/go.mod h1:5PZi7ALzuxG604ggYSeN+rzC+CyJscuXS7WetulJr1Y=
github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg=
github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc=
github.com/moby/sys/mount v0.1.0/go.mod h1:FVQFLDRWwyBjDTBNQXDlWnSFREqOo3OKX9aqhmeoo74=
github.com/moby/sys/mount v0.1.1 h1:mdhBytJ1SMmMat0gtzWWjFX/87K5j6E/7Q5z7rR0cZY=
github.com/moby/sys/mount v0.1.1/go.mod h1:FVQFLDRWwyBjDTBNQXDlWnSFREqOo3OKX9aqhmeoo74=
github.com/moby/sys/mountinfo v0.1.0/go.mod h1:w2t2Avltqx8vE7gX5l+QiBKxODu2TX0+Syr3h52Tw4o=
github.com/moby/sys/mountinfo v0.1.3/go.mod h1:w2t2Avltqx8vE7gX5l+QiBKxODu2TX0+Syr3h52Tw4o=
github.com/moby/sys/mountinfo v0.4.0 h1:1KInV3Huv18akCu58V7lzNlt+jFmqlu1EaErnEHE/VM=
github.com/moby/sys/mountinfo v0.4.0/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A=
github.com/moby/term v0.0.0-20200915141129-7f0af18e79f2 h1:SPoLlS9qUUnXcIY4pvA4CTwYjk0Is5f4UPEkeESr53k=
github.com/moby/term v0.0.0-20200915141129-7f0af18e79f2/go.mod h1:TjQg8pa4iejrUrjiz0MCtMV38jdMNW4doKSiBrEvCQQ=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180320133207-05fbef0ca5da/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
github.com/mozilla/tls-observatory v0.0.0-20190404164649-a3c1b6cfecfd/go.mod h1:SrKMQvPiws7F7iqYp8/TX+IhxCYhzr6N/1yb8cwHsGk=
github.com/mozilla/tls-observatory v0.0.0-20200317151703-4fa42e1c2dee/go.mod h1:SrKMQvPiws7F7iqYp8/TX+IhxCYhzr6N/1yb8cwHsGk=
github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de h1:D5x39vF5KCwKQaw+OC9ZPiLVHXz3UFw2+psEX+gYcto=
github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de/go.mod h1:kJun4WP5gFuHZgRjZUWWuH1DTxCtxbHDOIJsudS8jzY=
github.com/mrunalp/fileutils v0.0.0-20200520151820-abd8a0e76976/go.mod h1:x8F1gnqOkIEiO4rqoeEEEqQbo7HjGMTvyoq3gej4iT0=
github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw=
github.com/nakabonne/nestif v0.3.0/go.mod h1:dI314BppzXjJ4HsCnbo7XzrJHPszZsjnk5wEBSYHI2c=
github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d/go.mod h1:o96djdrsSGy3AWPyBgZMAGfxZNfgntdJG+11KU4QvbU=
github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo=
github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg=
github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
github.com/onsi/gomega v1.8.1/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA=
github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA=
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.0.0/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
github.com/opencontainers/image-spec v1.0.1 h1:JMemWkRwHx4Zj+fVxWoMCFm/8sYGGrUVojFA6h/TRcI=
github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
github.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
github.com/opencontainers/runc v1.0.0-rc10/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
github.com/opencontainers/runc v1.0.0-rc92 h1:+IczUKCRzDzFDnw99O/PAqrcBBCoRp9xN3cB1SYSNS4=
github.com/opencontainers/runc v1.0.0-rc92/go.mod h1:X1zlU4p7wOlX4+WRCz+hvlRv8phdL7UqbYD+vQwNMmE=
github.com/opencontainers/runtime-spec v0.1.2-0.20190507144316-5b71a03e2700/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-spec v1.0.1/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-spec v1.0.3-0.20200728170252-4d89ac9fbff6 h1:NhsM2gc769rVWDqJvapK37r+7+CBXI8xHhnfnt8uQsg=
github.com/opencontainers/runtime-spec v1.0.3-0.20200728170252-4d89ac9fbff6/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/runtime-tools v0.0.0-20181011054405-1d69bd0f9c39/go.mod h1:r3f7wjNzSs2extwzU3Y+6pKfobzPh+kKFJ3ofN+3nfs=
github.com/opencontainers/selinux v1.6.0/go.mod h1:VVGKuOLlE7v4PJyT6h7mNWvq1rzqiriPsEqVhc+svHE=
github.com/opencontainers/selinux v1.8.0 h1:+77ba4ar4jsCbL1GLbFL8fFM57w6suPfSS9PDLDY7KM=
github.com/opencontainers/selinux v1.8.0/go.mod h1:RScLhm78qiWa2gbVCcGkC7tCGdgk3ogry1nUQF8Evvo=
github.com/opentracing-contrib/go-stdlib v1.0.0/go.mod h1:qtI1ogk+2JhVPIXVc6q+NHziSmy2W5GbdQZFUHADCBU=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs=
github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc=
github.com/openzipkin/zipkin-go v0.1.1/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8=
github.com/openzipkin/zipkin-go v0.1.3/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8=
github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
github.com/ory/dockertest v3.3.4+incompatible h1:VrpM6Gqg7CrPm3bL4Wm1skO+zFWLbh7/Xb5kGEbJRh8=
github.com/ory/dockertest v3.3.4+incompatible/go.mod h1:1vX4m9wsvi00u5bseYwXaSnhNrne+V0E6LAcBILJdPs=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pelletier/go-toml v1.8.0 h1:Keo9qb7iRJs2voHvunFtuuYFsbWeOBh8/P9v/kVMFtw=
github.com/pelletier/go-toml v1.8.0/go.mod h1:D6yutnOGMveHEPV7VQOuvI/gXY61bv+9bAOTRnLElKs=
github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw=
github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.5.0/go.mod h1:qBsxPvzyUincmltOk6iyRVxHYg4adc0OFOv72ZdLa18=
github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA=
github.com/prometheus/client_golang v0.0.0-20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.8.0/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/common v0.0.0-20180110214958-89604d197083/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.0-20190522114515-bc1a522cf7b1/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.5 h1:3+auTFlqw+ZaQYJARz6ArODtkaIwtvBTx3N2NehQlL8=
github.com/prometheus/procfs v0.0.5/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ=
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
github.com/quasilyte/go-consistent v0.0.0-20190521200055-c6f3937de18c/go.mod h1:5STLWrekHfjyYwxBRVRXNOSewLJ3PWfDJd1VyTS21fI=
github.com/quasilyte/go-ruleguard v0.1.2-0.20200318202121-b00d7a75d3d8/go.mod h1:CGFX09Ci3pq9QZdj86B+VGIdNj4VyCo2iPOGS9esB/k=
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/remyoudompheng/bigfft v0.0.0-20170806203942-52369c62f446/go.mod h1:uYEyJGbgTkfkS4+E/PavXkNJcbFIpEtjt2B0KDQ5+9M=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rogpeppe/fastuuid v1.1.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.5.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
github.com/rs/zerolog v1.21.0 h1:Q3vdXlfLNT+OftyBHsU0Y445MD+8m8axjKgf2si0QcM=
github.com/rs/zerolog v1.21.0/go.mod h1:ZPhntP/xmq1nnND05hhpAh2QMhSsA4UN3MGZ6O2J3hM=
github.com/rubiojr/go-vhd v0.0.0-20160810183302-0bfd3b39853c/go.mod h1:DM5xW0nvfNNm2uytzsvhI3OnX8uzaRAg8UX/CnDqbto=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryancurrah/gomodguard v1.0.4/go.mod h1:9T/Cfuxs5StfsocWr4WzDL36HqnX0fVb9d5fSEaLhoE=
github.com/ryancurrah/gomodguard v1.1.0/go.mod h1:4O8tr7hBODaGE6VIhfJDHcwzh5GUccKSJBU0UMXJFVM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk=
github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc=
github.com/sassoftware/go-rpmutils v0.0.0-20190420191620-a8f1baeba37b/go.mod h1:am+Fp8Bt506lA3Rk3QCmSqmYmLMnPDhdDUcosQCAx+I=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/sclevine/spec v1.2.0/go.mod h1:W4J29eT/Kzv7/b9IWLB055Z+qvVC9vt0Arko24q7p+U=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo=
github.com/securego/gosec v0.0.0-20200103095621-79fbf3af8d83/go.mod h1:vvbZ2Ae7AzSq3/kywjUDxSNq2SJ27RxCz2un0H3ePqE=
github.com/securego/gosec v0.0.0-20200401082031-e946c8c39989/go.mod h1:i9l/TNj+yDFh9SZXUTvspXTjbFXgZGP/UvhU1S65A4A=
github.com/securego/gosec/v2 v2.3.0/go.mod h1:UzeVyUXbxukhLeHKV3VVqo7HdoQR9MrRfFmZYotn8ME=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/serialx/hashring v0.0.0-20190422032157-8b2912629002/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc=
github.com/shirou/gopsutil v0.0.0-20190901111213-e4ec7b275ada/go.mod h1:WWnYX4lzhCH5h/3YBfyVA3VbLYjlMZZAQcW9ojMexNc=
github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4/go.mod h1:qsXQc7+bwAM3Q1u/4XEfrquwF8Lw7D7y5cD8CuHnfIc=
github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.0.4-0.20170822132746-89742aefa4b2/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
github.com/sirupsen/logrus v1.7.0 h1:ShrD1U9pZB12TX0cVy0DtePoCH97K8EtX+mg7ZARUtM=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/assertions v1.0.0 h1:UVQPSSmc3qtTi+zPPkCXvZX9VvW/xT/NsRvKfwY81a8=
github.com/smartystreets/assertions v1.0.0/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM=
github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9/go.mod h1:SnhjPscd9TpLiy1LpzGSKh3bXCfxxXuqd9xmQJy3slM=
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/smartystreets/goconvey v0.0.0-20190710185942-9d28bd7c0945/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/smartystreets/gunit v1.0.0/go.mod h1:qwPWnhz6pn0NnRBP++URONOVyNkPyr4SauJk4cUOwJs=
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
github.com/sourcegraph/go-diff v0.5.1/go.mod h1:j2dHj3m8aZgQO8lMTcTnBcXkRRRqi34cd2MNlA9u1mE=
github.com/sourcegraph/go-diff v0.5.3/go.mod h1:v9JDtjCE4HHHCZGId75rg8gkKKa98RVjBcBGsVmMmak=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
github.com/spf13/afero v1.2.2 h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc=
github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cobra v0.0.2-0.20171109065643-2da4a54c5cee/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=
github.com/spf13/cobra v1.1.3 h1:xghbfqPkxzxP3C/f3n5DdpAbdKLj4ZE4BWQI362l53M=
github.com/spf13/cobra v1.1.3/go.mod h1:pGADOWyqRD/YMrPZigI/zbliZ2wVD/23d+is3pSWzOo=
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.1-0.20171106142849-4c012f6dcd95/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
github.com/spf13/viper v1.6.1/go.mod h1:t3iDnF5Jlj76alVNuyFBk5oUMCvsrkbvZK0WQdfDi5k=
github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg=
github.com/spf13/viper v1.7.1 h1:pM5oEahlgWv/WnHXpgbKz7iLIxRf65tye2Ci+XFK5sk=
github.com/spf13/viper v1.7.1/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg=
github.com/stretchr/objx v0.0.0-20180129172003-8a3f7159479f/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/testify v0.0.0-20151208002404-e3a8ff8ce365/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2 h1:b6uOv7YOFK0TYG7HtkIgExQo+2RdLuwRft63jn2HWj8=
github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA=
github.com/tdakkota/asciicheck v0.0.0-20200416190851-d7f85be797a2/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM=
github.com/tdakkota/asciicheck v0.0.0-20200416200610-e657995f937b/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM=
github.com/tetafro/godot v0.3.7/go.mod h1:/7NLHhv08H1+8DNj0MElpAACw1ajsCuf3TKNQxA5S+0=
github.com/tetafro/godot v0.4.2/go.mod h1:/7NLHhv08H1+8DNj0MElpAACw1ajsCuf3TKNQxA5S+0=
github.com/timakin/bodyclose v0.0.0-20190930140734-f7f2e9bca95e/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk=
github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk=
github.com/tj/assert v0.0.0-20171129193455-018094318fb0/go.mod h1:mZ9/Rh9oLWpLLDRpvE+3b7gP/C2YyLFYxNmcLnPTMe0=
github.com/tj/go-elastic v0.0.0-20171221160941-36157cbbebc2/go.mod h1:WjeM0Oo1eNAjXGDx2yma7uG2XoyRZTq1uv3M/o7imD0=
github.com/tj/go-kinesis v0.0.0-20171128231115-08b17f58cb1b/go.mod h1:/yhzCV0xPfx6jb1bBgRFjl5lytqVqZXEaeqWP8lTEao=
github.com/tj/go-spin v1.1.0/go.mod h1:Mg1mzmePZm4dva8Qz60H2lHwmJ2loum4VIrLgVnKwh4=
github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tommy-muehle/go-mnd v1.1.1/go.mod h1:dSUh0FtTP8VhvkL1S+gUR1OKd9ZnSaozuI6r3m6wOig=
github.com/tommy-muehle/go-mnd v1.3.1-0.20200224220436-e6f9a994e8fa/go.mod h1:dSUh0FtTP8VhvkL1S+gUR1OKd9ZnSaozuI6r3m6wOig=
github.com/tonistiigi/fsutil v0.0.0-20201103201449-0834f99b7b85 h1:014iQD8i8EabPWK2XgUuOTxg5s2nhfDmq6GupskfUO8=
github.com/tonistiigi/fsutil v0.0.0-20201103201449-0834f99b7b85/go.mod h1:a7cilN64dG941IOXfhJhlH0qB92hxJ9A1ewrdUmJ6xo=
github.com/tonistiigi/go-immutable-radix v0.0.0-20170803185627-826af9ccf0fe/go.mod h1:/+MCh11CJf2oz0BXmlmqyopK/ad1rKkcOXPoYuPCJYU=
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0=
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk=
github.com/tonistiigi/vt100 v0.0.0-20190402012908-ad4c4a574305 h1:y/1cL5AL2oRcfzz8CAHHhR6kDDfIOT0WEyH5k40sccM=
github.com/tonistiigi/vt100 v0.0.0-20190402012908-ad4c4a574305/go.mod h1:gXOLibKqQTRAVuVZ9gX7G9Ykky8ll8yb4slxsEMoY0c=
github.com/uber/jaeger-client-go v2.25.0+incompatible h1:IxcNZ7WRY1Y3G4poYlx24szfsn/3LvK9QHCq9oQw8+U=
github.com/uber/jaeger-client-go v2.25.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk=
github.com/uber/jaeger-lib v2.2.0+incompatible h1:MxZXOiR2JuoANZ3J6DE/U0kSFv/eJ/GfSYVCjK7dyaw=
github.com/uber/jaeger-lib v2.2.0+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U=
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8=
github.com/ulikunitz/xz v0.5.7/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/ultraware/funlen v0.0.2/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA=
github.com/ultraware/whitespace v0.0.4/go.mod h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA=
github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/uudashr/gocognit v1.0.1/go.mod h1:j44Ayx2KW4+oB6SWMv8KsmHzZrOInQav7D3cQMJ5JUM=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasthttp v1.2.0/go.mod h1:4vX61m6KN+xDduDNwXrhIAVZaZaZiQ1luJk8LWSxF3s=
github.com/valyala/quicktemplate v1.2.0/go.mod h1:EH+4AkTd43SvgIbQHYu59/cJyxDoOVRUAfrukLPuGJ4=
github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio=
github.com/vdemeester/k8s-pkg-credentialprovider v1.17.4/go.mod h1:inCTmtUdr5KJbreVojo06krnTgaeAz/Z7lynpPk/Q2c=
github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE=
github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU=
github.com/vmware/govmomi v0.20.3/go.mod h1:URlwyTFZX72RmxtxuaFL2Uj3fD1JTvZdx59bHWk6aFU=
github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
github.com/willf/bitset v1.1.11 h1:N7Z7E9UvjW+sGsEl7k/SJrvY2reP1A07MrGuCjIOjRE=
github.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI=
github.com/xanzy/go-gitlab v0.31.0/go.mod h1:sPLojNBn68fMUWSxIJtdVVIP8uSBYqesTfDUseX11Ug=
github.com/xanzy/go-gitlab v0.32.0/go.mod h1:sPLojNBn68fMUWSxIJtdVVIP8uSBYqesTfDUseX11Ug=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v0.0.0-20180618132009-1d523034197f/go.mod h1:5yf86TLmAcydyeJq5YvxkGPE2fm/u4myDekKRoLuqhs=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs=
github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA=
github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg=
go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg=
go.mozilla.org/gopgagent v0.0.0-20170926210634-4d7ea76ff71a h1:N7VD+PwpJME2ZfQT8+ejxwA4Ow10IkGbU0MGf94ll8k=
go.mozilla.org/gopgagent v0.0.0-20170926210634-4d7ea76ff71a/go.mod h1:YDKUvO0b//78PaaEro6CAPH6NqohCmL2Cwju5XI2HoE=
go.mozilla.org/sops/v3 v3.7.0 h1:JuurncZrzjzirMNiQLm5WZLPyB5vcWhgre9YAWlTusA=
go.mozilla.org/sops/v3 v3.7.0/go.mod h1:CJzeerUlKPLyVr8FxEGgEmc7LgUq4hwzqGxJqs8b+1c=
go.opencensus.io v0.15.0/go.mod h1:UffZAU+4sDEINUGP/B7UfBBkq4fqLu9zXAX7ke6CHW0=
go.opencensus.io v0.18.0/go.mod h1:vKdFvxhtzZ9onBp9VKHK8z/sRpBMnKAsufL7wlDrCOA=
go.opencensus.io v0.19.1/go.mod h1:gug0GbSHa8Pafr0d2urOSgoXHZ6x/RUlaiT0d9pqb4A=
go.opencensus.io v0.19.2/go.mod h1:NO/8qkisMZLZ1FCsKNqtJPwc8/TaclWyY0B6wcYNg9M=
go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.3 h1:8sGtKOrtQqkN1bp2AtX+misvLIlOmsEsNd+9NIcPEm8=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.4.0 h1:cxzIVoETapQEqDhQu3QfnvXAV4AlzcvUCxkVUFw3+EU=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
go4.org v0.0.0-20180809161055-417644f6feb5/go.mod h1:MkTOUMDaeVYJUOUsaDXIhWPZYa1yOyC1qaOBpL57BhE=
gocloud.dev v0.19.0/go.mod h1:SmKwiR8YwIMMJvQBKLsC3fHNyMwXLw3PMDO+VVteJMI=
golang.org/x/build v0.0.0-20190314133821-5284462c4bec/go.mod h1:atTaCNAy0f16Ah5aV1gMSwgiKVHwu/JncqDpuRr7lS4=
golang.org/x/crypto v0.0.0-20171113213409-9f005a07e0d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181030102418-4d3f4d9ffa16/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190418165655-df01cb2cc480/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191002192127-34f69633bfdc/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201117144127-c1f2f97bffc9/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83 h1:/ZScEX8SfEmUGRHs0gxpqteO5nfNW6axyZbBdw9A12g=
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190312203227-4b39c73a6495/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190731235908-ec7cb31e5a56/go.mod h1:JhuoJpWY28nO4Vef9tZUw9qufEGTyX1+7lmHxV5q5G4=
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
golang.org/x/exp v0.0.0-20210126221216-84987778548c/go.mod h1:I6l2HNBLBZEcrOoCpyKLdY2lHoRZ8lI4x60KMCQDft4=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20181217174547-8f45f776aaf1/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
golang.org/x/mobile v0.0.0-20201217150744-e6ae53a27f4f/go.mod h1:skQtrUTUwhdJvXM/2KKJzY8pDgNr9I/FOMqDVRPBUS4=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.1.1-0.20191209134235-331c550502dd/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.1-0.20200828183125-ce943fd02449/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180911220305-26e67e76b6c3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181029044818-c44066c5c816/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181108082009-03003ca0c849/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190619014844-b5b0513f8c1b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191112182307-2180aed22343/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b h1:uwuIcX0g4Yl1NC5XAz37xsr2lTtcqevgzYNVt49waME=
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/oauth2 v0.0.0-20180724155351-3d292e4d0cdc/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190402181905-9f3314589c9a/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d h1:TzXSXBo42m9gQenoE3b9BGiEpg5IG2JkU5FkPIawgtw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/perf v0.0.0-20180704124530-6e6d33e29852/go.mod h1:JLpeXjPJfIyPr5TlbXLkXWLhP8nz10XfvxElABhCtcw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9 h1:SQFwaSi55rU7vdNs9Yr0Z324VNlrF+0wMqRXT4St8ck=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181029174526-d69651ed3497/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181218192612-074acd46bca6/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190129075346-302c3dd5f1cc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190209173611-3b5209105503/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190514135907-3a4b5fb9f71f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190602015325-4c4f7f33c9ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190620070143-6f217b454f45/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191112214154-59a1497f0cea/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191210023423-ac6580df4449/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200120151820-655fe14d7479/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200622214017-ed371f2e16b4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200728102440-3e129f6d46b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200909081042-eff7692f9009/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200916030750-2334cc1a136f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200917073148-efd3b9a0ff20/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201013081832-0aaa2718063a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43 h1:SgQ6LNaYJU0JIuEHv9+s6EbhSCwYeAf5Yvj6lpYlqAE=
golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221 h1:/ZHdbVpdR/jk3g30/d4yUL0JU9kksj8+F/bnQUVLGDM=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20181227161524-e6919f6577db/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 h1:NusfzzA6yGQ+ua51ck7E3omNUX/JuqbFSaRGqU8CcLI=
golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181219222714-6e267b5cc78e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190221204921-83362c3779f5/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190706070813-72ffa07ba3db/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
golang.org/x/tools v0.0.0-20190719005602-e377ae9d6386/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113232020-e2727e816f5a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200102140908-9497f49d5709/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200117012304-6edc0a871e69/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200204192400-7124308813f3/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
golang.org/x/tools v0.0.0-20200331202046-9d5940d49312/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200414032229-332987a829c3/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200426102838-f3a5411a4c3b/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200502202811-ed308ab3e770/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200612220849-54c614fe050c/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gonum.org/v1/gonum v0.0.0-20190331200053-3d26580ed485/go.mod h1:2ltnJ7xHfj0zHS40VVPYEAAMTa3ZGguvHGBSJeRWqE0=
gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
gonum.org/v1/netlib v0.0.0-20190331212654-76723241ea4e/go.mod h1:kS+toOQn6AQKjmKJ7gzohV1XkqsFehRA2FbsbkopSuQ=
google.golang.org/api v0.0.0-20160322025152-9bf6e6e569ff/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20181220000619-583d854617af/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.2.0/go.mod h1:IfRCZScioGtypHNTlz3gFk67J8uePVW7uDTBzXuIkhU=
google.golang.org/api v0.3.0/go.mod h1:IuvZyQh8jgscv8qWfQ4ABd8m7hEudgBFM/EdhA3BnXw=
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.5.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.6.0/go.mod h1:btoxGiFvQNVUZQ8W08zLtrVS08CNpINPEfxXxgJL1Q4=
google.golang.org/api v0.6.1-0.20190607001116-5213b8090861/go.mod h1:btoxGiFvQNVUZQ8W08zLtrVS08CNpINPEfxXxgJL1Q4=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
google.golang.org/api v0.25.0 h1:LodzhlzZEUfhXzNUMIfVlf9Gr6Ua5MMtoFWh7+f47qA=
google.golang.org/api v0.25.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.6 h1:lMO5rYAqUxkmaj76jAkRUvt5JZgFymx/+Q5Mzfivuhc=
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/cloud v0.0.0-20151119220103-975617b05ea8/go.mod h1:0H1ncTHf11KCFhTc/+EFRbzSCOZx+VUbRMk55Yv5MYk=
google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63 h1:YzfoEYWbODU5Fbt37+h7X16BWQbad7Q4S6gclTKFXM8=
google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.19.1/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.22.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
google.golang.org/grpc v1.29.1 h1:EC2SB8S04d2r73uptxphDSUG+kTKVgjRPF+N3xpxRB4=
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c=
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20141024133853-64131543e789/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/gcfg.v1 v1.2.0/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o=
gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo=
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/ini.v1 v1.44.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.56.0 h1:DPMeDvGTM54DXbPkVIZsp19fp/I2K7zwA/itHYHKo8Y=
gopkg.in/ini.v1 v1.56.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
gopkg.in/square/go-jose.v2 v2.3.1 h1:SK5KegNXmKmqE342YYN2qPHEnUYeoMiXXl1poUlI+o4=
gopkg.in/square/go-jose.v2 v2.3.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/urfave/cli.v1 v1.20.0 h1:NdAVW6RYxDif9DhDHaAortIu956m2c0v+09AZBPTbE0=
gopkg.in/urfave/cli.v1 v1.20.0/go.mod h1:vuBzUtMdQeixQj8LVd+/98pzhxNGQoyuPBlsXHOQNO0=
gopkg.in/warnings.v0 v0.1.1/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200121175148-a6ecf24a6d71/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107172259-749611fa9fcc h1:XANm4xAMEQhRdWKqaL0qmhGDv7RuobwCO97TIlktaQE=
gopkg.in/yaml.v3 v3.0.0-20210107172259-749611fa9fcc/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=
gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
gotest.tools/v3 v3.0.2 h1:kG1BFyqVHuQoVQiR1bWGnfz/fmHvvuiSPIV7rvl360E=
gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=
grpc.go4.org v0.0.0-20170609214715-11d0a25b4919/go.mod h1:77eQGdRu53HpSqPFJFmuJdjuHRquDANNeA4x7B8WQ9o=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20180920025451-e3ad64cb4ed3/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.5/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
k8s.io/api v0.0.0-20180904230853-4e7be11eab3f/go.mod h1:iuAfoD4hCxJ8Onx9kaTIt30j7jUFS00AXQi6QMi99vA=
k8s.io/api v0.17.4/go.mod h1:5qxx6vjmwUVG2nHQTKGlLts8Tbok8PzHl4vHtVFuZCA=
k8s.io/api v0.19.0/go.mod h1:I1K45XlvTrDjmj5LoM5LuP/KYrhWbjUKT/SoPG0qTjw=
k8s.io/apimachinery v0.0.0-20180904193909-def12e63c512/go.mod h1:ccL7Eh7zubPUSh9A3USN90/OzHNSVN6zxzde07TDCL0=
k8s.io/apimachinery v0.17.4/go.mod h1:gxLnyZcGNdZTCLnq3fgzyg2A5BVCHTNDFrw8AmuJ+0g=
k8s.io/apimachinery v0.19.0/go.mod h1:DnPGDnARWFvYa3pMHgSxtbZb7gpzzAZ1pTfaUNDVlmA=
k8s.io/apiserver v0.17.4/go.mod h1:5ZDQ6Xr5MNBxyi3iUZXS84QOhZl+W7Oq2us/29c0j9I=
k8s.io/client-go v0.0.0-20180910083459-2cefa64ff137/go.mod h1:7vJpHMYJwNQCWgzmNV+VYUl1zCObLyodBc8nIyt8L5s=
k8s.io/client-go v0.17.4/go.mod h1:ouF6o5pz3is8qU0/qYL2RnoxOPqgfuidYLowytyLJmc=
k8s.io/client-go v0.19.0/go.mod h1:H9E/VT95blcFQnlyShFgnFT9ZnJOAceiUHM3MlRC+mU=
k8s.io/cloud-provider v0.17.4/go.mod h1:XEjKDzfD+b9MTLXQFlDGkk6Ho8SGMpaU8Uugx/KNK9U=
k8s.io/code-generator v0.17.2/go.mod h1:DVmfPQgxQENqDIzVR2ddLXMH34qeszkKSdH/N+s+38s=
k8s.io/component-base v0.17.4/go.mod h1:5BRqHMbbQPm2kKu35v3G+CpVq4K0RJKC7TRioF0I9lE=
k8s.io/cri-api v0.17.3/go.mod h1:X1sbHmuXhwaHs9xxYffLqJogVsnI+f6cPRcgPel7ywM=
k8s.io/csi-translation-lib v0.17.4/go.mod h1:CsxmjwxEI0tTNMzffIAcgR9lX4wOh6AKHdxQrT7L0oo=
k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=
k8s.io/gengo v0.0.0-20190822140433-26a664648505/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=
k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=
k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I=
k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE=
k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y=
k8s.io/kube-openapi v0.0.0-20180731170545-e3762e86a74c/go.mod h1:BXM9ceUBTj2QnfH2MK1odQs778ajze1RxcmP6S8RVVc=
k8s.io/kube-openapi v0.0.0-20191107075043-30be4d16710a/go.mod h1:1TqjTSzOxsLGIKfj0lK8EeCP7K1iUG65v09OM0/WG5E=
k8s.io/kube-openapi v0.0.0-20200805222855-6aeccd4b50c6/go.mod h1:UuqjUnNftUyPE5H64/qeyjQoUZhGpeFDVdxjTeEVN2o=
k8s.io/kubernetes v1.11.10/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk=
k8s.io/kubernetes v1.13.0/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk=
k8s.io/legacy-cloud-providers v0.17.4/go.mod h1:FikRNoD64ECjkxO36gkDgJeiQWwyZTuBkhu+yxOc1Js=
k8s.io/utils v0.0.0-20191114184206-e782cd3c129f/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew=
k8s.io/utils v0.0.0-20200729134348-d5654de09c73/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA=
modernc.org/cc v1.0.0/go.mod h1:1Sk4//wdnYJiUIxnW8ddKpaOJCF37yAdqYnkxUpaYxw=
modernc.org/golex v1.0.0/go.mod h1:b/QX9oBD/LhixY6NDh+IdGv17hgB+51fET1i2kPSmvk=
modernc.org/mathutil v1.0.0/go.mod h1:wU0vUrJsVWBZ4P6e7xtFJEhFSNsfRLJ8H458uRjg03k=
modernc.org/strutil v1.0.0/go.mod h1:lstksw84oURvj9y3tn8lGvRxyRC1S2+g5uuIzNfIOBs=
modernc.org/xc v1.0.0/go.mod h1:mRNCo0bvLjGhHO9WsyuKVU4q0ceiDDDoEeWDJHrNx8I=
mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc=
mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4=
mvdan.cc/unparam v0.0.0-20190720180237-d51796306d8f/go.mod h1:4G1h5nDURzA3bwVMZIVpwbkw+04kSxk3rAtzlimaUJw=
mvdan.cc/unparam v0.0.0-20200501210554-b37ab49443f7/go.mod h1:HGC5lll35J70Y5v7vCGb9oLhHoScFwkHDJm/05RdSTc=
pack.ag/amqp v0.11.2/go.mod h1:4/cbmt4EJXSKlG6LCfWHoqmN0uFdy5i/+YFz+fTfhV4=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e/go.mod h1:wWxsB5ozmmv/SG7nM11ayaAW51xMvak/t1r0CSlcokI=
sigs.k8s.io/structured-merge-diff v1.0.1-0.20191108220359-b1b620dd3f06/go.mod h1:/ULNhyfzRopfcjskuui0cTITekDduZ7ycKN3oUT9R18=
sigs.k8s.io/structured-merge-diff/v4 v4.0.1/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw=
sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=
sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc=
sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4/go.mod h1:ketZ/q3QxT9HOBeFhu6RdvsftgpsbFHBF5Cas6cDKZ0=
sourcegraph.com/sqs/pbtypes v1.0.0/go.mod h1:3AciMUv4qUuRHRHhOG4TZOB+72GdPVz5k+c648qsFS4=
|
closed
|
dagger/dagger
|
https://github.com/dagger/dagger
| 316 |
Support command "up --no-cache" to temporarily disable the cache
|
It's important to be able to disable the cache on demand, for a single run.
`dagger up --no-cache`
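
The request is for a one-shot `--no-cache` flag on `dagger up`. As a rough, hypothetical sketch only (not the change merged in the linked PR), the snippet below shows how such a boolean flag could be registered and read using the cobra/viper pattern this codebase already uses in `compute.go`. The command name, the flag wiring, and the print statement standing in for the actual solver call are illustrative assumptions.

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
	"github.com/spf13/viper"
)

func main() {
	upCmd := &cobra.Command{
		Use:   "up",
		Short: "Bring the current deployment online",
		PreRun: func(cmd *cobra.Command, args []string) {
			// Same per-invocation re-bind workaround used in compute.go for
			// the viper duplicate-flag issue.
			if err := viper.BindPFlags(cmd.Flags()); err != nil {
				panic(err)
			}
		},
		Run: func(cmd *cobra.Command, args []string) {
			if viper.GetBool("no-cache") {
				// In the real command this is where the solver would be told
				// to bypass cached results for this single run.
				fmt.Println("cache disabled for this run")
			}
			fmt.Println("up: plan computed")
		},
	}

	upCmd.Flags().Bool("no-cache", false, "Disable caching for this run only")
	if err := viper.BindPFlags(upCmd.Flags()); err != nil {
		panic(err)
	}

	if err := upCmd.Execute(); err != nil {
		panic(err)
	}
}
```

A per-invocation flag like this keeps cache-busting opt-in and one-shot, which matches the "temporarily disable" wording of the issue title.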
|
https://github.com/dagger/dagger/issues/316
|
https://github.com/dagger/dagger/pull/327
|
551f281bf7bc404a02b01600655cfdc42097880e
|
493406afe75277a50478cb4a69cf349674bc1858
| 2021-04-12T21:47:18Z |
go
| 2021-04-15T18:37:45Z |
cmd/dagger/cmd/common/common.go
|
package common
import (
"context"
"os"
"dagger.io/go/dagger"
"github.com/rs/zerolog/log"
"github.com/spf13/viper"
)
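// GetCurrentDeploymentState returns the deployment selected with the --deployment
// flag, or the single deployment matching the current working directory; it exits
// fatally if no deployment matches or if several do.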
func GetCurrentDeploymentState(ctx context.Context, store *dagger.Store) *dagger.DeploymentState {
lg := log.Ctx(ctx)
deploymentName := viper.GetString("deployment")
if deploymentName != "" {
st, err := store.LookupDeploymentByName(ctx, deploymentName)
if err != nil {
lg.
Fatal().
Err(err).
Str("deploymentName", deploymentName).
Msg("failed to lookup deployment by name")
}
return st
}
wd, err := os.Getwd()
if err != nil {
lg.Fatal().Err(err).Msg("cannot get current working directory")
}
st, err := store.LookupDeploymentByPath(ctx, wd)
if err != nil {
lg.
Fatal().
Err(err).
Str("deploymentPath", wd).
Msg("failed to lookup deployment by path")
}
if len(st) == 0 {
lg.
Fatal().
Err(err).
Str("deploymentPath", wd).
Msg("no deployments match the current directory")
}
if len(st) > 1 {
deployments := []string{}
for _, s := range st {
deployments = append(deployments, s.Name)
}
lg.
Fatal().
Err(err).
Str("deploymentPath", wd).
Strs("deployments", deployments).
Msg("multiple deployments match the current directory, select one with `--deployment`")
}
return st[0]
}
// Re-compute a deployment (equivalent to `dagger up`).
func DeploymentUp(ctx context.Context, state *dagger.DeploymentState) *dagger.Deployment {
lg := log.Ctx(ctx)
c, err := dagger.NewClient(ctx, "")
if err != nil {
lg.Fatal().Err(err).Msg("unable to create client")
}
result, err := c.Do(ctx, state, func(ctx context.Context, deployment *dagger.Deployment, s dagger.Solver) error {
log.Ctx(ctx).Debug().Msg("bringing deployment up")
return deployment.Up(ctx, s)
})
if err != nil {
lg.Fatal().Err(err).Msg("failed to up deployment")
}
return result
}
|
closed
|
dagger/dagger
|
https://github.com/dagger/dagger
| 316 |
Support command "up --no-cache" to temporarily disable the cache
|
It's important to be able to disable the cache on demand, for a single run.
`dagger up --no-cache`
|
https://github.com/dagger/dagger/issues/316
|
https://github.com/dagger/dagger/pull/327
|
551f281bf7bc404a02b01600655cfdc42097880e
|
493406afe75277a50478cb4a69cf349674bc1858
| 2021-04-12T21:47:18Z |
go
| 2021-04-15T18:37:45Z |
cmd/dagger/cmd/compute.go
|
package cmd
import (
"encoding/json"
"errors"
"fmt"
"os"
"strings"
"cuelang.org/go/cue"
"dagger.io/go/cmd/dagger/cmd/common"
"dagger.io/go/cmd/dagger/logger"
"dagger.io/go/dagger"
"dagger.io/go/dagger/compiler"
"go.mozilla.org/sops/v3"
"go.mozilla.org/sops/v3/decrypt"
"github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
var computeCmd = &cobra.Command{
Use: "compute CONFIG",
Short: "Compute a configuration",
Args: cobra.ExactArgs(1),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
st := &dagger.DeploymentState{
ID: uuid.New().String(),
Name: "FIXME",
PlanSource: dagger.DirInput(args[0], []string{"*.cue", "cue.mod"}),
}
for _, input := range viper.GetStringSlice("input-string") {
parts := strings.SplitN(input, "=", 2)
k, v := parts[0], parts[1]
err := st.SetInput(k, dagger.TextInput(v))
if err != nil {
lg.
Fatal().
Err(err).
Str("input", k).
Msg("failed to add input")
}
}
for _, input := range viper.GetStringSlice("input-dir") {
parts := strings.SplitN(input, "=", 2)
k, v := parts[0], parts[1]
err := st.SetInput(k, dagger.DirInput(v, []string{}))
if err != nil {
lg.
Fatal().
Err(err).
Str("input", k).
Msg("failed to add input")
}
}
for _, input := range viper.GetStringSlice("input-git") {
parts := strings.SplitN(input, "=", 2)
k, v := parts[0], parts[1]
err := st.SetInput(k, dagger.GitInput(v, "", ""))
if err != nil {
lg.
Fatal().
Err(err).
Str("input", k).
Msg("failed to add input")
}
}
if f := viper.GetString("input-json"); f != "" {
lg := lg.With().Str("path", f).Logger()
content, err := os.ReadFile(f)
if err != nil {
lg.Fatal().Err(err).Msg("failed to read file")
}
plaintext, err := decrypt.Data(content, "json")
if err != nil && !errors.Is(err, sops.MetadataNotFound) {
lg.Fatal().Err(err).Msg("unable to decrypt")
}
if len(plaintext) > 0 {
content = plaintext
}
if !json.Valid(content) {
lg.Fatal().Msg("invalid json")
}
err = st.SetInput("", dagger.JSONInput(string(content)))
if err != nil {
lg.Fatal().Err(err).Msg("failed to add input")
}
}
if f := viper.GetString("input-yaml"); f != "" {
lg := lg.With().Str("path", f).Logger()
content, err := os.ReadFile(f)
if err != nil {
lg.Fatal().Err(err).Msg("failed to read file")
}
plaintext, err := decrypt.Data(content, "yaml")
if err != nil && !errors.Is(err, sops.MetadataNotFound) {
lg.Fatal().Err(err).Msg("unable to decrypt")
}
if len(plaintext) > 0 {
content = plaintext
}
err = st.SetInput("", dagger.YAMLInput(string(content)))
if err != nil {
lg.Fatal().Err(err).Msg("failed to add input")
}
}
if f := viper.GetString("input-file"); f != "" {
lg := lg.With().Str("path", f).Logger()
parts := strings.SplitN(f, "=", 2)
k, v := parts[0], parts[1]
content, err := os.ReadFile(v)
if err != nil {
lg.Fatal().Err(err).Msg("failed to read file")
}
if len(content) > 0 {
err = st.SetInput(k, dagger.FileInput(v))
if err != nil {
lg.Fatal().Err(err).Msg("failed to set input string")
}
}
}
deployment := common.DeploymentUp(ctx, st)
v := compiler.NewValue()
if err := v.FillPath(cue.MakePath(), deployment.Plan()); err != nil {
lg.Fatal().Err(err).Msg("failed to merge")
}
if err := v.FillPath(cue.MakePath(), deployment.Input()); err != nil {
lg.Fatal().Err(err).Msg("failed to merge")
}
if err := v.FillPath(cue.MakePath(), deployment.Computed()); err != nil {
lg.Fatal().Err(err).Msg("failed to merge")
}
fmt.Println(v.JSON())
},
}
func init() {
computeCmd.Flags().StringSlice("input-string", []string{}, "TARGET=STRING")
computeCmd.Flags().StringSlice("input-dir", []string{}, "TARGET=PATH")
computeCmd.Flags().String("input-file", "", "TARGET=PATH")
computeCmd.Flags().StringSlice("input-git", []string{}, "TARGET=REMOTE#REF")
computeCmd.Flags().String("input-json", "", "JSON")
computeCmd.Flags().String("input-yaml", "", "YAML")
if err := viper.BindPFlags(computeCmd.Flags()); err != nil {
panic(err)
}
}
|