mirror of https://github.com/zebrajr/ansible.git (synced 2025-12-06 12:19:53 +01:00)
parent 45d62a726c
commit 0cc771dc3c
@@ -107,7 +107,6 @@ from ansible import context
 from ansible.utils import display as _display
 from ansible.cli.arguments import option_helpers as opt_help
 from ansible.inventory.manager import InventoryManager
-from ansible.module_utils.six import string_types
 from ansible.module_utils.common.text.converters import to_bytes, to_text
 from ansible.module_utils.common.collections import is_sequence
 from ansible.module_utils.common.file import is_executable
@@ -403,8 +402,8 @@ class CLI(ABC):
 options = super(MyCLI, self).post_process_args(options)
 if options.addition and options.subtraction:
 raise AnsibleOptionsError('Only one of --addition and --subtraction can be specified')
-if isinstance(options.listofhosts, string_types):
-options.listofhosts = string_types.split(',')
+if isinstance(options.listofhosts, str):
+options.listofhosts = options.listofhosts.split(',')
 return options
 """

@@ -440,7 +439,7 @@ class CLI(ABC):
 if options.inventory:

 # should always be list
-if isinstance(options.inventory, string_types):
+if isinstance(options.inventory, str):
 options.inventory = [options.inventory]

 # Ensure full paths when needed
@@ -24,7 +24,6 @@ from ansible.config.manager import ConfigManager
 from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleRequiredOptionError
 from ansible.module_utils.common.text.converters import to_native, to_text, to_bytes
 from ansible._internal import _json
-from ansible.module_utils.six import string_types
 from ansible.parsing.quoting import is_quoted
 from ansible.parsing.yaml.dumper import AnsibleDumper
 from ansible.utils.color import stringc
@@ -288,21 +287,21 @@ class ConfigCLI(CLI):
 default = '0'
 elif default:
 if stype == 'list':
-if not isinstance(default, string_types):
+if not isinstance(default, str):
 # python lists are not valid env ones
 try:
 default = ', '.join(default)
 except Exception as e:
 # list of other stuff
 default = '%s' % to_native(default)
-if isinstance(default, string_types) and not is_quoted(default):
+if isinstance(default, str) and not is_quoted(default):
 default = shlex.quote(default)
 elif default is None:
 default = ''

 if subkey in settings[setting] and settings[setting][subkey]:
 entry = settings[setting][subkey][-1]['name']
-if isinstance(settings[setting]['description'], string_types):
+if isinstance(settings[setting]['description'], str):
 desc = settings[setting]['description']
 else:
 desc = '\n#'.join(settings[setting]['description'])
@@ -343,7 +342,7 @@ class ConfigCLI(CLI):
 sections[s] = new_sections[s]
 continue

-if isinstance(opt['description'], string_types):
+if isinstance(opt['description'], str):
 desc = '# (%s) %s' % (opt.get('type', 'string'), opt['description'])
 else:
 desc = "# (%s) " % opt.get('type', 'string')
@@ -361,7 +360,7 @@ class ConfigCLI(CLI):
 seen[entry['section']].append(entry['key'])

 default = self.config.template_default(opt.get('default', ''), get_constants())
-if opt.get('type', '') == 'list' and not isinstance(default, string_types):
+if opt.get('type', '') == 'list' and not isinstance(default, str):
 # python lists are not valid ini ones
 default = ', '.join(default)
 elif default is None:
@@ -32,7 +32,6 @@ from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
 from ansible.module_utils.common.text.converters import to_native, to_text
 from ansible.module_utils.common.collections import is_sequence
 from ansible.module_utils.common.yaml import yaml_dump
-from ansible.module_utils.six import string_types
 from ansible.parsing.plugin_docs import read_docstub
 from ansible.parsing.yaml.dumper import AnsibleDumper
 from ansible.parsing.yaml.loader import AnsibleLoader
@@ -1274,7 +1273,7 @@ class DocCLI(CLI, RoleMixin):
 sub_indent = inline_indent + extra_indent
 if is_sequence(opt['description']):
 for entry_idx, entry in enumerate(opt['description'], 1):
-if not isinstance(entry, string_types):
+if not isinstance(entry, str):
 raise AnsibleError("Expected string in description of %s at index %s, got %s" % (o, entry_idx, type(entry)))
 if entry_idx == 1:
 text.append(key + DocCLI.warp_fill(DocCLI.tty_ify(entry), limit,
@@ -1282,7 +1281,7 @@ class DocCLI(CLI, RoleMixin):
 else:
 text.append(DocCLI.warp_fill(DocCLI.tty_ify(entry), limit, initial_indent=sub_indent, subsequent_indent=sub_indent))
 else:
-if not isinstance(opt['description'], string_types):
+if not isinstance(opt['description'], str):
 raise AnsibleError("Expected string in description of %s, got %s" % (o, type(opt['description'])))
 text.append(key + DocCLI.warp_fill(DocCLI.tty_ify(opt['description']), limit,
 initial_indent=inline_indent, subsequent_indent=sub_indent, initial_extra=len(extra_indent)))
@@ -1466,7 +1465,7 @@ class DocCLI(CLI, RoleMixin):
 if k not in doc:
 continue
 text.append('')
-if isinstance(doc[k], string_types):
+if isinstance(doc[k], str):
 text.append('%s: %s' % (k.upper(), DocCLI.warp_fill(DocCLI.tty_ify(doc[k]),
 limit - (len(k) + 2), subsequent_indent=opt_indent)))
 elif isinstance(doc[k], (list, tuple)):
@@ -1478,7 +1477,7 @@ class DocCLI(CLI, RoleMixin):
 if doc.get('examples', False):
 text.append('')
 text.append(_format("EXAMPLES:", 'bold'))
-if isinstance(doc['examples'], string_types):
+if isinstance(doc['examples'], str):
 text.append(doc.pop('examples').strip())
 else:
 try:
@@ -1572,7 +1571,7 @@ class DocCLI(CLI, RoleMixin):
 continue
 text.append('')
 header = _format(k.upper(), 'bold')
-if isinstance(doc[k], string_types):
+if isinstance(doc[k], str):
 text.append('%s: %s' % (header, DocCLI.warp_fill(DocCLI.tty_ify(doc[k]), limit - (len(k) + 2), subsequent_indent=opt_indent)))
 elif isinstance(doc[k], (list, tuple)):
 text.append('%s: %s' % (header, ', '.join(doc[k])))
@@ -1584,7 +1583,7 @@ class DocCLI(CLI, RoleMixin):
 if doc.get('plainexamples', False):
 text.append('')
 text.append(_format("EXAMPLES:", 'bold'))
-if isinstance(doc['plainexamples'], string_types):
+if isinstance(doc['plainexamples'], str):
 text.append(doc.pop('plainexamples').strip())
 else:
 try:
@@ -1621,7 +1620,7 @@ def _do_yaml_snippet(doc):

 for o in sorted(doc['options'].keys()):
 opt = doc['options'][o]
-if isinstance(opt['description'], string_types):
+if isinstance(opt['description'], str):
 desc = DocCLI.tty_ify(opt['description'])
 else:
 desc = DocCLI.tty_ify(" ".join(opt['description']))
@@ -54,7 +54,6 @@ from ansible.module_utils.common.collections import is_iterable
 from ansible.module_utils.common.yaml import yaml_dump, yaml_load
 from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
 from ansible._internal._datatag._tags import TrustedAsTemplate
-from ansible.module_utils import six
 from ansible.parsing.dataloader import DataLoader
 from ansible.playbook.role.requirement import RoleRequirement
 from ansible._internal._templating._engine import TemplateEngine
@@ -65,7 +64,6 @@ from ansible.utils.plugin_docs import get_versioned_doclink
 from ansible.utils.vars import load_extra_vars

 display = Display()
-urlparse = six.moves.urllib.parse.urlparse


 def with_collection_artifacts_manager(wrapped_method):
@@ -27,7 +27,6 @@ from ansible._internal._datatag._tags import TrustedAsTemplate
 from ansible.module_utils.parsing.convert_bool import boolean
 from ansible.module_utils.common.text.converters import to_text, to_native
 from ansible.module_utils.connection import write_to_stream
-from ansible.module_utils.six import string_types
 from ansible.playbook.task import Task
 from ansible.plugins import get_plugin_class
 from ansible.plugins.loader import become_loader, cliconf_loader, connection_loader, httpapi_loader, netconf_loader, terminal_loader
@@ -340,7 +339,7 @@ class TaskExecutor:
 })

 # if plugin is loaded, get resolved name, otherwise leave original task connection
-if self._connection and not isinstance(self._connection, string_types):
+if self._connection and not isinstance(self._connection, str):
 task_fields['connection'] = getattr(self._connection, 'ansible_name')

 tr = _RawTaskResult(
@@ -25,7 +25,6 @@ from ansible.errors import AnsibleError
 from ansible.galaxy.user_agent import user_agent
 from ansible.module_utils.api import retry_with_delays_and_condition
 from ansible.module_utils.api import generate_jittered_backoff
-from ansible.module_utils.six import string_types
 from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
 from ansible.module_utils.urls import open_url, prepare_multipart
 from ansible.utils.display import Display
@@ -595,11 +594,11 @@ class GalaxyAPI:
 page_size = kwargs.get('page_size', None)
 author = kwargs.get('author', None)

-if tags and isinstance(tags, string_types):
+if tags and isinstance(tags, str):
 tags = tags.split(',')
 search_url += '&tags_autocomplete=' + '+'.join(tags)

-if platforms and isinstance(platforms, string_types):
+if platforms and isinstance(platforms, str):
 platforms = platforms.split(',')
 search_url += '&platforms_autocomplete=' + '+'.join(platforms)

@@ -24,7 +24,6 @@ from ansible.galaxy.dependency_resolution.versioning import (
 is_pre_release,
 meets_requirements,
 )
-from ansible.module_utils.six import string_types
 from ansible.utils.version import SemanticVersion, LooseVersion

 try:
@@ -278,7 +277,7 @@ class CollectionDependencyProviderBase(AbstractProvider):
 # NOTE: Another known mistake is setting a minor part of the SemVer notation
 # NOTE: skipping the "patch" bit like "1.0" which is assumed non-compliant even
 # NOTE: after the conversion to string.
-if not isinstance(version, string_types):
+if not isinstance(version, str):
 raise ValueError(version_err)
 elif version != '*':
 try:
@@ -33,7 +33,6 @@ from ansible._internal import _json, _wrapt
 from ansible._internal._json import EncryptedStringBehavior
 from ansible.errors import AnsibleError, AnsibleOptionsError
 from ansible.inventory.data import InventoryData
-from ansible.module_utils.six import string_types
 from ansible.module_utils.common.text.converters import to_bytes, to_text
 from ansible.parsing.utils.addresses import parse_address
 from ansible.plugins.loader import inventory_loader
@@ -112,7 +111,7 @@ def split_host_pattern(pattern):
 results = (split_host_pattern(p) for p in pattern)
 # flatten the results
 return list(itertools.chain.from_iterable(results))
-elif not isinstance(pattern, string_types):
+elif not isinstance(pattern, str):
 pattern = to_text(pattern, errors='surrogate_or_strict')

 # If it's got commas in it, we'll treat it as a straightforward
@@ -162,7 +161,7 @@ class InventoryManager(object):
 # the inventory dirs, files, script paths or lists of hosts
 if sources is None:
 self._sources = []
-elif isinstance(sources, string_types):
+elif isinstance(sources, str):
 self._sources = [sources]
 else:
 self._sources = sources
lib/ansible/module_utils/_internal/_no_six.py (new file, 86 lines)
@@ -0,0 +1,86 @@
+from __future__ import annotations
+
+import sys
+import types
+
+from ansible.module_utils.common import warnings
+
+
+# INLINED FROM THE SIX LIBRARY, see lib/ansible/module_utils/six/__init__.py
+# Copyright (c) 2010-2024 Benjamin Peterson
+def with_metaclass(meta, *bases):
+    """Create a base class with a metaclass."""
+
+    # This requires a bit of explanation: the basic idea is to make a dummy
+    # metaclass for one level of class instantiation that replaces itself with
+    # the actual metaclass.
+    class metaclass(type):
+
+        def __new__(cls, name, this_bases, d):
+            if sys.version_info[:2] >= (3, 7):
+                # This version introduced PEP 560 that requires a bit
+                # of extra care (we mimic what is done by __build_class__).
+                resolved_bases = types.resolve_bases(bases)
+                if resolved_bases is not bases:
+                    d['__orig_bases__'] = bases
+            else:
+                resolved_bases = bases
+            return meta(name, resolved_bases, d)
+
+        @classmethod
+        def __prepare__(cls, name, this_bases):
+            return meta.__prepare__(name, bases)
+
+    return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+def add_metaclass(metaclass):
+    """Class decorator for creating a class with a metaclass."""
+
+    def wrapper(cls):
+        orig_vars = cls.__dict__.copy()
+        slots = orig_vars.get('__slots__')
+        if slots is not None:
+            if isinstance(slots, str):
+                slots = [slots]
+            for slots_var in slots:
+                orig_vars.pop(slots_var)
+        orig_vars.pop('__dict__', None)
+        orig_vars.pop('__weakref__', None)
+        if hasattr(cls, '__qualname__'):
+            orig_vars['__qualname__'] = cls.__qualname__
+        return metaclass(cls.__name__, cls.__bases__, orig_vars)
+
+    return wrapper
+
+
+def iteritems(d, **kw):
+    return iter(d.items(**kw))
+
+
+_mini_six = {
+    "PY2": False,
+    "PY3": True,
+    "text_type": str,
+    "binary_type": bytes,
+    "string_types": (str,),
+    "integer_types": (int,),
+    "iteritems": iteritems,
+    "add_metaclass": add_metaclass,
+    "with_metaclass": with_metaclass,
+}
+# INLINED SIX END
+
+
+def deprecate(importable_name: str, module_name: str, *deprecated_args) -> object:
+    """Inject import-time deprecation warnings."""
+    if not (importable_name in deprecated_args and (importable := _mini_six.get(importable_name, ...)) is not ...):
+        raise AttributeError(f"module {module_name!r} has no attribute {importable_name!r}")
+
+    # TODO Inspect and remove all calls to this function in 2.24
+    warnings.deprecate(
+        msg=f"Importing {importable_name!r} from {module_name!r} is deprecated.",
+        version="2.24",
+    )
+
+    return importable
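Not part of the commit, but useful orientation: the per-module __getattr__ hooks added further down rely on PEP 562, so deprecate() only runs for names a module no longer defines itself. A minimal sketch of the intended effect, using one of the modules touched later in this diff (the importing module is chosen purely for illustration):

    # Legacy code that still imports a six alias keeps working for now.
    from ansible.module_utils.common.validation import string_types  # falls through to the module __getattr__

    # The name now resolves from the _mini_six table and a deprecation
    # (slated for removal in 2.24) is recorded instead of importing six.
    assert string_types == (str,)
    assert isinstance("host1,host2", string_types)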
@@ -1,15 +1,35 @@
 # Copyright (c), Toshio Kuratomi <tkuratomi@ansible.com> 2016
 # Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)

-"""
-.. warn:: Use ansible.module_utils.common.text.converters instead.
-"""
 from __future__ import annotations

-# Backwards compat for people still calling it from this package
-# pylint: disable=unused-import
-import codecs
+from ansible.module_utils.common import warnings as _warnings

-from ansible.module_utils.six import PY3, text_type, binary_type

-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+_mini_six = {
+    "binary_type": bytes,
+    "text_type": str,
+    "PY3": True,
+}
+
+
+def __getattr__(importable_name: str) -> object:
+    """Inject import-time deprecation warnings."""
+    help_text: str | None = None
+    importable: object
+    if importable_name == "codecs":
+        import codecs
+        importable = codecs
+    elif importable_name in {"to_bytes", "to_native", "to_text"}:
+        from ansible.module_utils.common.text import converters
+        importable = getattr(converters, importable_name)
+        help_text = "Use ansible.module_utils.common.text.converters instead."
+    elif (importable := _mini_six.get(importable_name, ...)) is ...:
+        raise AttributeError(f"module {__name__!r} has no attribute {importable_name!r}")
+
+    _warnings.deprecate(
+        msg=f"Importing {importable_name!r} from {__name__!r} is deprecated.",
+        version="2.24",
+        help_text=help_text,
+    )
+    return importable
@@ -46,6 +46,15 @@ import tempfile
 import time
 import traceback

+from collections.abc import (
+    KeysView,
+    Mapping,
+    MutableMapping,
+    Sequence,
+    MutableSequence,
+    Set,
+    MutableSet,
+)
 from functools import reduce

 try:
@@ -123,13 +132,6 @@ def _get_available_hash_algorithms():
 AVAILABLE_HASH_ALGORITHMS = _get_available_hash_algorithms()

 from ansible.module_utils.common import json as _json

-from ansible.module_utils.six.moves.collections_abc import (
-    KeysView,
-    Mapping, MutableMapping,
-    Sequence, MutableSequence,
-    Set, MutableSet,
-)
 from ansible.module_utils.common.locale import get_best_parsable_locale
 from ansible.module_utils.common.process import get_bin_path
 from ansible.module_utils.common.file import (
@@ -2186,6 +2188,18 @@ def get_module_path():
 return os.path.dirname(os.path.realpath(__file__))


+_mini_six = {
+    "b": lambda s: s.encode("latin-1"),
+    "PY2": False,
+    "PY3": True,
+    "text_type": str,
+    "binary_type": bytes,
+    "string_types": (str,),
+    "integer_types": (int,),
+    "iteritems": lambda d, **kw: iter(d.items(**kw)),
+}
+
+
 def __getattr__(importable_name):
 """Inject import-time deprecation warnings."""
 if importable_name == 'datetime':
@@ -2203,24 +2217,12 @@ def __getattr__(importable_name):
 elif importable_name == 'repeat':
 from itertools import repeat
 importable = repeat
-elif importable_name in {
-    'PY2', 'PY3', 'b', 'binary_type', 'integer_types',
-    'iteritems', 'string_types', 'text_type',
-}:
-import importlib
-importable = getattr(
-    importlib.import_module('ansible.module_utils.six'),
-    importable_name
-)
 elif importable_name == 'map':
 importable = map
 elif importable_name == 'shlex_quote':
 importable = shlex.quote
-else:
-raise AttributeError(
-    f'cannot import name {importable_name !r} '
-    f"from '{__name__}' ({__file__ !s})"
-)
+elif (importable := _mini_six.get(importable_name, ...)) is ...:
+raise AttributeError(f"module {__name__!r} has no attribute {importable_name!r}")

 deprecate(
 msg=f"Importing '{importable_name}' from '{__name__}' is deprecated.",
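An aside, not part of the commit: the _mini_six.get(importable_name, ...) lookups above use Ellipsis as a sentinel so that entries whose value is falsy, such as "PY2": False, are not mistaken for missing names. A standalone sketch of that pattern:

    _table = {"PY2": False, "string_types": (str,)}

    def lookup(name):
        # Ellipsis can never be a stored value here, unlike None or False, so the
        # walrus assignment can fetch and test for "missing" in one expression.
        if (value := _table.get(name, ...)) is ...:
            raise AttributeError(f"no attribute {name!r}")
        return value

    assert lookup("PY2") is False          # a falsy value is still found
    assert lookup("string_types") == (str,)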
@@ -2,7 +2,7 @@
 # Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
 """Collections ABC import shim.

-Use `ansible.module_utils.six.moves.collections_abc` instead, which has been available since ansible-core 2.11.
+Use `collections.abc` instead.
 This module exists only for backwards compatibility.
 """

@@ -10,7 +10,7 @@ from __future__ import annotations

 # Although this was originally intended for internal use only, it has wide adoption in collections.
 # This is due in part to sanity tests previously recommending its use over `collections` imports.
-from ansible.module_utils.six.moves.collections_abc import (  # pylint: disable=unused-import
+from collections.abc import (  # pylint: disable=unused-import
 MappingView,
 ItemsView,
 KeysView,
@@ -25,3 +25,12 @@ from ansible.module_utils.six.moves.collections_abc import (  # pylint: disable=
 Iterable,
 Iterator,
 )
+
+from ansible.module_utils.common import warnings as _warnings
+
+
+_warnings.deprecate(
+    msg="The `ansible.module_utils.common._collections_compat` module is deprecated.",
+    help_text="Use `collections.abc` from the Python standard library instead.",
+    version="2.24",
+)
@@ -6,9 +6,10 @@
 from __future__ import annotations


+from collections.abc import Hashable, Mapping, MutableMapping, Sequence  # pylint: disable=unused-import
+
+from ansible.module_utils._internal import _no_six
 from ansible.module_utils.common import warnings as _warnings
-from ansible.module_utils.six import binary_type, text_type
-from ansible.module_utils.six.moves.collections_abc import Hashable, Mapping, MutableMapping, Sequence  # pylint: disable=unused-import


 class ImmutableDict(Hashable, Mapping):
@@ -67,7 +68,7 @@ class ImmutableDict(Hashable, Mapping):

 def is_string(seq):
 """Identify whether the input has a string-like type (including bytes)."""
-return isinstance(seq, (text_type, binary_type))
+return isinstance(seq, (str, bytes))


 def is_iterable(seq, include_strings=False):
@@ -114,3 +115,7 @@ def count(seq):
 for elem in seq:
 counters[elem] = counters.get(elem, 0) + 1
 return counters
+
+
+def __getattr__(importable_name):
+    return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type")
@@ -7,10 +7,9 @@ from __future__ import annotations


 import re
+from collections.abc import MutableMapping
 from copy import deepcopy

-from ansible.module_utils.six.moves.collections_abc import MutableMapping
-

 def camel_dict_to_snake_dict(camel_dict, reversible=False, ignore_list=()):
 """
@@ -6,11 +6,13 @@
 from __future__ import annotations

 import re

+# backward compat
+from builtins import zip  # pylint: disable=unused-import
+
 from struct import pack
 from socket import inet_ntoa

-from ansible.module_utils.six.moves import zip


 VALID_MASKS = [2**8 - 2**i for i in range(0, 9)]
@@ -9,9 +9,19 @@ import os
 import typing as t

 from collections import deque
-from itertools import chain
+from collections.abc import (
+    KeysView,
+    Set,
+    Sequence,
+    Mapping,
+    MutableMapping,
+    MutableSet,
+    MutableSequence,
+)
+from itertools import chain  # pylint: disable=unused-import

 from ansible.module_utils.common.collections import is_iterable
+from ansible.module_utils._internal import _no_six
 from ansible.module_utils._internal._datatag import AnsibleSerializable, AnsibleTagHelper
 from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
 from ansible.module_utils.common.warnings import warn
@@ -33,26 +43,6 @@ from ansible.module_utils.errors import (
 SubParameterTypeError,
 )
 from ansible.module_utils.parsing.convert_bool import BOOLEANS_FALSE, BOOLEANS_TRUE
-
-from ansible.module_utils.six.moves.collections_abc import (
-    KeysView,
-    Set,
-    Sequence,
-    Mapping,
-    MutableMapping,
-    MutableSet,
-    MutableSequence,
-)
-
-from ansible.module_utils.six import (
-    binary_type,
-    integer_types,
-    string_types,
-    text_type,
-    PY2,
-    PY3,
-)
-
 from ansible.module_utils.common.validation import (
 check_mutually_exclusive,
 check_required_arguments,
@@ -243,7 +233,7 @@ def _handle_aliases(argument_spec, parameters, alias_warnings=None, alias_deprec
 if aliases is None:
 continue

-if not is_iterable(aliases) or isinstance(aliases, (binary_type, text_type)):
+if not is_iterable(aliases) or isinstance(aliases, (bytes, str)):
 raise TypeError('internal error: aliases must be a list or tuple')

 for alias in aliases:
@@ -346,7 +336,7 @@ def _list_no_log_values(argument_spec, params):
 for sub_param in sub_parameters:
 # Validate dict fields in case they came in as strings

-if isinstance(sub_param, string_types):
+if isinstance(sub_param, str):
 sub_param = check_type_dict(sub_param)

 if not isinstance(sub_param, Mapping):
@@ -362,7 +352,7 @@ def _return_datastructure_name(obj):
 """ Return native stringified values from datastructures.

 For use with removing sensitive values pre-jsonification."""
-if isinstance(obj, (text_type, binary_type)):
+if isinstance(obj, (str, bytes)):
 if obj:
 yield to_native(obj, errors='surrogate_or_strict')
 return
@@ -375,7 +365,7 @@ def _return_datastructure_name(obj):
 elif obj is None or isinstance(obj, bool):
 # This must come before int because bools are also ints
 return
-elif isinstance(obj, tuple(list(integer_types) + [float])):
+elif isinstance(obj, (int, float)):
 yield to_native(obj, nonstring='simplerepr')
 else:
 raise TypeError('Unknown parameter type: %s' % (type(obj)))
@@ -413,26 +403,23 @@ def _remove_values_conditions(value, no_log_strings, deferred_removals):
 """
 original_value = value

-if isinstance(value, (text_type, binary_type)):
+if isinstance(value, (str, bytes)):
 # Need native str type
 native_str_value = value
-if isinstance(value, text_type):
+if isinstance(value, str):
 value_is_text = True
-if PY2:
-native_str_value = to_bytes(value, errors='surrogate_or_strict')
-elif isinstance(value, binary_type):
+elif isinstance(value, bytes):
 value_is_text = False
-if PY3:
-native_str_value = to_text(value, errors='surrogate_or_strict')
+native_str_value = to_text(value, errors='surrogate_or_strict')

 if native_str_value in no_log_strings:
 return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
 for omit_me in no_log_strings:
 native_str_value = native_str_value.replace(omit_me, '*' * 8)

-if value_is_text and isinstance(native_str_value, binary_type):
+if value_is_text and isinstance(native_str_value, bytes):
 value = to_text(native_str_value, encoding='utf-8', errors='surrogate_then_replace')
-elif not value_is_text and isinstance(native_str_value, text_type):
+elif not value_is_text and isinstance(native_str_value, str):
 value = to_bytes(native_str_value, encoding='utf-8', errors='surrogate_then_replace')
 else:
 value = native_str_value
@@ -514,7 +501,7 @@ def _set_defaults(argument_spec, parameters, set_default=True):

 def _sanitize_keys_conditions(value, no_log_strings, ignore_keys, deferred_removals):
 """ Helper method to :func:`sanitize_keys` to build ``deferred_removals`` and avoid deep recursion. """
-if isinstance(value, (text_type, binary_type)):
+if isinstance(value, (str, bytes)):
 return value

 if isinstance(value, Sequence):
@@ -541,7 +528,7 @@ def _sanitize_keys_conditions(value, no_log_strings, ignore_keys, deferred_remov
 deferred_removals.append((value, new_value))
 return new_value

-if isinstance(value, tuple(chain(integer_types, (float, bool, NoneType)))):
+if isinstance(value, (int, float, bool, NoneType)):
 return value

 if isinstance(value, (datetime.datetime, datetime.date, datetime.time)):
@@ -560,8 +547,8 @@ def _validate_elements(wanted_type, parameter, values, options_context=None, err
 # Get param name for strings so we can later display this value in a useful error message if needed
 # Only pass 'kwargs' to our checkers and ignore custom callable checkers
 kwargs = {}
-if wanted_element_type == 'str' and isinstance(wanted_type, string_types):
-if isinstance(parameter, string_types):
+if wanted_element_type == 'str' and isinstance(wanted_type, str):
+if isinstance(parameter, str):
 kwargs['param'] = parameter
 elif isinstance(parameter, dict):
 kwargs['param'] = list(parameter.keys())[0]
@@ -620,7 +607,7 @@ def _validate_argument_types(argument_spec, parameters, prefix='', options_conte
 # Get param name for strings so we can later display this value in a useful error message if needed
 # Only pass 'kwargs' to our checkers and ignore custom callable checkers
 kwargs = {}
-if wanted_name == 'str' and isinstance(wanted_type, string_types):
+if wanted_name == 'str' and isinstance(wanted_type, str):
 kwargs['param'] = list(parameters.keys())[0]

 # Get the name of the parent key if this is a nested option
@@ -659,7 +646,7 @@ def _validate_argument_values(argument_spec, parameters, options_context=None, e
 if choices is None:
 continue

-if isinstance(choices, (frozenset, KeysView, Sequence)) and not isinstance(choices, (binary_type, text_type)):
+if isinstance(choices, (frozenset, KeysView, Sequence)) and not isinstance(choices, (bytes, str)):
 if param in parameters:
 # Allow one or more when type='list' param with choices
 if isinstance(parameters[param], list):
@@ -745,7 +732,7 @@ def _validate_sub_spec(
 options_context.append(param)

 # Make sure we can iterate over the elements
-if not isinstance(parameters[param], Sequence) or isinstance(parameters[param], string_types):
+if not isinstance(parameters[param], Sequence) or isinstance(parameters[param], str):
 elements = [parameters[param]]
 else:
 elements = parameters[param]
@@ -940,3 +927,7 @@ def remove_values(value, no_log_strings):
 raise TypeError('Unknown container type encountered when removing private values from output')

 return new_value
+
+
+def __getattr__(importable_name):
+    return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type", "integer_types", "string_types", "PY2", "PY3")
@@ -8,11 +8,8 @@ from __future__ import annotations
 import codecs
 import json

-from ansible.module_utils.six import (
-    binary_type,
-    iteritems,
-    text_type,
-)
+from ansible.module_utils._internal import _no_six

 try:
 codecs.lookup_error('surrogateescape')
@@ -90,7 +87,7 @@ def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):

 Added the ``surrogate_then_replace`` error handler and made it the default error handler.
 """
-if isinstance(obj, binary_type):
+if isinstance(obj, bytes):
 return obj

 # We're given a text string
@@ -104,7 +101,7 @@ def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
 else:
 errors = 'replace'

-if isinstance(obj, text_type):
+if isinstance(obj, str):
 try:
 # Try this first as it's the fastest
 return obj.encode(encoding, errors)
@@ -194,7 +191,7 @@ def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):

 Added the surrogate_then_replace error handler and made it the default error handler.
 """
-if isinstance(obj, text_type):
+if isinstance(obj, str):
 return obj

 if errors in _COMPOSED_ERROR_HANDLERS:
@@ -205,7 +202,7 @@ def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
 else:
 errors = 'replace'

-if isinstance(obj, binary_type):
+if isinstance(obj, bytes):
 # Note: We don't need special handling for surrogate_then_replace
 # because all bytes will either be made into surrogates or are valid
 # to decode.
@@ -259,10 +256,10 @@ def container_to_bytes(d, encoding='utf-8', errors='surrogate_or_strict'):
 """
 # DTFIX-FUTURE: deprecate

-if isinstance(d, text_type):
+if isinstance(d, str):
 return to_bytes(d, encoding=encoding, errors=errors)
 elif isinstance(d, dict):
-return dict(container_to_bytes(o, encoding, errors) for o in iteritems(d))
+return dict(container_to_bytes(o, encoding, errors) for o in d.items())
 elif isinstance(d, list):
 return [container_to_bytes(o, encoding, errors) for o in d]
 elif isinstance(d, tuple):
@@ -279,14 +276,18 @@ def container_to_text(d, encoding='utf-8', errors='surrogate_or_strict'):
 """
 # DTFIX-FUTURE: deprecate

-if isinstance(d, binary_type):
+if isinstance(d, bytes):
 # Warning, can traceback
 return to_text(d, encoding=encoding, errors=errors)
 elif isinstance(d, dict):
-return dict(container_to_text(o, encoding, errors) for o in iteritems(d))
+return dict(container_to_text(o, encoding, errors) for o in d.items())
 elif isinstance(d, list):
 return [container_to_text(o, encoding, errors) for o in d]
 elif isinstance(d, tuple):
 return tuple(container_to_text(o, encoding, errors) for o in d)
 else:
 return d
+
+
+def __getattr__(importable_name):
+    return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type", "iteritems")
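Illustrative usage, not part of the diff: container_to_text recurses through dicts, lists and tuples and decodes any bytes it meets, so replacing iteritems(d) with d.items() keeps the behaviour identical. A small sketch of what that recursion does:

    from ansible.module_utils.common.text.converters import container_to_text

    data = {b"name": [b"web01", "web02"], "port": 22}
    # keys and values are decoded; non-bytes values pass through untouched
    assert container_to_text(data) == {"name": ["web01", "web02"], "port": 22}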
@@ -6,7 +6,7 @@ from __future__ import annotations

 import re

-from ansible.module_utils.six import iteritems
+from ansible.module_utils._internal import _no_six

 SIZE_RANGES = {
 'Y': 1 << 80,
@@ -117,7 +117,7 @@ def bytes_to_human(size, isbits=False, unit=None):
 base = 'bits'
 suffix = ''

-for suffix, limit in sorted(iteritems(SIZE_RANGES), key=lambda item: -item[1]):
+for suffix, limit in sorted(SIZE_RANGES.items(), key=lambda item: -item[1]):
 if (unit is None and size >= limit) or unit is not None and unit.upper() == suffix[0]:
 break

@@ -127,3 +127,7 @@ def bytes_to_human(size, isbits=False, unit=None):
 suffix = base

 return '%.2f %s' % (size / limit, suffix)
+
+
+def __getattr__(importable_name):
+    return _no_six.deprecate(importable_name, __name__, "iteritems")
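For reference, again not part of the change: bytes_to_human walks the size table from the largest limit down and stops at the first one the value reaches, so sorting SIZE_RANGES.items() is equivalent to the old iteritems form. Expected behaviour, assuming the usual table ('K', 'M', 'G', ... with a 'B' or 'b' suffix appended):

    from ansible.module_utils.common.text.formatters import bytes_to_human

    assert bytes_to_human(1048576) == '1.00 MB'            # 1 << 20 bytes
    assert bytes_to_human(1024, isbits=True) == '1.00 Kb'  # bits use a lowercase 'b'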
@ -10,15 +10,13 @@ import os
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from ast import literal_eval
|
from ast import literal_eval
|
||||||
|
from ansible.module_utils._internal import _no_six
|
||||||
from ansible.module_utils.common import json as _common_json
|
from ansible.module_utils.common import json as _common_json
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.module_utils.common.collections import is_iterable
|
from ansible.module_utils.common.collections import is_iterable
|
||||||
from ansible.module_utils.common.text.formatters import human_to_bytes
|
from ansible.module_utils.common.text.formatters import human_to_bytes
|
||||||
from ansible.module_utils.common.warnings import deprecate
|
from ansible.module_utils.common.warnings import deprecate
|
||||||
from ansible.module_utils.parsing.convert_bool import boolean
|
from ansible.module_utils.parsing.convert_bool import boolean
|
||||||
from ansible.module_utils.six import (
|
|
||||||
string_types,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def count_terms(terms, parameters):
|
def count_terms(terms, parameters):
|
||||||
|
|
@ -43,7 +41,7 @@ def safe_eval(value, locals=None, include_exceptions=False):
|
||||||
version="2.21",
|
version="2.21",
|
||||||
)
|
)
|
||||||
# do not allow method calls to modules
|
# do not allow method calls to modules
|
||||||
if not isinstance(value, string_types):
|
if not isinstance(value, str):
|
||||||
# already templated to a datavaluestructure, perhaps?
|
         # already templated to a datastructure, perhaps?
         if include_exceptions:
             return (value, None)

@@ -194,7 +192,7 @@ def check_required_by(requirements, parameters, options_context=None):
         if key not in parameters or parameters[key] is None:
             continue
         # Support strings (single-item lists)
-        if isinstance(value, string_types):
+        if isinstance(value, str):
             value = [value]
 
         if missing := [required for required in value if required not in parameters or parameters[required] is None]:

@@ -373,7 +371,7 @@ def check_type_str(value, allow_conversion=True, param=None, prefix=''):
     :returns: Original value if it is a string, the value converted to a string
         if allow_conversion=True, or raises a TypeError if allow_conversion=False.
     """
-    if isinstance(value, string_types):
+    if isinstance(value, str):
         return value
 
     if allow_conversion and value is not None:

@@ -403,7 +401,7 @@ def check_type_list(value):
         return value
 
     # DTFIX-FUTURE: deprecate legacy comma split functionality, eventually replace with `_check_type_list_strict`
-    if isinstance(value, string_types):
+    if isinstance(value, str):
         return value.split(",")
     elif isinstance(value, int) or isinstance(value, float):
         return [str(value)]

@@ -431,7 +429,7 @@ def check_type_dict(value):
     if isinstance(value, dict):
         return value
 
-    if isinstance(value, string_types):
+    if isinstance(value, str):
         if value.startswith("{"):
             try:
                 return json.loads(value)

@@ -494,7 +492,7 @@ def check_type_bool(value):
     if isinstance(value, bool):
         return value
 
-    if isinstance(value, string_types) or isinstance(value, (int, float)):
+    if isinstance(value, str) or isinstance(value, (int, float)):
         return boolean(value)
 
     raise TypeError('%s cannot be converted to a bool' % type(value))

@@ -594,3 +592,7 @@ def check_type_jsonarg(value):
         return json.dumps(value, cls=_common_json._get_legacy_encoder(), _decode_bytes=True)
 
     raise TypeError('%s cannot be converted to a json string' % type(value))
+
+
+def __getattr__(importable_name):
+    return _no_six.deprecate(importable_name, __name__, "string_types")
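The `__getattr__` added above relies on the module-level attribute hook from PEP 562; `_no_six.deprecate` is an ansible-core internal helper whose implementation is not shown in this diff. A minimal standalone sketch of the same pattern, with an illustrative warning standing in for that helper, placed at the bottom of a library module so `from that_module import string_types` keeps working:

import warnings


def __getattr__(importable_name):
    # only triggers when normal module attribute lookup fails (PEP 562)
    if importable_name == "string_types":
        warnings.warn(
            "string_types is deprecated, use str instead",  # illustrative message
            DeprecationWarning,
            stacklevel=2,
        )
        return (str,)  # six.string_types is a tuple of string types on Python 3
    raise AttributeError(f"module {__name__!r} has no attribute {importable_name!r}")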
@@ -36,9 +36,10 @@ import struct
 import uuid
 
 from functools import partial
+
+from ansible.module_utils._internal import _no_six
 from ansible.module_utils.common.text.converters import to_bytes, to_text
 from ansible.module_utils.common.json import _get_legacy_encoder
-from ansible.module_utils.six import iteritems
 
 
 def write_to_stream(stream, obj):

@@ -95,7 +96,7 @@ class ConnectionError(Exception):
 
     def __init__(self, message, *args, **kwargs):
         super(ConnectionError, self).__init__(message)
-        for k, v in iteritems(kwargs):
+        for k, v in kwargs.items():
             setattr(self, k, v)
 
 

@@ -149,7 +150,7 @@ class Connection(object):
                 raise ConnectionError(
                     "Unable to decode JSON from response to {0}. Received '{1}'.".format(name, out)
                 )
-            params = [repr(arg) for arg in args] + ['{0}={1!r}'.format(k, v) for k, v in iteritems(kwargs)]
+            params = [repr(arg) for arg in args] + ['{0}={1!r}'.format(k, v) for k, v in kwargs.items()]
             params = ', '.join(params)
             raise ConnectionError(
                 "Unable to decode JSON from response to {0}({1}). Received '{2}'.".format(name, params, out)

@@ -200,3 +201,7 @@ class Connection(object):
             sf.close()
 
         return to_text(response, errors='surrogate_or_strict')
+
+
+def __getattr__(importable_name):
+    return _no_six.deprecate(importable_name, __name__, "iteritems")
@@ -24,13 +24,13 @@ import re
 import sys
 import time
 
+from ansible.module_utils._internal import _no_six
 from ansible.module_utils._internal._concurrent import _futures
 from ansible.module_utils.common.locale import get_best_parsable_locale
 from ansible.module_utils.common.text.converters import to_text
 from ansible.module_utils.common.text.formatters import bytes_to_human
 from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
 from ansible.module_utils.facts.utils import get_file_content, get_file_lines, get_mount_size
-from ansible.module_utils.six import iteritems
 
 # import this as a module to ensure we get the same module instance
 from ansible.module_utils.facts import timeout

@@ -653,7 +653,7 @@ class LinuxHardware(Hardware):
                    retval[target].add(entry)
                except OSError:
                    continue
-            return dict((k, list(sorted(v))) for (k, v) in iteritems(retval))
+            return dict((k, list(sorted(v))) for (k, v) in retval.items())
        except OSError:
            return {}
 

@@ -665,7 +665,7 @@ class LinuxHardware(Hardware):
                device = elements[3]
                target = elements[5]
                retval[target].add(device)
-            return dict((k, list(sorted(v))) for (k, v) in iteritems(retval))
+            return dict((k, list(sorted(v))) for (k, v) in retval.items())
        except OSError:
            return {}
 

@@ -750,7 +750,7 @@ class LinuxHardware(Hardware):
             d = {}
             d['virtual'] = virtual
             d['links'] = {}
-            for (link_type, link_values) in iteritems(links):
+            for (link_type, link_values) in links.items():
                 d['links'][link_type] = link_values.get(block, [])
             diskname = os.path.basename(sysdir)
             for key in ['vendor', 'model', 'sas_address', 'sas_device_handle']:

@@ -801,7 +801,7 @@ class LinuxHardware(Hardware):
                 part_sysdir = sysdir + "/" + partname
 
                 part['links'] = {}
-                for (link_type, link_values) in iteritems(links):
+                for (link_type, link_values) in links.items():
                     part['links'][link_type] = link_values.get(partname, [])
 
                 part['start'] = get_file_content(part_sysdir + "/start", 0)

@@ -925,3 +925,7 @@ class LinuxHardwareCollector(HardwareCollector):
     _fact_class = LinuxHardware
 
     required_facts = set(['platform'])
+
+
+def __getattr__(importable_name):
+    return _no_six.deprecate(importable_name, __name__, "iteritems")
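For reference, `dict.items()` is the direct Python 3 replacement for `six.iteritems()`: both iterate key/value pairs lazily, so the rewritten comprehensions above behave identically. A tiny illustrative check (sample data only, not the module's real facts):

retval = {"sda": {"dm-1", "dm-0"}, "sdb": {"dm-2"}}
links = dict((k, list(sorted(v))) for (k, v) in retval.items())
assert links == {"sda": ["dm-0", "dm-1"], "sdb": ["dm-2"]}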
@@ -19,7 +19,7 @@ import os
 import re
 import time
 
-from ansible.module_utils.six.moves import reduce
+from functools import reduce
 
 from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
 from ansible.module_utils.facts.timeout import TimeoutError, timeout
@@ -18,12 +18,13 @@ from __future__ import annotations
 import re
 import time
 
+from functools import reduce
+
 from ansible.module_utils.common.locale import get_best_parsable_locale
 from ansible.module_utils.common.text.formatters import bytes_to_human
 from ansible.module_utils.facts.utils import get_file_content, get_mount_size
 from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
 from ansible.module_utils.facts import timeout
-from ansible.module_utils.six.moves import reduce
 
 
 class SunOSHardware(Hardware):
@@ -7,7 +7,7 @@ import ansible.module_utils.compat.typing as t
 
 from abc import ABCMeta, abstractmethod
 
-from ansible.module_utils.six import with_metaclass
+from ansible.module_utils._internal import _no_six
 from ansible.module_utils.basic import missing_required_lib
 from ansible.module_utils.common.process import get_bin_path
 from ansible.module_utils.common.respawn import has_respawned, probe_interpreters_for_module, respawn_module

@@ -19,7 +19,7 @@ def get_all_pkg_managers():
     return {obj.__name__.lower(): obj for obj in get_all_subclasses(PkgMgr) if obj not in (CLIMgr, LibMgr, RespawningLibMgr)}
 
 
-class PkgMgr(with_metaclass(ABCMeta, object)):  # type: ignore[misc]
+class PkgMgr(metaclass=ABCMeta):
 
     @abstractmethod
     def is_available(self, handle_exceptions):

@@ -125,3 +125,7 @@ class CLIMgr(PkgMgr):
             if not handle_exceptions:
                 raise
         return found
+
+
+def __getattr__(importable_name):
+    return _no_six.deprecate(importable_name, __name__, "with_metaclass")
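`six.with_metaclass(ABCMeta, object)` and the native `class ...(metaclass=ABCMeta)` keyword produce equivalent classes on Python 3. A minimal sketch of the modern spelling, using toy names unrelated to the real PkgMgr hierarchy:

from abc import ABCMeta, abstractmethod


class Base(metaclass=ABCMeta):
    @abstractmethod
    def is_available(self):
        ...


class Impl(Base):
    def is_available(self):
        return True


assert Impl().is_available()
# Base() itself raises TypeError because is_available is still abstract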
@@ -18,8 +18,7 @@ from __future__ import annotations
 import os
 import typing as t
 
-from ansible.module_utils.six import iteritems
-
+from ansible.module_utils._internal import _no_six
 from ansible.module_utils.facts.collector import BaseFactCollector
 
 

@@ -31,7 +30,11 @@ class EnvFactCollector(BaseFactCollector):
         env_facts = {}
         env_facts['env'] = {}
 
-        for k, v in iteritems(os.environ):
+        for k, v in os.environ.items():
             env_facts['env'][k] = v
 
         return env_facts
+
+
+def __getattr__(importable_name):
+    return _no_six.deprecate(importable_name, __name__, "iteritems")
@@ -3,16 +3,18 @@
 
 from __future__ import annotations
 
+import configparser
 import glob
 import json
 import os
 import stat
 import typing as t
 
+from io import StringIO
+
 from ansible.module_utils.common.text.converters import to_text
 from ansible.module_utils.facts.utils import get_file_content
 from ansible.module_utils.facts.collector import BaseFactCollector
-from ansible.module_utils.six.moves import configparser, StringIO
 
 
 class LocalFactCollector(BaseFactCollector):
@@ -5,7 +5,7 @@ from __future__ import annotations
 
 import collections.abc as c
 
-from ansible.module_utils.six import binary_type, text_type
+from ansible.module_utils._internal import _no_six
 from ansible.module_utils.common.text.converters import to_text
 
 

@@ -20,7 +20,7 @@ def boolean(value, strict=True):
 
     normalized_value = value
 
-    if isinstance(value, (text_type, binary_type)):
+    if isinstance(value, (str, bytes)):
         normalized_value = to_text(value, errors='surrogate_or_strict').lower().strip()
 
     if not isinstance(value, c.Hashable):

@@ -32,3 +32,7 @@ def boolean(value, strict=True):
         return False
 
     raise TypeError("The value '%s' is not a valid boolean. Valid booleans include: %s" % (to_text(value), ', '.join(repr(i) for i in BOOLEANS)))
+
+
+def __getattr__(importable_name):
+    return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type")
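As a usage note, `boolean()` keeps accepting str, bytes, and numeric input after this change; the expected results below are a sketch based on the documented truthy/falsy sets and assume ansible-core is importable:

from ansible.module_utils.parsing.convert_bool import boolean

assert boolean("yes") is True
assert boolean(b"on") is True          # bytes are decoded before comparison
assert boolean(0) is False
assert boolean("maybe", strict=False) is False   # non-strict falls back to False
# boolean("maybe") with strict=True raises TypeError listing the valid values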
@@ -36,7 +36,6 @@ import select
 import shlex
 import subprocess
 
-from ansible.module_utils.six import b
 from ansible.module_utils.common.text.converters import to_bytes, to_text
 
 

@@ -200,7 +199,7 @@ def daemonize(module, cmd):
        fds = [p.stdout, p.stderr]
 
        # loop reading output till it is done
-        output = {p.stdout: b(""), p.stderr: b("")}
+        output = {p.stdout: b"", p.stderr: b""}
        while fds:
            rfd, wfd, efd = select.select(fds, [], fds, 1)
            if (rfd + wfd + efd) or p.poll() is None:

@@ -234,7 +233,7 @@ def daemonize(module, cmd):
     os.waitpid(pid, 0)
 
     # Grab response data after child finishes
-    return_data = b("")
+    return_data = b""
     while True:
         rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
         if pipe[0] in rfd:
@@ -383,7 +383,6 @@ from ansible.module_utils.common.file import S_IRWXU_RXG_RXO
 from ansible.module_utils.common.locale import get_best_parsable_locale
 from ansible.module_utils.common.respawn import has_respawned, probe_interpreters_for_module, respawn_module
 from ansible.module_utils.common.text.converters import to_native, to_text
-from ansible.module_utils.six import string_types
 from ansible.module_utils.urls import fetch_file
 
 DPKG_OPTIONS = 'force-confdef,force-confold'

@@ -633,7 +632,7 @@ def expand_pkgspec_from_fnmatches(m, pkgspec, cache):
     if pkgspec:
         for pkgspec_pattern in pkgspec:
 
-            if not isinstance(pkgspec_pattern, string_types):
+            if not isinstance(pkgspec_pattern, str):
                 m.fail_json(msg="Invalid type for package name, expected string but got %s" % type(pkgspec_pattern))
 
             pkgname_pattern, version_cmp, version = package_split(pkgspec_pattern)
@@ -131,7 +131,6 @@ import re
 import tempfile
 
 from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.six import b, indexbytes
 from ansible.module_utils.common.text.converters import to_native
 
 

@@ -141,6 +140,7 @@ def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, igno
     tmp = os.fdopen(tmpfd, 'wb')
     delimit_me = False
     add_newline = False
+    b_linesep = os.linesep.encode()
 
     for f in sorted(os.listdir(src_path)):
         if compiled_regexp and not compiled_regexp.search(f):

@@ -153,7 +153,7 @@ def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, igno
 
         # always put a newline between fragments if the previous fragment didn't end with a newline.
         if add_newline:
-            tmp.write(b('\n'))
+            tmp.write(b_linesep)
 
         # delimiters should only appear between fragments
         if delimit_me:

@@ -163,16 +163,12 @@ def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, igno
             tmp.write(delimiter)
             # always make sure there's a newline after the
             # delimiter, so lines don't run together
-            # byte indexing differs on Python 2 and 3,
-            # use indexbytes for compat
-            # chr(10) == '\n'
-            if indexbytes(delimiter, -1) != 10:
-                tmp.write(b('\n'))
+            if not delimiter.endswith(b_linesep):
+                tmp.write(b_linesep)
 
         tmp.write(fragment_content)
         delimit_me = True
-        if fragment_content.endswith(b('\n')):
+        if fragment_content.endswith(b_linesep):
             add_newline = False
         else:
             add_newline = True
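The removed `indexbytes(delimiter, -1) != 10` check existed because indexing bytes behaved differently on Python 2; on Python 3 indexing bytes yields an int directly, so `endswith()` is the clearer spelling. A quick sketch of the equivalence, using a made-up delimiter:

delimiter = b"# --- fragment separator ---\n"
assert delimiter[-1] == 10        # 10 == ord("\n"), what indexbytes() used to return
assert delimiter.endswith(b"\n")  # shape of the check the module now uses via os.linesep.encode()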
@@ -192,7 +192,6 @@ EXAMPLES = r"""
 import re
 import os
 import tempfile
-from ansible.module_utils.six import b
 from ansible.module_utils.basic import AnsibleModule
 from ansible.module_utils.common.text.converters import to_bytes, to_native
 

@@ -291,7 +290,8 @@ def main():
     block = to_bytes(params['block'])
     marker = to_bytes(params['marker'])
     present = params['state'] == 'present'
-    blank_line = [b(os.linesep)]
+    b_linesep = os.linesep.encode()
+    blank_line = [b_linesep]
 
     if not present and not path_exists:
         module.exit_json(changed=False, msg="File %s not present" % path)

@@ -306,11 +306,11 @@ def main():
     else:
         insertre = None
 
-    marker0 = re.sub(b(r'{mark}'), b(params['marker_begin']), marker) + b(os.linesep)
-    marker1 = re.sub(b(r'{mark}'), b(params['marker_end']), marker) + b(os.linesep)
+    marker0 = re.sub(r'{mark}'.encode(), to_bytes(params['marker_begin']), marker) + b_linesep
+    marker1 = re.sub(r'{mark}'.encode(), to_bytes(params['marker_end']), marker) + b_linesep
     if present and block:
-        if not block.endswith(b(os.linesep)):
-            block += b(os.linesep)
+        if not block.endswith(b_linesep):
+            block += b_linesep
         blocklines = [marker0] + block.splitlines(True) + [marker1]
     else:
         blocklines = []

@@ -352,15 +352,15 @@ def main():
 
     # Ensure there is a line separator before the block of lines to be inserted
     if n0 > 0:
-        if not lines[n0 - 1].endswith(b(os.linesep)):
-            lines[n0 - 1] += b(os.linesep)
+        if not lines[n0 - 1].endswith(b_linesep):
+            lines[n0 - 1] += b_linesep
 
     # Before the block: check if we need to prepend a blank line
     # If yes, we need to add the blank line if we are not at the beginning of the file
     # and the previous line is not a blank line
     # In both cases, we need to shift by one on the right the inserting position of the block
     if params['prepend_newline'] and present:
-        if n0 != 0 and lines[n0 - 1] != b(os.linesep):
+        if n0 != 0 and lines[n0 - 1] != b_linesep:
             lines[n0:n0] = blank_line
             n0 += 1
 

@@ -372,7 +372,7 @@ def main():
     # and the line right after is not a blank line
     if params['append_newline'] and present:
         line_after_block = n0 + len(blocklines)
-        if line_after_block < len(lines) and lines[line_after_block] != b(os.linesep):
+        if line_after_block < len(lines) and lines[line_after_block] != b_linesep:
             lines[line_after_block:line_after_block] = blank_line
 
     if lines:
@@ -219,13 +219,13 @@ import os
 import platform
 import pwd
 import re
+import shlex
 import sys
 import tempfile
 
 from ansible.module_utils.basic import AnsibleModule
 from ansible.module_utils.common.file import S_IRWU_RWG_RWO
 from ansible.module_utils.common.text.converters import to_bytes, to_native
-from ansible.module_utils.six.moves import shlex_quote
 
 
 class CronTabError(Exception):

@@ -529,13 +529,13 @@ class CronTab(object):
         user = ''
         if self.user:
             if platform.system() == 'SunOS':
-                return "su %s -c '%s -l'" % (shlex_quote(self.user), shlex_quote(self.cron_cmd))
+                return "su %s -c '%s -l'" % (shlex.quote(self.user), shlex.quote(self.cron_cmd))
             elif platform.system() == 'AIX':
-                return "%s -l %s" % (shlex_quote(self.cron_cmd), shlex_quote(self.user))
+                return "%s -l %s" % (shlex.quote(self.cron_cmd), shlex.quote(self.user))
             elif platform.system() == 'HP-UX':
-                return "%s %s %s" % (self.cron_cmd, '-l', shlex_quote(self.user))
+                return "%s %s %s" % (self.cron_cmd, '-l', shlex.quote(self.user))
             elif pwd.getpwuid(os.getuid())[0] != self.user:
-                user = '-u %s' % shlex_quote(self.user)
+                user = '-u %s' % shlex.quote(self.user)
         return "%s %s %s" % (self.cron_cmd, user, '-l')
 
     def _write_execute(self, path):

@@ -546,10 +546,10 @@ class CronTab(object):
         if self.user:
             if platform.system() in ['SunOS', 'HP-UX', 'AIX']:
                 return "chown %s %s ; su '%s' -c '%s %s'" % (
-                    shlex_quote(self.user), shlex_quote(path), shlex_quote(self.user), self.cron_cmd, shlex_quote(path))
+                    shlex.quote(self.user), shlex.quote(path), shlex.quote(self.user), self.cron_cmd, shlex.quote(path))
             elif pwd.getpwuid(os.getuid())[0] != self.user:
-                user = '-u %s' % shlex_quote(self.user)
-        return "%s %s %s" % (self.cron_cmd, user, shlex_quote(path))
+                user = '-u %s' % shlex.quote(self.user)
+        return "%s %s %s" % (self.cron_cmd, user, shlex.quote(path))
 
 
 def main():
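`shlex.quote` is the standard-library function behind the old `six.moves` `shlex_quote` alias, so behavior is unchanged. A small sketch of how it quotes shell arguments:

import shlex

assert shlex.quote("simple") == "simple"          # safe strings pass through
assert shlex.quote("has space") == "'has space'"  # unsafe strings are single-quoted
print("crontab -l %s" % shlex.quote("user name"))  # crontab -l 'user name'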
@@ -250,7 +250,6 @@ from ansible.module_utils.common.file import S_IRWXU_RXG_RXO, S_IRWU_RG_RO
 from ansible.module_utils.common.respawn import has_respawned, probe_interpreters_for_module, respawn_module
 from ansible.module_utils.common.text.converters import to_bytes
 from ansible.module_utils.common.text.converters import to_native
-from ansible.module_utils.six import raise_from  # type: ignore[attr-defined]
 from ansible.module_utils.urls import generic_urlparse
 from ansible.module_utils.urls import open_url
 from ansible.module_utils.urls import get_user_agent

@@ -339,7 +338,7 @@ def write_signed_by_key(module, v, slug):
         try:
             r = open_url(v, http_agent=get_user_agent())
         except Exception as exc:
-            raise_from(RuntimeError(to_native(exc)), exc)
+            raise RuntimeError('Could not fetch signed_by key.') from exc
         else:
             b_data = r.read()
     else:

@@ -587,14 +586,9 @@ def main():
         elif is_sequence(value):
             value = format_list(value)
         elif key == 'signed_by':
-            try:
-                key_changed, signed_by_filename, signed_by_data = write_signed_by_key(module, value, slug)
-                value = signed_by_filename or signed_by_data
-                changed |= key_changed
-            except RuntimeError as exc:
-                module.fail_json(
-                    msg='Could not fetch signed_by key: %s' % to_native(exc)
-                )
+            key_changed, signed_by_filename, signed_by_data = write_signed_by_key(module, value, slug)
+            value = signed_by_filename or signed_by_data
+            changed |= key_changed
 
         if value.count('\n') > 0:
             value = format_multiline(value)
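`six.raise_from(new_exc, cause)` maps directly onto the native `raise new_exc from cause` statement used above, which records the original exception as `__cause__`. A minimal self-contained sketch of the construct (the failing call is a stand-in, not the module's real fetch):

def fetch_key(url):
    try:
        raise OSError("connection refused")  # stand-in for a failed open_url() call
    except Exception as exc:
        raise RuntimeError('Could not fetch signed_by key.') from exc


try:
    fetch_key("https://example.invalid/key.asc")
except RuntimeError as err:
    assert isinstance(err.__cause__, OSError)  # chained cause survives for tracebacks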
@@ -291,7 +291,6 @@ import time
 
 from ansible.module_utils.common.text.converters import to_text, to_native
 from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.six import string_types
 
 
 class _Object:

@@ -496,7 +495,7 @@ def main():
 
     params = module.params
 
-    if params['mode'] and not isinstance(params['mode'], string_types):
+    if params['mode'] and not isinstance(params['mode'], str):
         module.fail_json(
             msg="argument 'mode' is not a string and conversion is not allowed, value is of type %s" % params['mode'].__class__.__name__
         )
@@ -374,9 +374,9 @@ import shutil
 import tempfile
 
 from datetime import datetime, timezone
+from urllib.parse import urlsplit
 
 from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.six.moves.urllib.parse import urlsplit
 from ansible.module_utils.common.text.converters import to_native
 from ansible.module_utils.urls import fetch_url, url_argument_spec
 
@@ -343,7 +343,6 @@ from ansible.module_utils.common.text.converters import to_native, to_text
 from ansible.module_utils.basic import AnsibleModule
 from ansible.module_utils.common.locale import get_best_parsable_locale
 from ansible.module_utils.common.process import get_bin_path
-from ansible.module_utils.six import b, string_types
 
 
 def relocate_repo(module, result, repo_dir, old_repo_dir, worktree_dir):

@@ -443,12 +442,12 @@ def write_ssh_wrapper(module):
         fd, wrapper_path = tempfile.mkstemp()
 
         # use existing git_ssh/ssh_command, fallback to 'ssh'
-        template = b("""#!/bin/sh
+        template = """#!/bin/sh
 %s $GIT_SSH_OPTS "$@"
-""" % os.environ.get('GIT_SSH', os.environ.get('GIT_SSH_COMMAND', 'ssh')))
+""" % os.environ.get('GIT_SSH', os.environ.get('GIT_SSH_COMMAND', 'ssh'))
 
         # write it
-        with os.fdopen(fd, 'w+b') as fh:
+        with os.fdopen(fd, 'w') as fh:
             fh.write(template)
 
         # set execute

@@ -1257,7 +1256,7 @@ def main():
 
     # evaluate and set the umask before doing anything else
     if umask is not None:
        if not isinstance(umask, string_types):
-        if not isinstance(umask, string_types):
+        if not isinstance(umask, str):
             module.fail_json(msg="umask must be defined as a quoted octal integer")
         try:
             umask = int(umask, 8)
@@ -180,7 +180,6 @@ from ansible.module_utils.basic import AnsibleModule
 from ansible.module_utils.common.locale import get_best_parsable_locale
 from ansible.module_utils.common.sys_info import get_platform_subclass
 from ansible.module_utils.service import fail_if_missing, is_systemd_managed
-from ansible.module_utils.six import b
 
 
 class Service(object):

@@ -292,8 +291,8 @@ class Service(object):
         # chkconfig localizes messages and we're screen scraping so make
         # sure we use the C locale
         p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=lang_env, preexec_fn=lambda: os.close(pipe[1]))
-        stdout = b("")
-        stderr = b("")
+        stdout = b""
+        stderr = b""
         fds = [p.stdout, p.stderr]
         # Wait for all output, or until the main process is dead and its output is done.
         while fds:

@@ -322,7 +321,7 @@ class Service(object):
         os.close(pipe[1])
         os.waitpid(pid, 0)
         # Wait for data from daemon process and process it.
-        data = b("")
+        data = b""
         while True:
             rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
             if pipe[0] in rfd:
@@ -438,13 +438,12 @@ import os
 import re
 import shutil
 import tempfile
+from collections.abc import Mapping, Sequence
 from datetime import datetime, timezone
+from urllib.parse import urlencode, urljoin
 
 from ansible.module_utils.basic import AnsibleModule, sanitize_keys
-from ansible.module_utils.six import binary_type, iteritems, string_types
-from ansible.module_utils.six.moves.urllib.parse import urlencode, urljoin
 from ansible.module_utils.common.text.converters import to_native, to_text
-from ansible.module_utils.six.moves.collections_abc import Mapping, Sequence
 from ansible.module_utils.urls import (
     fetch_url,
     get_response_filename,

@@ -479,7 +478,7 @@ def write_file(module, dest, content, resp):
     try:
         fd, tmpsrc = tempfile.mkstemp(dir=module.tmpdir)
         with os.fdopen(fd, 'wb') as f:
-            if isinstance(content, binary_type):
+            if isinstance(content, bytes):
                 f.write(content)
             else:
                 shutil.copyfileobj(content, f)

@@ -521,14 +520,14 @@ def kv_list(data):
 
 def form_urlencoded(body):
     """ Convert data into a form-urlencoded string """
-    if isinstance(body, string_types):
+    if isinstance(body, str):
         return body
 
     if isinstance(body, (Mapping, Sequence)):
         result = []
         # Turn a list of lists into a list of tuples that urlencode accepts
         for key, values in kv_list(body):
-            if isinstance(values, string_types) or not isinstance(values, (Mapping, Sequence)):
+            if isinstance(values, str) or not isinstance(values, (Mapping, Sequence)):
                 values = [values]
             for value in values:
                 if value is not None:

@@ -641,12 +640,12 @@ def main():
 
     if body_format == 'json':
         # Encode the body unless its a string, then assume it is pre-formatted JSON
-        if not isinstance(body, string_types):
+        if not isinstance(body, str):
             body = json.dumps(body)
         if 'content-type' not in [header.lower() for header in dict_headers]:
             dict_headers['Content-Type'] = 'application/json'
     elif body_format == 'form-urlencoded':
-        if not isinstance(body, string_types):
+        if not isinstance(body, str):
             try:
                 body = form_urlencoded(body)
             except ValueError as e:

@@ -747,7 +746,7 @@ def main():
     # In python3, the headers are title cased. Lowercase them to be
     # compatible with the python2 behaviour.
     uresp = {}
-    for key, value in iteritems(resp):
+    for key, value in resp.items():
         ukey = key.replace("-", "_").lower()
         uresp[ukey] = value
 

@@ -755,7 +754,7 @@ def main():
         uresp['location'] = urljoin(url, uresp['location'])
 
     # Default content_encoding to try
-    if isinstance(content, binary_type):
+    if isinstance(content, bytes):
         u_content = to_text(content, encoding=content_encoding)
         if maybe_json:
             try:
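`urlencode` and `urljoin` now come straight from `urllib.parse`, with the same behavior the `six.moves` aliases provided. A rough usage sketch of the two calls (sample data, not the module's exact helper):

from urllib.parse import urlencode, urljoin

# doseq=True expands list values into repeated keys, mirroring form posts
body = [("name", "ansible"), ("tag", ["a", "b"])]
assert urlencode(body, doseq=True) == "name=ansible&tag=a&tag=b"

assert urljoin("https://example.com/api/", "status") == "https://example.com/api/status"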
@@ -19,7 +19,6 @@ from ansible._internal._errors import _error_utils
 from ansible.module_utils.basic import is_executable
 from ansible._internal._datatag._tags import Origin, TrustedAsTemplate, SourceWasEncrypted
 from ansible.module_utils._internal._datatag import AnsibleTagHelper
-from ansible.module_utils.six import binary_type, text_type
 from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
 from ansible.parsing.quoting import unquote
 from ansible.parsing.utils.yaml import from_yaml

@@ -418,7 +417,7 @@ class DataLoader:
         Temporary files are cleanup in the destructor
         """
 
-        if not file_path or not isinstance(file_path, (binary_type, text_type)):
+        if not file_path or not isinstance(file_path, (bytes, str)):
             raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path))
 
         b_file_path = to_bytes(file_path, errors='surrogate_or_strict')
@@ -59,7 +59,6 @@ except ImportError:
 
 from ansible.errors import AnsibleError, AnsibleAssertionError
 from ansible import constants as C
-from ansible.module_utils.six import binary_type
 from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
 from ansible.utils.display import Display
 from ansible.utils.path import makedirs_safe, unfrackpath

@@ -1237,7 +1236,7 @@ class VaultAES256:
 
         It would be nice if there were a library for this but hey.
         """
-        if not (isinstance(b_a, binary_type) and isinstance(b_b, binary_type)):
+        if not (isinstance(b_a, bytes) and isinstance(b_b, bytes)):
             raise TypeError('_is_equal can only be used to compare two byte strings')
 
         # http://codahale.com/a-lesson-in-timing-attacks/
@@ -19,7 +19,6 @@ from ansible import context
 from ansible.errors import AnsibleError, AnsibleParserError, AnsibleAssertionError, AnsibleValueOmittedError, AnsibleFieldAttributeError
 from ansible.module_utils.datatag import native_type_name
 from ansible._internal._datatag._tags import Origin
-from ansible.module_utils.six import string_types
 from ansible.module_utils.parsing.convert_bool import boolean
 from ansible.module_utils.common.sentinel import Sentinel
 from ansible.module_utils.common.text.converters import to_text

@@ -37,7 +36,7 @@ display = Display()
 def _validate_action_group_metadata(action, found_group_metadata, fq_group_name):
     valid_metadata = {
         'extend_group': {
-            'types': (list, string_types,),
+            'types': (list, str,),
             'errortype': 'list',
         },
     }

@@ -204,7 +203,7 @@ class FieldAttributeBase:
             value = self.set_to_context(attr.name)
 
         valid_values = frozenset(('always', 'on_failed', 'on_unreachable', 'on_skipped', 'never'))
-        if value and isinstance(value, string_types) and value not in valid_values:
+        if value and isinstance(value, str) and value not in valid_values:
             raise AnsibleParserError("'%s' is not a valid value for debugger. Must be one of %s" % (value, ', '.join(valid_values)), obj=self.get_ds())
         return value
 

@@ -350,14 +349,14 @@ class FieldAttributeBase:
         found_group_metadata = False
         for action in action_group:
             # Everything should be a string except the metadata entry
-            if not isinstance(action, string_types):
+            if not isinstance(action, str):
                 _validate_action_group_metadata(action, found_group_metadata, fq_group_name)
 
                 if isinstance(action['metadata'], dict):
                     found_group_metadata = True
 
                     include_groups = action['metadata'].get('extend_group', [])
-                    if isinstance(include_groups, string_types):
+                    if isinstance(include_groups, str):
                         include_groups = [include_groups]
                     if not isinstance(include_groups, list):
                         # Bad entries may be a warning above, but prevent tracebacks by setting it back to the acceptable type.

@@ -472,7 +471,7 @@ class FieldAttributeBase:
                 elif attribute.isa == 'percent':
                     # special value, which may be an integer or float
                     # with an optional '%' at the end
-                    if isinstance(value, string_types) and '%' in value:
+                    if isinstance(value, str) and '%' in value:
                         value = value.replace('%', '')
                     value = float(value)
                 elif attribute.isa == 'list':
@@ -3,7 +3,6 @@
 
 from __future__ import annotations
 
-from ansible.module_utils.six import string_types
 from ansible.playbook.attribute import FieldAttribute
 from ansible.utils.collection_loader import AnsibleCollectionConfig
 from ansible.utils.display import Display

@@ -32,7 +31,7 @@ def _ensure_default_collection(collection_list=None):
 class CollectionSearch:
 
     # this needs to be populated before we can resolve tasks/roles/etc
-    collections = FieldAttribute(isa='list', listof=string_types, priority=100, default=_ensure_default_collection, always_post_validate=True, static=True)
+    collections = FieldAttribute(isa='list', listof=(str,), priority=100, default=_ensure_default_collection, always_post_validate=True, static=True)
 
     def _load_collections(self, attr, ds):
         # We are always a mixin with Base, so we can validate this untemplated
@@ -20,12 +20,11 @@ from __future__ import annotations
 from ansible.errors import AnsibleAssertionError
 from ansible.playbook.attribute import NonInheritableFieldAttribute
 from ansible.playbook.task import Task
-from ansible.module_utils.six import string_types
 
 
 class Handler(Task):
 
-    listen = NonInheritableFieldAttribute(isa='list', default=list, listof=string_types, static=True)
+    listen = NonInheritableFieldAttribute(isa='list', default=list, listof=(str,), static=True)
 
     def __init__(self, block=None, role=None, task_include=None):
         self.notified_hosts = []
@@ -22,7 +22,6 @@ from ansible import context
 from ansible.errors import AnsibleError
 from ansible.errors import AnsibleParserError, AnsibleAssertionError
 from ansible.module_utils.common.collections import is_sequence
-from ansible.module_utils.six import binary_type, string_types, text_type
 from ansible.playbook.attribute import NonInheritableFieldAttribute
 from ansible.playbook.base import Base
 from ansible.playbook.block import Block

@@ -53,11 +52,11 @@ class Play(Base, Taggable, CollectionSearch):
     """
 
     # =================================================================================
-    hosts = NonInheritableFieldAttribute(isa='list', required=True, listof=string_types, always_post_validate=True, priority=-2)
+    hosts = NonInheritableFieldAttribute(isa='list', required=True, listof=(str,), always_post_validate=True, priority=-2)
 
     # Facts
     gather_facts = NonInheritableFieldAttribute(isa='bool', default=None, always_post_validate=True)
-    gather_subset = NonInheritableFieldAttribute(isa='list', default=None, listof=string_types, always_post_validate=True)
+    gather_subset = NonInheritableFieldAttribute(isa='list', default=None, listof=(str,), always_post_validate=True)
     gather_timeout = NonInheritableFieldAttribute(isa='int', default=None, always_post_validate=True)
     fact_path = NonInheritableFieldAttribute(isa='string', default=None)
 

@@ -120,10 +119,10 @@ class Play(Base, Taggable, CollectionSearch):
             for entry in value:
                 if entry is None:
                     raise AnsibleParserError("Hosts list cannot contain values of 'None'. Please check your playbook")
-                elif not isinstance(entry, (binary_type, text_type)):
+                elif not isinstance(entry, (bytes, str)):
                     raise AnsibleParserError("Hosts list contains an invalid host value: '{host!s}'".format(host=entry))
 
-        elif not isinstance(value, (binary_type, text_type, EncryptedString)):
+        elif not isinstance(value, (bytes, str, EncryptedString)):
             raise AnsibleParserError("Hosts list must be a sequence or string. Please check your playbook.")
 
     def get_name(self):
@@ -27,7 +27,6 @@ from ansible import constants as C
 from ansible.errors import AnsibleError, AnsibleParserError, AnsibleAssertionError
 from ansible.module_utils.common.sentinel import Sentinel
 from ansible.module_utils.common.text.converters import to_text
-from ansible.module_utils.six import binary_type, text_type
 from ansible.playbook.base import Base
 from ansible.playbook.collectionsearch import CollectionSearch
 from ansible.playbook.conditional import Conditional

@@ -74,7 +73,7 @@ def hash_params(params):
     # Any container is unhashable if it contains unhashable items (for
     # instance, tuple() is a Hashable subclass but if it contains a dict, it
     # cannot be hashed)
-    if isinstance(params, Container) and not isinstance(params, (text_type, binary_type)):
+    if isinstance(params, Container) and not isinstance(params, (str, bytes)):
         if isinstance(params, Mapping):
             try:
                 # Optimistically hope the contents are all hashable
@@ -22,7 +22,6 @@ import os
 from ansible import constants as C
 from ansible.errors import AnsibleError, AnsibleAssertionError
 from ansible.module_utils._internal._datatag import AnsibleTagHelper
-from ansible.module_utils.six import string_types
 from ansible.playbook.attribute import NonInheritableFieldAttribute
 from ansible.playbook.base import Base
 from ansible.playbook.collectionsearch import CollectionSearch

@@ -70,7 +69,7 @@ class RoleDefinition(Base, Conditional, Taggable, CollectionSearch):
         if isinstance(ds, int):
             ds = "%s" % ds
 
-        if not isinstance(ds, dict) and not isinstance(ds, string_types):
+        if not isinstance(ds, dict) and not isinstance(ds, str):
             raise AnsibleAssertionError()
 
         if isinstance(ds, dict):

@@ -113,11 +112,11 @@ class RoleDefinition(Base, Conditional, Taggable, CollectionSearch):
         string), just that string
         """
 
-        if isinstance(ds, string_types):
+        if isinstance(ds, str):
             return ds
 
         role_name = ds.get('role', ds.get('name'))
-        if not role_name or not isinstance(role_name, string_types):
+        if not role_name or not isinstance(role_name, str):
             raise AnsibleError('role definitions must contain a role name', obj=ds)
 
         # if we have the required datastructures, and if the role_name
@@ -18,7 +18,6 @@
 from __future__ import annotations
 
 from ansible.errors import AnsibleError, AnsibleParserError
-from ansible.module_utils.six import string_types
 from ansible.playbook.delegatable import Delegatable
 from ansible.playbook.role.definition import RoleDefinition
 

@@ -40,10 +39,10 @@ class RoleInclude(RoleDefinition, Delegatable):
     @staticmethod
     def load(data, play, current_role_path=None, parent_role=None, variable_manager=None, loader=None, collection_list=None):
 
-        if not (isinstance(data, string_types) or isinstance(data, dict)):
+        if not (isinstance(data, str) or isinstance(data, dict)):
             raise AnsibleParserError("Invalid role definition.", obj=data)
 
-        if isinstance(data, string_types) and ',' in data:
+        if isinstance(data, str) and ',' in data:
             raise AnsibleError("Invalid old style role requirement: %s" % data)
 
         ri = RoleInclude(play=play, role_basedir=current_role_path, variable_manager=variable_manager, loader=loader, collection_list=collection_list)
@ -20,7 +20,6 @@ from __future__ import annotations
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from ansible.errors import AnsibleParserError, AnsibleError
|
from ansible.errors import AnsibleParserError, AnsibleError
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.playbook.attribute import NonInheritableFieldAttribute
|
from ansible.playbook.attribute import NonInheritableFieldAttribute
|
||||||
from ansible.playbook.base import Base
|
from ansible.playbook.base import Base
|
||||||
from ansible.playbook.collectionsearch import CollectionSearch
|
from ansible.playbook.collectionsearch import CollectionSearch
|
||||||
|
|
@ -70,7 +69,7 @@ class RoleMetadata(Base, CollectionSearch):
|
||||||
|
|
||||||
for role_def in ds:
|
for role_def in ds:
|
||||||
# FIXME: consolidate with ansible-galaxy to keep this in sync
|
# FIXME: consolidate with ansible-galaxy to keep this in sync
|
||||||
if isinstance(role_def, string_types) or 'role' in role_def or 'name' in role_def:
|
if isinstance(role_def, str) or 'role' in role_def or 'name' in role_def:
|
||||||
roles.append(role_def)
|
roles.append(role_def)
|
||||||
continue
|
continue
|
||||||
try:
|
try:
|
||||||
|
|
|
||||||
|
|
@ -18,7 +18,6 @@
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.playbook.role.definition import RoleDefinition
|
from ansible.playbook.role.definition import RoleDefinition
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
from ansible.utils.galaxy import scm_archive_resource
|
from ansible.utils.galaxy import scm_archive_resource
|
||||||
|
|
@ -65,7 +64,7 @@ class RoleRequirement(RoleDefinition):
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def role_yaml_parse(role):
|
def role_yaml_parse(role):
|
||||||
|
|
||||||
if isinstance(role, string_types):
|
if isinstance(role, str):
|
||||||
name = None
|
name = None
|
||||||
scm = None
|
scm = None
|
||||||
src = None
|
src = None
|
||||||
|
|
|
||||||
|
|
@ -23,7 +23,6 @@ from ansible.playbook.task_include import TaskInclude
|
||||||
from ansible.playbook.role import Role
|
from ansible.playbook.role import Role
|
||||||
from ansible.playbook.role.include import RoleInclude
|
from ansible.playbook.role.include import RoleInclude
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible._internal._templating._engine import TemplateEngine
|
from ansible._internal._templating._engine import TemplateEngine
|
||||||
|
|
||||||
__all__ = ['IncludeRole']
|
__all__ = ['IncludeRole']
|
||||||
|
|
@ -137,7 +136,7 @@ class IncludeRole(TaskInclude):
|
||||||
for key in my_arg_names.intersection(IncludeRole.FROM_ARGS):
|
for key in my_arg_names.intersection(IncludeRole.FROM_ARGS):
|
||||||
from_key = key.removesuffix('_from')
|
from_key = key.removesuffix('_from')
|
||||||
args_value = ir.args.get(key)
|
args_value = ir.args.get(key)
|
||||||
if not isinstance(args_value, string_types):
|
if not isinstance(args_value, str):
|
||||||
raise AnsibleParserError('Expected a string for %s but got %s instead' % (key, type(args_value)))
|
raise AnsibleParserError('Expected a string for %s but got %s instead' % (key, type(args_value)))
|
||||||
ir._from_files[from_key] = args_value
|
ir._from_files[from_key] = args_value
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -25,7 +25,6 @@ from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVar
|
||||||
from ansible.executor.module_common import _get_action_arg_defaults
|
from ansible.executor.module_common import _get_action_arg_defaults
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.module_utils._internal._datatag import AnsibleTagHelper
|
from ansible.module_utils._internal._datatag import AnsibleTagHelper
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.parsing.mod_args import ModuleArgsParser, RAW_PARAM_MODULES
|
from ansible.parsing.mod_args import ModuleArgsParser, RAW_PARAM_MODULES
|
||||||
from ansible.plugins.action import ActionBase
|
from ansible.plugins.action import ActionBase
|
||||||
from ansible.plugins.loader import action_loader, module_loader, lookup_loader
|
from ansible.plugins.loader import action_loader, module_loader, lookup_loader
|
||||||
|
|
@ -161,7 +160,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch, Notifiable, Delegatabl
|
||||||
def _merge_kv(self, ds):
|
def _merge_kv(self, ds):
|
||||||
if ds is None:
|
if ds is None:
|
||||||
return ""
|
return ""
|
||||||
elif isinstance(ds, string_types):
|
elif isinstance(ds, str):
|
||||||
return ds
|
return ds
|
||||||
elif isinstance(ds, dict):
|
elif isinstance(ds, dict):
|
||||||
buf = ""
|
buf = ""
|
||||||
|
|
|
||||||
|
|
@ -29,7 +29,6 @@ from ansible.module_utils.common.arg_spec import ArgumentSpecValidator
|
||||||
from ansible.module_utils.errors import UnsupportedError
|
from ansible.module_utils.errors import UnsupportedError
|
||||||
from ansible.module_utils.json_utils import _filter_non_json_lines
|
from ansible.module_utils.json_utils import _filter_non_json_lines
|
||||||
from ansible.module_utils.common.json import Direction, get_module_encoder, get_module_decoder
|
from ansible.module_utils.common.json import Direction, get_module_encoder, get_module_decoder
|
||||||
from ansible.module_utils.six import binary_type, string_types, text_type
|
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||||
from ansible.release import __version__
|
from ansible.release import __version__
|
||||||
from ansible.utils.collection_loader import resource_from_fqcr
|
from ansible.utils.collection_loader import resource_from_fqcr
|
||||||
|
|
@ -52,7 +51,7 @@ if t.TYPE_CHECKING:
|
||||||
|
|
||||||
|
|
||||||
def _validate_utf8_json(d):
|
def _validate_utf8_json(d):
|
||||||
if isinstance(d, text_type):
|
if isinstance(d, str):
|
||||||
# Purposefully not using to_bytes here for performance reasons
|
# Purposefully not using to_bytes here for performance reasons
|
||||||
d.encode(encoding='utf-8', errors='strict')
|
d.encode(encoding='utf-8', errors='strict')
|
||||||
elif isinstance(d, dict):
|
elif isinstance(d, dict):
|
||||||
|
|
@ -874,7 +873,7 @@ class ActionBase(ABC, _AnsiblePluginInfoMixin):
|
||||||
# happens sometimes when it is a dir and not on bsd
|
# happens sometimes when it is a dir and not on bsd
|
||||||
if 'checksum' not in mystat['stat']:
|
if 'checksum' not in mystat['stat']:
|
||||||
mystat['stat']['checksum'] = ''
|
mystat['stat']['checksum'] = ''
|
||||||
elif not isinstance(mystat['stat']['checksum'], string_types):
|
elif not isinstance(mystat['stat']['checksum'], str):
|
||||||
raise AnsibleError("Invalid checksum returned by stat: expected a string type but got %s" % type(mystat['stat']['checksum']))
|
raise AnsibleError("Invalid checksum returned by stat: expected a string type but got %s" % type(mystat['stat']['checksum']))
|
||||||
|
|
||||||
return mystat['stat']
|
return mystat['stat']
|
||||||
|
|
@ -1084,7 +1083,7 @@ class ActionBase(ABC, _AnsiblePluginInfoMixin):
|
||||||
# the remote system, which can be read and parsed by the module
|
# the remote system, which can be read and parsed by the module
|
||||||
args_data = ""
|
args_data = ""
|
||||||
for k, v in module_args.items():
|
for k, v in module_args.items():
|
||||||
args_data += '%s=%s ' % (k, shlex.quote(text_type(v)))
|
args_data += '%s=%s ' % (k, shlex.quote(str(v)))
|
||||||
self._transfer_data(args_file_path, args_data)
|
self._transfer_data(args_file_path, args_data)
|
||||||
elif module_style in ('non_native_want_json', 'binary'):
|
elif module_style in ('non_native_want_json', 'binary'):
|
||||||
profile_encoder = get_module_encoder(module_bits.serialization_profile, Direction.CONTROLLER_TO_MODULE)
|
profile_encoder = get_module_encoder(module_bits.serialization_profile, Direction.CONTROLLER_TO_MODULE)
|
||||||
|
|
@ -1169,7 +1168,7 @@ class ActionBase(ABC, _AnsiblePluginInfoMixin):
|
||||||
self._cleanup_remote_tmp = False
|
self._cleanup_remote_tmp = False
|
||||||
|
|
||||||
# NOTE: dnf returns results .. but that made it 'compatible' with squashing, so we allow mappings, for now
|
# NOTE: dnf returns results .. but that made it 'compatible' with squashing, so we allow mappings, for now
|
||||||
if 'results' in data and (not isinstance(data['results'], Sequence) or isinstance(data['results'], string_types)):
|
if 'results' in data and (not isinstance(data['results'], Sequence) or isinstance(data['results'], str)):
|
||||||
data['ansible_module_results'] = data['results']
|
data['ansible_module_results'] = data['results']
|
||||||
del data['results']
|
del data['results']
|
||||||
display.warning("Found internal 'results' key in module return, renamed to 'ansible_module_results'.")
|
display.warning("Found internal 'results' key in module return, renamed to 'ansible_module_results'.")
|
||||||
|
|
@ -1322,16 +1321,16 @@ class ActionBase(ABC, _AnsiblePluginInfoMixin):
|
||||||
|
|
||||||
# stdout and stderr may be either a file-like or a bytes object.
|
# stdout and stderr may be either a file-like or a bytes object.
|
||||||
# Convert either one to a text type
|
# Convert either one to a text type
|
||||||
if isinstance(stdout, binary_type):
|
if isinstance(stdout, bytes):
|
||||||
out = to_text(stdout, errors=encoding_errors)
|
out = to_text(stdout, errors=encoding_errors)
|
||||||
elif not isinstance(stdout, text_type):
|
elif not isinstance(stdout, str):
|
||||||
out = to_text(b''.join(stdout.readlines()), errors=encoding_errors)
|
out = to_text(b''.join(stdout.readlines()), errors=encoding_errors)
|
||||||
else:
|
else:
|
||||||
out = stdout
|
out = stdout
|
||||||
|
|
||||||
if isinstance(stderr, binary_type):
|
if isinstance(stderr, bytes):
|
||||||
err = to_text(stderr, errors=encoding_errors)
|
err = to_text(stderr, errors=encoding_errors)
|
||||||
elif not isinstance(stderr, text_type):
|
elif not isinstance(stderr, str):
|
||||||
err = to_text(b''.join(stderr.readlines()), errors=encoding_errors)
|
err = to_text(b''.join(stderr.readlines()), errors=encoding_errors)
|
||||||
else:
|
else:
|
||||||
err = stderr
|
err = stderr
|
||||||
|
|
|
||||||
|
|
@ -21,7 +21,6 @@ from __future__ import annotations
|
||||||
from collections.abc import Mapping
|
from collections.abc import Mapping
|
||||||
|
|
||||||
from ansible.errors import AnsibleActionFail
|
from ansible.errors import AnsibleActionFail
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.plugins.action import ActionBase
|
from ansible.plugins.action import ActionBase
|
||||||
from ansible.parsing.utils.addresses import parse_address
|
from ansible.parsing.utils.addresses import parse_address
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
@ -74,7 +73,7 @@ class ActionModule(ActionBase):
|
||||||
if groups:
|
if groups:
|
||||||
if isinstance(groups, list):
|
if isinstance(groups, list):
|
||||||
group_list = groups
|
group_list = groups
|
||||||
elif isinstance(groups, string_types):
|
elif isinstance(groups, str):
|
||||||
group_list = groups.split(",")
|
group_list = groups.split(",")
|
||||||
else:
|
else:
|
||||||
raise AnsibleActionFail("Groups must be specified as a list.", obj=groups)
|
raise AnsibleActionFail("Groups must be specified as a list.", obj=groups)
|
||||||
|
|
|
||||||
|
|
@ -20,7 +20,6 @@ import os
|
||||||
import base64
|
import base64
|
||||||
from ansible.errors import AnsibleConnectionFailure, AnsibleError, AnsibleActionFail, AnsibleActionSkip
|
from ansible.errors import AnsibleConnectionFailure, AnsibleError, AnsibleActionFail, AnsibleActionSkip
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.module_utils.parsing.convert_bool import boolean
|
from ansible.module_utils.parsing.convert_bool import boolean
|
||||||
from ansible.plugins.action import ActionBase
|
from ansible.plugins.action import ActionBase
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
@ -52,10 +51,10 @@ class ActionModule(ActionBase):
|
||||||
|
|
||||||
msg = ''
|
msg = ''
|
||||||
# FIXME: validate source and dest are strings; use basic.py and module specs
|
# FIXME: validate source and dest are strings; use basic.py and module specs
|
||||||
if not isinstance(source, string_types):
|
if not isinstance(source, str):
|
||||||
msg = "Invalid type supplied for source option, it must be a string"
|
msg = "Invalid type supplied for source option, it must be a string"
|
||||||
|
|
||||||
if not isinstance(dest, string_types):
|
if not isinstance(dest, str):
|
||||||
msg = "Invalid type supplied for dest option, it must be a string"
|
msg = "Invalid type supplied for dest option, it must be a string"
|
||||||
|
|
||||||
if source is None or dest is None:
|
if source is None or dest is None:
|
||||||
|
|
|
||||||
|
|
@ -17,7 +17,6 @@
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from ansible.plugins.action import ActionBase
|
from ansible.plugins.action import ActionBase
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
|
|
||||||
|
|
||||||
class ActionModule(ActionBase):
|
class ActionModule(ActionBase):
|
||||||
|
|
@ -42,7 +41,7 @@ class ActionModule(ActionBase):
|
||||||
|
|
||||||
group_name = self._task.args.get('key')
|
group_name = self._task.args.get('key')
|
||||||
parent_groups = self._task.args.get('parents', ['all'])
|
parent_groups = self._task.args.get('parents', ['all'])
|
||||||
if isinstance(parent_groups, string_types):
|
if isinstance(parent_groups, str):
|
||||||
parent_groups = [parent_groups]
|
parent_groups = [parent_groups]
|
||||||
|
|
||||||
result['changed'] = False
|
result['changed'] = False
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,6 @@ import pathlib
|
||||||
import ansible.constants as C
|
import ansible.constants as C
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible._internal._datatag._tags import SourceWasEncrypted
|
from ansible._internal._datatag._tags import SourceWasEncrypted
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.plugins.action import ActionBase
|
from ansible.plugins.action import ActionBase
|
||||||
from ansible.utils.vars import combine_vars
|
from ansible.utils.vars import combine_vars
|
||||||
|
|
@ -38,7 +37,7 @@ class ActionModule(ActionBase):
|
||||||
if not self.ignore_files:
|
if not self.ignore_files:
|
||||||
self.ignore_files = list()
|
self.ignore_files = list()
|
||||||
|
|
||||||
if isinstance(self.ignore_files, string_types):
|
if isinstance(self.ignore_files, str):
|
||||||
self.ignore_files = self.ignore_files.split()
|
self.ignore_files = self.ignore_files.split()
|
||||||
|
|
||||||
elif isinstance(self.ignore_files, dict):
|
elif isinstance(self.ignore_files, dict):
|
||||||
|
|
@ -66,7 +65,7 @@ class ActionModule(ActionBase):
|
||||||
self.valid_extensions = self._task.args.get('extensions', self.VALID_FILE_EXTENSIONS)
|
self.valid_extensions = self._task.args.get('extensions', self.VALID_FILE_EXTENSIONS)
|
||||||
|
|
||||||
# convert/validate extensions list
|
# convert/validate extensions list
|
||||||
if isinstance(self.valid_extensions, string_types):
|
if isinstance(self.valid_extensions, str):
|
||||||
self.valid_extensions = list(self.valid_extensions)
|
self.valid_extensions = list(self.valid_extensions)
|
||||||
if not isinstance(self.valid_extensions, list):
|
if not isinstance(self.valid_extensions, list):
|
||||||
raise AnsibleError('Invalid type for "extensions" option, it must be a list')
|
raise AnsibleError('Invalid type for "extensions" option, it must be a list')
|
||||||
|
|
|
||||||
|
|
@ -23,7 +23,6 @@ from ansible.config.manager import ensure_type
|
||||||
from ansible.errors import AnsibleError, AnsibleActionFail
|
from ansible.errors import AnsibleError, AnsibleActionFail
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
|
from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
|
||||||
from ansible.module_utils.parsing.convert_bool import boolean
|
from ansible.module_utils.parsing.convert_bool import boolean
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.plugins.action import ActionBase
|
from ansible.plugins.action import ActionBase
|
||||||
from ansible.template import trust_as_template
|
from ansible.template import trust_as_template
|
||||||
from ansible._internal._templating import _template_vars
|
from ansible._internal._templating import _template_vars
|
||||||
|
|
@ -49,7 +48,7 @@ class ActionModule(ActionBase):
|
||||||
'block_end_string', 'comment_start_string', 'comment_end_string'):
|
'block_end_string', 'comment_start_string', 'comment_end_string'):
|
||||||
if s_type in self._task.args:
|
if s_type in self._task.args:
|
||||||
value = ensure_type(self._task.args[s_type], 'string')
|
value = ensure_type(self._task.args[s_type], 'string')
|
||||||
if value is not None and not isinstance(value, string_types):
|
if value is not None and not isinstance(value, str):
|
||||||
raise AnsibleActionFail("%s is expected to be a string, but got %s instead" % (s_type, type(value)))
|
raise AnsibleActionFail("%s is expected to be a string, but got %s instead" % (s_type, type(value)))
|
||||||
self._task.args[s_type] = value
|
self._task.args[s_type] = value
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -47,7 +47,6 @@ import typing as t
|
||||||
|
|
||||||
import ansible.constants as C
|
import ansible.constants as C
|
||||||
from ansible.errors import AnsibleError, AnsibleFileNotFound, AnsibleConnectionFailure
|
from ansible.errors import AnsibleError, AnsibleFileNotFound, AnsibleConnectionFailure
|
||||||
from ansible.module_utils.six import text_type, binary_type
|
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||||
from ansible.plugins.connection import ConnectionBase
|
from ansible.plugins.connection import ConnectionBase
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
@ -100,7 +99,7 @@ class Connection(ConnectionBase):
|
||||||
display.vvv(u"EXEC {0}".format(to_text(cmd)), host=self._play_context.remote_addr)
|
display.vvv(u"EXEC {0}".format(to_text(cmd)), host=self._play_context.remote_addr)
|
||||||
display.debug("opening command with Popen()")
|
display.debug("opening command with Popen()")
|
||||||
|
|
||||||
if isinstance(cmd, (text_type, binary_type)):
|
if isinstance(cmd, (str, bytes)):
|
||||||
cmd = to_text(cmd)
|
cmd = to_text(cmd)
|
||||||
else:
|
else:
|
||||||
cmd = map(to_text, cmd)
|
cmd = map(to_text, cmd)
|
||||||
|
|
@ -119,7 +118,7 @@ class Connection(ConnectionBase):
|
||||||
|
|
||||||
p = subprocess.Popen(
|
p = subprocess.Popen(
|
||||||
cmd,
|
cmd,
|
||||||
shell=isinstance(cmd, (text_type, binary_type)),
|
shell=isinstance(cmd, (str, bytes)),
|
||||||
executable=executable,
|
executable=executable,
|
||||||
cwd=self.cwd,
|
cwd=self.cwd,
|
||||||
stdin=stdin,
|
stdin=stdin,
|
||||||
|
|
|
||||||
|
|
@ -441,7 +441,6 @@ from ansible.errors import (
|
||||||
AnsibleError,
|
AnsibleError,
|
||||||
AnsibleFileNotFound,
|
AnsibleFileNotFound,
|
||||||
)
|
)
|
||||||
from ansible.module_utils.six import text_type, binary_type
|
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||||
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
||||||
from ansible.plugins.shell.powershell import _replace_stderr_clixml
|
from ansible.plugins.shell.powershell import _replace_stderr_clixml
|
||||||
|
|
@ -1122,7 +1121,7 @@ class Connection(ConnectionBase):
|
||||||
|
|
||||||
p = None
|
p = None
|
||||||
|
|
||||||
if isinstance(cmd, (text_type, binary_type)):
|
if isinstance(cmd, (str, bytes)):
|
||||||
cmd = to_bytes(cmd)
|
cmd = to_bytes(cmd)
|
||||||
else:
|
else:
|
||||||
cmd = list(map(to_bytes, cmd))
|
cmd = list(map(to_bytes, cmd))
|
||||||
|
|
|
||||||
|
|
@ -29,7 +29,6 @@ from ansible._internal._templating import _lazy_containers
|
||||||
from ansible.errors import AnsibleFilterError, AnsibleTypeError, AnsibleTemplatePluginError
|
from ansible.errors import AnsibleFilterError, AnsibleTypeError, AnsibleTemplatePluginError
|
||||||
from ansible.module_utils.datatag import native_type_name
|
from ansible.module_utils.datatag import native_type_name
|
||||||
from ansible.module_utils.common.json import get_encoder, get_decoder
|
from ansible.module_utils.common.json import get_encoder, get_decoder
|
||||||
from ansible.module_utils.six import string_types, integer_types, text_type
|
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||||
from ansible.module_utils.common.collections import is_sequence
|
from ansible.module_utils.common.collections import is_sequence
|
||||||
from ansible.parsing.yaml.dumper import AnsibleDumper
|
from ansible.parsing.yaml.dumper import AnsibleDumper
|
||||||
|
|
@ -278,7 +277,7 @@ def rand(environment, end, start=None, step=None, seed=None):
|
||||||
r = SystemRandom()
|
r = SystemRandom()
|
||||||
else:
|
else:
|
||||||
r = Random(seed)
|
r = Random(seed)
|
||||||
if isinstance(end, integer_types):
|
if isinstance(end, int):
|
||||||
if not start:
|
if not start:
|
||||||
start = 0
|
start = 0
|
||||||
if not step:
|
if not step:
|
||||||
|
|
@ -555,7 +554,7 @@ def subelements(obj, subelements, skip_missing=False):
|
||||||
|
|
||||||
if isinstance(subelements, list):
|
if isinstance(subelements, list):
|
||||||
subelement_list = subelements[:]
|
subelement_list = subelements[:]
|
||||||
elif isinstance(subelements, string_types):
|
elif isinstance(subelements, str):
|
||||||
subelement_list = subelements.split('.')
|
subelement_list = subelements.split('.')
|
||||||
else:
|
else:
|
||||||
raise AnsibleTypeError('subelements must be a list or a string')
|
raise AnsibleTypeError('subelements must be a list or a string')
|
||||||
|
|
@ -617,7 +616,7 @@ def list_of_dict_key_value_elements_to_dict(mylist, key_name='key', value_name='
|
||||||
def path_join(paths):
|
def path_join(paths):
|
||||||
""" takes a sequence or a string, and return a concatenation
|
""" takes a sequence or a string, and return a concatenation
|
||||||
of the different members """
|
of the different members """
|
||||||
if isinstance(paths, string_types):
|
if isinstance(paths, str):
|
||||||
return os.path.join(paths)
|
return os.path.join(paths)
|
||||||
if is_sequence(paths):
|
if is_sequence(paths):
|
||||||
return os.path.join(*paths)
|
return os.path.join(*paths)
|
||||||
|
|
@ -809,7 +808,7 @@ class FilterModule(object):
|
||||||
'dict2items': dict_to_list_of_dict_key_value_elements,
|
'dict2items': dict_to_list_of_dict_key_value_elements,
|
||||||
'items2dict': list_of_dict_key_value_elements_to_dict,
|
'items2dict': list_of_dict_key_value_elements_to_dict,
|
||||||
'subelements': subelements,
|
'subelements': subelements,
|
||||||
'split': partial(unicode_wrap, text_type.split),
|
'split': partial(unicode_wrap, str.split),
|
||||||
# FDI038 - replace this with a standard type compat shim
|
# FDI038 - replace this with a standard type compat shim
|
||||||
'groupby': _cleansed_groupby,
|
'groupby': _cleansed_groupby,
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -29,7 +29,6 @@ from jinja2.filters import pass_environment
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.common.text import formatters
|
from ansible.module_utils.common.text import formatters
|
||||||
from ansible.module_utils.six import binary_type, text_type
|
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|
@ -180,7 +179,7 @@ def rekey_on_member(data, key, duplicates='error'):
|
||||||
|
|
||||||
if isinstance(data, Mapping):
|
if isinstance(data, Mapping):
|
||||||
iterate_over = data.values()
|
iterate_over = data.values()
|
||||||
elif isinstance(data, Iterable) and not isinstance(data, (text_type, binary_type)):
|
elif isinstance(data, Iterable) and not isinstance(data, (str, bytes)):
|
||||||
iterate_over = data
|
iterate_over = data
|
||||||
else:
|
else:
|
||||||
raise AnsibleError("Type is not a valid list, set, or dict")
|
raise AnsibleError("Type is not a valid list, set, or dict")
|
||||||
|
|
|
||||||
|
|
@ -34,7 +34,6 @@ from ansible.parsing.dataloader import DataLoader
|
||||||
from ansible.plugins import AnsiblePlugin, _ConfigurablePlugin
|
from ansible.plugins import AnsiblePlugin, _ConfigurablePlugin
|
||||||
from ansible.plugins.cache import CachePluginAdjudicator
|
from ansible.plugins.cache import CachePluginAdjudicator
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
from ansible.utils.vars import combine_vars, load_extra_vars
|
from ansible.utils.vars import combine_vars, load_extra_vars
|
||||||
|
|
||||||
|
|
@ -439,7 +438,7 @@ class Constructable(_BaseInventoryPlugin):
|
||||||
new_raw_group_names = []
|
new_raw_group_names = []
|
||||||
if use_default:
|
if use_default:
|
||||||
new_raw_group_names.append(default_value_name)
|
new_raw_group_names.append(default_value_name)
|
||||||
elif isinstance(key, string_types):
|
elif isinstance(key, str):
|
||||||
new_raw_group_names.append(key)
|
new_raw_group_names.append(key)
|
||||||
elif isinstance(key, list):
|
elif isinstance(key, list):
|
||||||
for name in key:
|
for name in key:
|
||||||
|
|
|
||||||
|
|
@ -90,7 +90,6 @@ from collections.abc import MutableMapping, MutableSequence
|
||||||
|
|
||||||
from ansible.errors import AnsibleFileNotFound, AnsibleParserError
|
from ansible.errors import AnsibleFileNotFound, AnsibleParserError
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.plugins.inventory import BaseFileInventoryPlugin
|
from ansible.plugins.inventory import BaseFileInventoryPlugin
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
|
|
@ -147,7 +146,7 @@ class InventoryModule(BaseFileInventoryPlugin):
|
||||||
)
|
)
|
||||||
|
|
||||||
def _load_file(self, file_name):
|
def _load_file(self, file_name):
|
||||||
if not file_name or not isinstance(file_name, string_types):
|
if not file_name or not isinstance(file_name, str):
|
||||||
raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_name))
|
raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_name))
|
||||||
|
|
||||||
b_file_name = to_bytes(self.loader.path_dwim(file_name))
|
b_file_name = to_bytes(self.loader.path_dwim(file_name))
|
||||||
|
|
|
||||||
|
|
@ -70,7 +70,6 @@ import os
|
||||||
from collections.abc import MutableMapping
|
from collections.abc import MutableMapping
|
||||||
|
|
||||||
from ansible.errors import AnsibleError, AnsibleParserError
|
from ansible.errors import AnsibleError, AnsibleParserError
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.module_utils.common.text.converters import to_native, to_text
|
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||||
from ansible.plugins.inventory import BaseFileInventoryPlugin
|
from ansible.plugins.inventory import BaseFileInventoryPlugin
|
||||||
|
|
||||||
|
|
@ -136,7 +135,7 @@ class InventoryModule(BaseFileInventoryPlugin):
|
||||||
for section in ['vars', 'children', 'hosts']:
|
for section in ['vars', 'children', 'hosts']:
|
||||||
if section in group_data:
|
if section in group_data:
|
||||||
# convert strings to dicts as these are allowed
|
# convert strings to dicts as these are allowed
|
||||||
if isinstance(group_data[section], string_types):
|
if isinstance(group_data[section], str):
|
||||||
group_data[section] = {group_data[section]: None}
|
group_data[section] = {group_data[section]: None}
|
||||||
|
|
||||||
if not isinstance(group_data[section], (MutableMapping, NoneType)): # type: ignore[misc]
|
if not isinstance(group_data[section], (MutableMapping, NoneType)): # type: ignore[misc]
|
||||||
|
|
|
||||||
|
|
@ -27,7 +27,6 @@ from ansible import _internal, constants as C
|
||||||
from ansible.errors import AnsibleError, AnsiblePluginCircularRedirect, AnsiblePluginRemovedError, AnsibleCollectionUnsupportedVersionError
|
from ansible.errors import AnsibleError, AnsiblePluginCircularRedirect, AnsiblePluginRemovedError, AnsibleCollectionUnsupportedVersionError
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
|
from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
|
||||||
from ansible.module_utils.datatag import deprecator_from_collection_name
|
from ansible.module_utils.datatag import deprecator_from_collection_name
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.parsing.yaml.loader import AnsibleLoader
|
from ansible.parsing.yaml.loader import AnsibleLoader
|
||||||
from ansible._internal._yaml._loader import AnsibleInstrumentedLoader
|
from ansible._internal._yaml._loader import AnsibleInstrumentedLoader
|
||||||
from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE, AnsibleJinja2Plugin
|
from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE, AnsibleJinja2Plugin
|
||||||
|
|
@ -96,7 +95,7 @@ def get_shell_plugin(shell_type=None, executable=None):
|
||||||
|
|
||||||
# mostly for backwards compat
|
# mostly for backwards compat
|
||||||
if executable:
|
if executable:
|
||||||
if isinstance(executable, string_types):
|
if isinstance(executable, str):
|
||||||
shell_filename = os.path.basename(executable)
|
shell_filename = os.path.basename(executable)
|
||||||
try:
|
try:
|
||||||
shell = shell_loader.get(shell_filename)
|
shell = shell_loader.get(shell_filename)
|
||||||
|
|
|
||||||
|
|
@ -134,7 +134,6 @@ import hashlib
|
||||||
|
|
||||||
from ansible.errors import AnsibleError, AnsibleAssertionError
|
from ansible.errors import AnsibleError, AnsibleAssertionError
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.parsing.splitter import parse_kv
|
from ansible.parsing.splitter import parse_kv
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible.utils.encrypt import BaseHash, do_encrypt, random_password, random_salt
|
from ansible.utils.encrypt import BaseHash, do_encrypt, random_password, random_salt
|
||||||
|
|
@ -335,7 +334,7 @@ class LookupModule(LookupBase):
|
||||||
|
|
||||||
# chars still might need more
|
# chars still might need more
|
||||||
chars = params.get('chars', self.get_option('chars'))
|
chars = params.get('chars', self.get_option('chars'))
|
||||||
if chars and isinstance(chars, string_types):
|
if chars and isinstance(chars, str):
|
||||||
tmp_chars = []
|
tmp_chars = []
|
||||||
if u',,' in chars:
|
if u',,' in chars:
|
||||||
tmp_chars.append(u',')
|
tmp_chars.append(u',')
|
||||||
|
|
|
||||||
|
|
@ -83,7 +83,6 @@ _list:
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.module_utils.parsing.convert_bool import boolean
|
from ansible.module_utils.parsing.convert_bool import boolean
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
|
|
||||||
|
|
@ -104,7 +103,7 @@ class LookupModule(LookupBase):
|
||||||
_raise_terms_error()
|
_raise_terms_error()
|
||||||
|
|
||||||
# first term should be a list (or dict), second a string holding the subkey
|
# first term should be a list (or dict), second a string holding the subkey
|
||||||
if not isinstance(terms[0], (list, dict)) or not isinstance(terms[1], string_types):
|
if not isinstance(terms[0], (list, dict)) or not isinstance(terms[1], str):
|
||||||
_raise_terms_error("first a dict or a list, second a string pointing to the subkey")
|
_raise_terms_error("first a dict or a list, second a string pointing to the subkey")
|
||||||
subelements = terms[1].split(".")
|
subelements = terms[1].split(".")
|
||||||
|
|
||||||
|
|
@ -122,7 +121,7 @@ class LookupModule(LookupBase):
|
||||||
flags = {}
|
flags = {}
|
||||||
if len(terms) == 3:
|
if len(terms) == 3:
|
||||||
flags = terms[2]
|
flags = terms[2]
|
||||||
if not isinstance(flags, dict) and not all(isinstance(key, string_types) and key in FLAGS for key in flags):
|
if not isinstance(flags, dict) and not all(isinstance(key, str) and key in FLAGS for key in flags):
|
||||||
_raise_terms_error("the optional third item must be a dict with flags %s" % FLAGS)
|
_raise_terms_error("the optional third item must be a dict with flags %s" % FLAGS)
|
||||||
|
|
||||||
# build_items
|
# build_items
|
||||||
|
|
|
||||||
|
|
@ -52,7 +52,6 @@ import re
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -69,7 +68,7 @@ class LookupModule(LookupBase):
|
||||||
variable_names = list(variables.keys())
|
variable_names = list(variables.keys())
|
||||||
for term in terms:
|
for term in terms:
|
||||||
|
|
||||||
if not isinstance(term, string_types):
|
if not isinstance(term, str):
|
||||||
raise AnsibleError('Invalid setting identifier, "%s" is not a string, it is a %s' % (term, type(term)))
|
raise AnsibleError('Invalid setting identifier, "%s" is not a string, it is a %s' % (term, type(term)))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|
|
||||||
|
|
@ -24,11 +24,8 @@ import secrets
|
||||||
import shlex
|
import shlex
|
||||||
import time
|
import time
|
||||||
|
|
||||||
from collections.abc import Mapping, Sequence
|
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.module_utils.six import text_type, string_types
|
|
||||||
from ansible.plugins import AnsiblePlugin
|
from ansible.plugins import AnsiblePlugin
|
||||||
|
|
||||||
_USER_HOME_PATH_RE = re.compile(r'^~[_.A-Za-z0-9][-_.A-Za-z0-9]*$')
|
_USER_HOME_PATH_RE = re.compile(r'^~[_.A-Za-z0-9][-_.A-Za-z0-9]*$')
|
||||||
|
|
@ -84,7 +81,7 @@ class ShellBase(AnsiblePlugin):
|
||||||
return 'ansible-tmp-%s-%s-%s' % (time.time(), os.getpid(), secrets.randbelow(2**48))
|
return 'ansible-tmp-%s-%s-%s' % (time.time(), os.getpid(), secrets.randbelow(2**48))
|
||||||
|
|
||||||
def env_prefix(self, **kwargs):
|
def env_prefix(self, **kwargs):
|
||||||
return ' '.join(['%s=%s' % (k, self.quote(text_type(v))) for k, v in kwargs.items()])
|
return ' '.join(['%s=%s' % (k, self.quote(str(v))) for k, v in kwargs.items()])
|
||||||
|
|
||||||
def join_path(self, *args):
|
def join_path(self, *args):
|
||||||
return os.path.join(*args)
|
return os.path.join(*args)
|
||||||
|
|
|
||||||
|
|
@ -9,14 +9,14 @@ from __future__ import annotations
|
||||||
from abc import ABCMeta
|
from abc import ABCMeta
|
||||||
from collections.abc import Container, Mapping, Sequence, Set
|
from collections.abc import Container, Mapping, Sequence, Set
|
||||||
|
|
||||||
|
from ansible.module_utils._internal import _no_six
|
||||||
from ansible.module_utils.common.collections import ImmutableDict
|
from ansible.module_utils.common.collections import ImmutableDict
|
||||||
from ansible.module_utils.six import add_metaclass, binary_type, text_type
|
|
||||||
from ansible.utils.singleton import Singleton
|
from ansible.utils.singleton import Singleton
|
||||||
|
|
||||||
|
|
||||||
def _make_immutable(obj):
|
def _make_immutable(obj):
|
||||||
"""Recursively convert a container and objects inside of it into immutable data types"""
|
"""Recursively convert a container and objects inside of it into immutable data types"""
|
||||||
if isinstance(obj, (text_type, binary_type)):
|
if isinstance(obj, (str, bytes)):
|
||||||
# Strings first because they are also sequences
|
# Strings first because they are also sequences
|
||||||
return obj
|
return obj
|
||||||
elif isinstance(obj, Mapping):
|
elif isinstance(obj, Mapping):
|
||||||
|
|
@ -79,11 +79,14 @@ class CLIArgs(ImmutableDict):
|
||||||
return cls(vars(options))
|
return cls(vars(options))
|
||||||
|
|
||||||
|
|
||||||
@add_metaclass(_ABCSingleton)
|
class GlobalCLIArgs(CLIArgs, metaclass=_ABCSingleton):
|
||||||
class GlobalCLIArgs(CLIArgs):
|
|
||||||
"""
|
"""
|
||||||
Globally hold a parsed copy of cli arguments.
|
Globally hold a parsed copy of cli arguments.
|
||||||
|
|
||||||
Only one of these exist per program as it is for global context
|
Only one of these exist per program as it is for global context
|
||||||
"""
|
"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def __getattr__(importable_name):
|
||||||
|
return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type", "add_metaclass")
|
||||||
|
|
|
||||||
|
|
@ -53,11 +53,10 @@ from ansible.constants import config
|
||||||
from ansible.errors import AnsibleAssertionError, AnsiblePromptInterrupt, AnsiblePromptNoninteractive, AnsibleError
|
from ansible.errors import AnsibleAssertionError, AnsiblePromptInterrupt, AnsiblePromptNoninteractive, AnsibleError
|
||||||
from ansible._internal._errors import _error_utils, _error_factory
|
from ansible._internal._errors import _error_utils, _error_factory
|
||||||
from ansible._internal import _event_formatting
|
from ansible._internal import _event_formatting
|
||||||
from ansible.module_utils._internal import _ambient_context, _deprecator, _messages
|
from ansible.module_utils._internal import _ambient_context, _deprecator, _messages, _no_six
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
from ansible.module_utils.datatag import deprecator_from_collection_name
|
from ansible.module_utils.datatag import deprecator_from_collection_name
|
||||||
from ansible._internal._datatag._tags import TrustedAsTemplate
|
from ansible._internal._datatag._tags import TrustedAsTemplate
|
||||||
from ansible.module_utils.six import text_type
|
|
||||||
from ansible.module_utils._internal import _traceback, _errors
|
from ansible.module_utils._internal import _traceback, _errors
|
||||||
from ansible.utils.color import stringc
|
from ansible.utils.color import stringc
|
||||||
from ansible.utils.multiprocessing import context as multiprocessing_context
|
from ansible.utils.multiprocessing import context as multiprocessing_context
|
||||||
|
|
@ -106,7 +105,7 @@ def get_text_width(text: str) -> int:
|
||||||
character and using wcwidth individually, falling back to a value of 0
|
character and using wcwidth individually, falling back to a value of 0
|
||||||
for non-printable wide characters.
|
for non-printable wide characters.
|
||||||
"""
|
"""
|
||||||
if not isinstance(text, text_type):
|
if not isinstance(text, str):
|
||||||
raise TypeError('get_text_width requires text, not %s' % type(text))
|
raise TypeError('get_text_width requires text, not %s' % type(text))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|
@ -1282,3 +1281,7 @@ def _report_config_warnings(deprecator: _messages.PluginInfo) -> None:
|
||||||
# emit any warnings or deprecations
|
# emit any warnings or deprecations
|
||||||
# in the event config fails before display is up, we'll lose warnings -- but that's OK, since everything is broken anyway
|
# in the event config fails before display is up, we'll lose warnings -- but that's OK, since everything is broken anyway
|
||||||
_report_config_warnings(_deprecator.ANSIBLE_CORE_DEPRECATOR)
|
_report_config_warnings(_deprecator.ANSIBLE_CORE_DEPRECATOR)
|
||||||
|
|
||||||
|
|
||||||
|
def __getattr__(importable_name):
|
||||||
|
return _no_six.deprecate(importable_name, __name__, "text_type")
|
||||||
|
|
|
||||||
|
|
@ -17,7 +17,7 @@
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from ansible.module_utils.six import string_types
|
from ansible.module_utils._internal import _no_six
|
||||||
|
|
||||||
|
|
||||||
def pct_to_int(value, num_items, min_value=1):
|
def pct_to_int(value, num_items, min_value=1):
|
||||||
|
|
@ -25,7 +25,7 @@ def pct_to_int(value, num_items, min_value=1):
|
||||||
Converts a given value to a percentage if specified as "x%",
|
Converts a given value to a percentage if specified as "x%",
|
||||||
otherwise converts the given value to an integer.
|
otherwise converts the given value to an integer.
|
||||||
"""
|
"""
|
||||||
if isinstance(value, string_types) and value.endswith('%'):
|
if isinstance(value, str) and value.endswith('%'):
|
||||||
value_pct = int(value.replace("%", ""))
|
value_pct = int(value.replace("%", ""))
|
||||||
return int((value_pct / 100.0) * num_items) or min_value
|
return int((value_pct / 100.0) * num_items) or min_value
|
||||||
else:
|
else:
|
||||||
|
|
@ -47,3 +47,7 @@ def deduplicate_list(original_list):
|
||||||
"""
|
"""
|
||||||
seen = set()
|
seen = set()
|
||||||
return [x for x in original_list if x not in seen and not seen.add(x)]
|
return [x for x in original_list if x not in seen and not seen.add(x)]
|
||||||
|
|
||||||
|
|
||||||
|
def __getattr__(importable_name):
|
||||||
|
return _no_six.deprecate(importable_name, __name__, "string_types")
|
||||||
|
|
|
||||||
|
|
@ -7,9 +7,9 @@ import json
|
||||||
import pickle
|
import pickle
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
|
from ansible.module_utils._internal import _no_six
|
||||||
from ansible.module_utils.common.text.converters import to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
from ansible.module_utils.connection import ConnectionError
|
from ansible.module_utils.connection import ConnectionError
|
||||||
from ansible.module_utils.six import binary_type, text_type
|
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
display = Display()
|
display = Display()
|
||||||
|
|
@ -79,9 +79,9 @@ class JsonRpcServer(object):
|
||||||
|
|
||||||
def response(self, result=None):
|
def response(self, result=None):
|
||||||
response = self.header()
|
response = self.header()
|
||||||
if isinstance(result, binary_type):
|
if isinstance(result, bytes):
|
||||||
result = to_text(result)
|
result = to_text(result)
|
||||||
if not isinstance(result, text_type):
|
if not isinstance(result, str):
|
||||||
response["result_type"] = "pickle"
|
response["result_type"] = "pickle"
|
||||||
result = to_text(pickle.dumps(result), errors='surrogateescape')
|
result = to_text(pickle.dumps(result), errors='surrogateescape')
|
||||||
response['result'] = result
|
response['result'] = result
|
||||||
|
|
@ -110,3 +110,7 @@ class JsonRpcServer(object):
|
||||||
|
|
||||||
def internal_error(self, data=None):
|
def internal_error(self, data=None):
|
||||||
return self.error(-32603, 'Internal error', data)
|
return self.error(-32603, 'Internal error', data)
|
||||||
|
|
||||||
|
|
||||||
|
def __getattr__(importable_name):
|
||||||
|
return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type")
|
||||||
|
|
|
||||||
|
|
@ -11,7 +11,7 @@ import yaml
|
||||||
from ansible import constants as C
|
from ansible import constants as C
|
||||||
from ansible.release import __version__ as ansible_version
|
from ansible.release import __version__ as ansible_version
|
||||||
from ansible.errors import AnsibleError, AnsibleParserError, AnsiblePluginNotFound
|
from ansible.errors import AnsibleError, AnsibleParserError, AnsiblePluginNotFound
|
||||||
from ansible.module_utils.six import string_types
|
from ansible.module_utils._internal import _no_six
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.parsing.plugin_docs import read_docstring
|
from ansible.parsing.plugin_docs import read_docstring
|
||||||
from ansible.parsing.yaml.loader import AnsibleLoader
|
from ansible.parsing.yaml.loader import AnsibleLoader
|
||||||
|
|
@ -133,7 +133,7 @@ def add_fragments(doc, filename, fragment_loader, is_module=False, section='DOCU
|
||||||
|
|
||||||
fragments = doc.pop('extends_documentation_fragment', [])
|
fragments = doc.pop('extends_documentation_fragment', [])
|
||||||
|
|
||||||
if isinstance(fragments, string_types):
|
if isinstance(fragments, str):
|
||||||
fragments = fragments.split(',')
|
fragments = fragments.split(',')
|
||||||
|
|
||||||
unknown_fragments = []
|
unknown_fragments = []
|
||||||
|
|
@ -355,3 +355,7 @@ def get_plugin_docs(plugin, plugin_type, loader, fragment_loader, verbose):
|
||||||
docs[0]['plugin_name'] = context.resolved_fqcn
|
docs[0]['plugin_name'] = context.resolved_fqcn
|
||||||
|
|
||||||
return docs
|
return docs
|
||||||
|
|
||||||
|
|
||||||
|
def __getattr__(importable_name):
|
||||||
|
return _no_six.deprecate(importable_name, __name__, "string_types")
|
||||||
|
|
|
||||||
|
|
@ -7,10 +7,10 @@ from __future__ import annotations
|
||||||
|
|
||||||
from collections.abc import Mapping, Set
|
from collections.abc import Mapping, Set
|
||||||
|
|
||||||
|
from ansible.module_utils._internal import _no_six
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
from ansible.module_utils.common.collections import is_sequence
|
from ansible.module_utils.common.collections import is_sequence
|
||||||
from ansible._internal._datatag._tags import TrustedAsTemplate
|
from ansible._internal._datatag._tags import TrustedAsTemplate
|
||||||
from ansible.module_utils.six import binary_type, text_type
|
|
||||||
|
|
||||||
__all__ = ['AnsibleUnsafe', 'wrap_var']
|
__all__ = ['AnsibleUnsafe', 'wrap_var']
|
||||||
|
|
||||||
|
|
@ -62,9 +62,9 @@ def wrap_var(v):
|
||||||
v = _wrap_set(v)
|
v = _wrap_set(v)
|
||||||
elif is_sequence(v):
|
elif is_sequence(v):
|
||||||
v = _wrap_sequence(v)
|
v = _wrap_sequence(v)
|
||||||
elif isinstance(v, binary_type):
|
elif isinstance(v, bytes):
|
||||||
v = AnsibleUnsafeBytes(v)
|
v = AnsibleUnsafeBytes(v)
|
||||||
elif isinstance(v, text_type):
|
elif isinstance(v, str):
|
||||||
v = AnsibleUnsafeText(v)
|
v = AnsibleUnsafeText(v)
|
||||||
|
|
||||||
return v
|
return v
|
||||||
|
|
@ -76,3 +76,7 @@ def to_unsafe_bytes(*args, **kwargs):
|
||||||
|
|
||||||
def to_unsafe_text(*args, **kwargs):
|
def to_unsafe_text(*args, **kwargs):
|
||||||
return wrap_var(to_text(*args, **kwargs))
|
return wrap_var(to_text(*args, **kwargs))
|
||||||
|
|
||||||
|
|
||||||
|
def __getattr__(importable_name):
|
||||||
|
return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type")
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,6 @@ from collections.abc import MutableMapping, MutableSequence
|
||||||
|
|
||||||
from ansible import constants as C
|
from ansible import constants as C
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils import six
|
|
||||||
from ansible.plugins.loader import connection_loader
|
from ansible.plugins.loader import connection_loader
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
|
|
@ -48,7 +47,7 @@ def module_response_deepcopy(v):
|
||||||
"""
|
"""
|
||||||
if isinstance(v, dict):
|
if isinstance(v, dict):
|
||||||
ret = v.copy()
|
ret = v.copy()
|
||||||
items = six.iteritems(ret)
|
items = ret.items()
|
||||||
elif isinstance(v, list):
|
elif isinstance(v, list):
|
||||||
ret = v[:]
|
ret = v[:]
|
||||||
items = enumerate(ret)
|
items = enumerate(ret)
|
||||||
|
|
@ -80,7 +79,7 @@ def strip_internal_keys(dirty, exceptions=None):
|
||||||
|
|
||||||
# listify to avoid updating dict while iterating over it
|
# listify to avoid updating dict while iterating over it
|
||||||
for k in list(dirty.keys()):
|
for k in list(dirty.keys()):
|
||||||
if isinstance(k, six.string_types):
|
if isinstance(k, str):
|
||||||
if k.startswith('_ansible_') and k not in exceptions:
|
if k.startswith('_ansible_') and k not in exceptions:
|
||||||
del dirty[k]
|
del dirty[k]
|
||||||
continue
|
continue
|
||||||
|
|
|
||||||
|
|
@ -33,7 +33,6 @@ from ansible.inventory.host import Host
|
||||||
from ansible.inventory.helpers import sort_groups, get_group_vars
|
from ansible.inventory.helpers import sort_groups, get_group_vars
|
||||||
from ansible.inventory.manager import InventoryManager
|
from ansible.inventory.manager import InventoryManager
|
||||||
from ansible.module_utils.datatag import native_type_name
|
from ansible.module_utils.datatag import native_type_name
|
||||||
from ansible.module_utils.six import text_type
|
|
||||||
from ansible.parsing.dataloader import DataLoader
|
from ansible.parsing.dataloader import DataLoader
|
||||||
from ansible._internal._templating._engine import TemplateEngine
|
from ansible._internal._templating._engine import TemplateEngine
|
||||||
from ansible.plugins.loader import cache_loader
|
from ansible.plugins.loader import cache_loader
|
||||||
|
|
@ -467,7 +466,7 @@ class VariableManager:
|
||||||
if task._role:
|
if task._role:
|
||||||
variables['role_name'] = task._role.get_name(include_role_fqcn=False)
|
variables['role_name'] = task._role.get_name(include_role_fqcn=False)
|
||||||
variables['role_path'] = task._role._role_path
|
variables['role_path'] = task._role._role_path
|
||||||
variables['role_uuid'] = text_type(task._role._uuid)
|
variables['role_uuid'] = str(task._role._uuid)
|
||||||
variables['ansible_collection_name'] = task._role._role_collection
|
variables['ansible_collection_name'] = task._role._role_collection
|
||||||
variables['ansible_role_name'] = task._role.get_name()
|
variables['ansible_role_name'] = task._role.get_name()
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -6,12 +6,6 @@ import os
|
||||||
import pexpect
|
import pexpect
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from ansible.module_utils.six import PY2
|
|
||||||
|
|
||||||
if PY2:
|
|
||||||
log_buffer = sys.stdout
|
|
||||||
else:
|
|
||||||
log_buffer = sys.stdout.buffer
|
|
||||||
|
|
||||||
env_vars = {
|
env_vars = {
|
||||||
'ANSIBLE_ROLES_PATH': './roles',
|
'ANSIBLE_ROLES_PATH': './roles',
|
||||||
|
|
@ -36,7 +30,7 @@ def run_test(playbook, test_spec, args=None, timeout=10, env=None):
|
||||||
env=env,
|
env=env,
|
||||||
)
|
)
|
||||||
|
|
||||||
vars_prompt_test.logfile = log_buffer
|
vars_prompt_test.logfile = sys.stdout.buffer
|
||||||
for item in test_spec[0]:
|
for item in test_spec[0]:
|
||||||
vars_prompt_test.expect(item[0])
|
vars_prompt_test.expect(item[0])
|
||||||
if item[1]:
|
if item[1]:
|
||||||
|
|
|
||||||
|
|
@@ -7,7 +7,6 @@ import pexpect
 import sys
 import termios
 
-from ansible.module_utils.six import PY2
 
 args = sys.argv[1:]
 
@@ -22,11 +21,6 @@ try:
 except Exception:
     backspace = b'\x7f'
 
-if PY2:
-    log_buffer = sys.stdout
-else:
-    log_buffer = sys.stdout.buffer
-
 os.environ.update(env_vars)
 
 # -- Plain pause -- #
@@ -40,7 +34,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Press enter to continue, Ctrl\+C to interrupt:')
 pause_test.send('\r')
 pause_test.expect('Task after pause')
@@ -56,7 +50,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Press enter to continue, Ctrl\+C to interrupt:')
 pause_test.send('\x03')
 pause_test.expect("Press 'C' to continue the play or 'A' to abort")
@@ -74,7 +68,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Press enter to continue, Ctrl\+C to interrupt:')
 pause_test.send('\x03')
 pause_test.expect("Press 'C' to continue the play or 'A' to abort")
@@ -94,7 +88,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Custom prompt:')
 pause_test.send('\r')
 pause_test.expect('Task after pause')
@@ -110,7 +104,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Custom prompt:')
 pause_test.send('\x03')
 pause_test.expect("Press 'C' to continue the play or 'A' to abort")
@@ -128,7 +122,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Custom prompt:')
 pause_test.send('\x03')
 pause_test.expect("Press 'C' to continue the play or 'A' to abort")
@@ -149,7 +143,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Pausing for \d+ seconds')
 pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
 pause_test.expect('Task after pause')
@@ -164,7 +158,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Pausing for \d+ seconds')
 pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
 pause_test.send('\n')  # test newline does not stop the prompt - waiting for a timeout or ctrl+C
@@ -184,7 +178,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Pausing for \d+ seconds')
 pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
 pause_test.send('\x03')
@@ -206,7 +200,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Pausing for \d+ seconds')
 pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
 pause_test.expect(r"Waiting for two seconds:")
@@ -222,7 +216,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Pausing for \d+ seconds')
 pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
 pause_test.expect(r"Waiting for two seconds:")
@@ -242,7 +236,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Pausing for \d+ seconds')
 pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
 pause_test.expect(r"Waiting for two seconds:")
@@ -264,7 +258,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Enter some text:')
 pause_test.send('hello there')
 pause_test.send('\r')
@@ -290,7 +284,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r'Wait for three seconds:')
 pause_test.send('ignored user input')
 pause_test.expect('Task after pause')
@@ -307,7 +301,7 @@ pause_test = pexpect.spawn(
     env=os.environ
 )
 
-pause_test.logfile = log_buffer
+pause_test.logfile = sys.stdout.buffer
 pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
 pause_test.send('\r')
 pause_test.expect(pexpect.EOF)

@@ -62,7 +62,6 @@ import os
 from collections.abc import MutableMapping
 
 from ansible.errors import AnsibleError, AnsibleParserError
-from ansible.module_utils.six import string_types
 from ansible.module_utils.common.text.converters import to_native, to_text
 from ansible.plugins.inventory import BaseFileInventoryPlugin
 
@@ -126,7 +125,7 @@ class InventoryModule(BaseFileInventoryPlugin):
         for section in ['vars', 'children', 'hosts']:
             if section in group_data:
                 # convert strings to dicts as these are allowed
-                if isinstance(group_data[section], string_types):
+                if isinstance(group_data[section], str):
                     group_data[section] = {group_data[section]: None}
 
                 if not isinstance(group_data[section], (MutableMapping, NoneType)):
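Note: the inventory-plugin hunk above shows the general shape of the six removal in runtime code: isinstance(value, string_types) becomes isinstance(value, str), since Python 3 has no separate unicode type to account for. A small self-contained sketch of the same normalization (the helper name is made up for illustration):

def normalize_section(section):
    # was: isinstance(section, string_types)
    if isinstance(section, str):
        # a bare string is shorthand for a mapping with an empty value
        section = {section: None}
    return section

assert normalize_section('webservers') == {'webservers': None}
assert normalize_section({'webservers': None}) == {'webservers': None}
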
@@ -17,14 +17,13 @@ from voluptuous import Required, Schema, Invalid
 from voluptuous.humanize import humanize_error
 
 from ansible.module_utils.compat.version import StrictVersion, LooseVersion
-from ansible.module_utils.six import string_types
 from ansible.utils.collection_loader import AnsibleCollectionRef
 from ansible.utils.version import SemanticVersion
 
 
 def fqcr(value):
     """Validate a FQCR."""
-    if not isinstance(value, string_types):
+    if not isinstance(value, str):
         raise Invalid('Must be a string that is a FQCR')
     if not AnsibleCollectionRef.is_valid_fqcr(value):
         raise Invalid('Must be a FQCR')
@@ -33,7 +32,7 @@ def fqcr(value):
 
 def fqcr_or_shortname(value):
     """Validate a FQCR or a shortname."""
-    if not isinstance(value, string_types):
+    if not isinstance(value, str):
         raise Invalid('Must be a string that is a FQCR or a short name')
     if '.' in value and not AnsibleCollectionRef.is_valid_fqcr(value):
         raise Invalid('Must be a FQCR or a short name')
@@ -48,7 +47,7 @@ def isodate(value, check_deprecation_date=False, is_tombstone=False):
     else:
         # make sure we have a string
         msg = 'Expected ISO 8601 date string (YYYY-MM-DD), or YAML date'
-        if not isinstance(value, string_types):
+        if not isinstance(value, str):
             raise Invalid(msg)
         # From Python 3.7 in, there is datetime.date.fromisoformat(). For older versions,
         # we have to do things manually.
@@ -80,7 +79,7 @@ def removal_version(value, is_ansible, current_version=None, is_tombstone=False)
         'Removal version must be a string' if is_ansible else
         'Removal version must be a semantic version (https://semver.org/)'
     )
-    if not isinstance(value, string_types):
+    if not isinstance(value, str):
         raise Invalid(msg)
     try:
         if is_ansible:
@@ -191,7 +190,7 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
                 'removal_version': partial(removal_version, is_ansible=is_ansible,
                                            current_version=current_version),
                 'removal_date': partial(isodate, check_deprecation_date=check_deprecation_dates),
-                'warning_text': Any(*string_types),
+                'warning_text': str,
             }
         ),
         avoid_additional_data
@@ -204,7 +203,7 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
                 'removal_version': partial(removal_version, is_ansible=is_ansible,
                                            current_version=current_version, is_tombstone=True),
                 'removal_date': partial(isodate, is_tombstone=True),
-                'warning_text': Any(*string_types),
+                'warning_text': str,
             }
         ),
         avoid_additional_data
@@ -228,18 +227,15 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
     # Adjusted schema for module_utils
     plugin_routing_schema_mu = Any(
         plugins_routing_common_schema.extend({
-            ('redirect'): Any(*string_types)}
+            ('redirect'): str}
         ),
     )
 
-    list_dict_plugin_routing_schema = [{str_type: plugin_routing_schema}
-                                       for str_type in string_types]
+    list_dict_plugin_routing_schema = [{str: plugin_routing_schema}]
 
-    list_dict_plugin_routing_schema_mu = [{str_type: plugin_routing_schema_mu}
-                                          for str_type in string_types]
+    list_dict_plugin_routing_schema_mu = [{str: plugin_routing_schema_mu}]
 
-    list_dict_plugin_routing_schema_modules = [{str_type: plugin_routing_schema_modules}
-                                               for str_type in string_types]
+    list_dict_plugin_routing_schema_modules = [{str: plugin_routing_schema_modules}]
 
     plugin_schema = Schema({
         ('action'): Any(None, *list_dict_plugin_routing_schema),
@@ -267,13 +263,12 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
 
     import_redirection_schema = Any(
         Schema({
-            ('redirect'): Any(*string_types),
+            ('redirect'): str,
             # import_redirect doesn't currently support deprecation
         }, extra=PREVENT_EXTRA)
     )
 
-    list_dict_import_redirection_schema = [{str_type: import_redirection_schema}
-                                           for str_type in string_types]
+    list_dict_import_redirection_schema = [{str: import_redirection_schema}]
 
     # action_groups schema
 
@@ -289,7 +284,7 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
         }, extra=PREVENT_EXTRA)
     }, extra=PREVENT_EXTRA)
     action_group_schema = All([metadata_dict, fqcr_or_shortname], at_most_one_dict)
-    list_dict_action_groups_schema = [{str_type: action_group_schema} for str_type in string_types]
+    list_dict_action_groups_schema = [{str: action_group_schema}]
 
     # top level schema
 
@@ -298,7 +293,7 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
         ('plugin_routing'): Any(plugin_schema),
         ('import_redirection'): Any(None, *list_dict_import_redirection_schema),
         # requires_ansible: In the future we should validate this with SpecifierSet
-        ('requires_ansible'): Any(*string_types),
+        ('requires_ansible'): str,
         ('action_groups'): Any(*list_dict_action_groups_schema),
     }, extra=PREVENT_EXTRA)
 
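Note: in the metadata-validation hunks above, the per-string-type list comprehensions collapse because string_types has exactly one member on Python 3. A hedged sketch of the before/after equivalence, with a placeholder schema standing in for the real ones:

from voluptuous import Schema

plugin_routing_schema = Schema({'redirect': str})  # stand-in for the real schema

# Python 2 era: one {key_type: schema} dict per member of string_types, i.e.
# [{str_type: plugin_routing_schema} for str_type in string_types].
# Python 3: string_types would only ever contain str, so one entry suffices.
list_dict_plugin_routing_schema = [{str: plugin_routing_schema}]
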
@@ -28,7 +28,6 @@ from contextlib import contextmanager
 
 from ansible.executor.powershell.module_manifest import PSModuleDepFinder
 from ansible.module_utils.basic import FILE_COMMON_ARGUMENTS, AnsibleModule
-from ansible.module_utils.six import reraise
 from ansible.module_utils.common.text.converters import to_bytes, to_text
 
 from .utils import CaptureStd, find_executable, get_module_name_from_filename
@@ -153,7 +152,7 @@ def get_py_argument_spec(filename, collection):
             pass
         except BaseException as e:
             # we want to catch all exceptions here, including sys.exit
-            reraise(AnsibleModuleImportError, AnsibleModuleImportError('%s' % e), sys.exc_info()[2])
+            raise AnsibleModuleImportError from e
 
     if not fake.called:
         raise AnsibleModuleNotInitialized()
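Note: the reraise() call above is replaced with native Python 3 exception chaining. A short sketch of the idiom, using a stand-in error class and a deliberately failing body; raise NewError from original keeps the original exception reachable as __cause__, which is what six.reraise emulated with an explicit traceback argument.

class AnsibleModuleImportError(Exception):  # stand-in for the real class
    pass


def load_or_fail():
    try:
        raise RuntimeError('boom')  # stand-in for the failing module import
    except BaseException as exc:
        # chains the original exception instead of six.reraise(type, value, tb)
        raise AnsibleModuleImportError from exc
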
@@ -13,7 +13,6 @@ from urllib.parse import urlparse
 
 from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, MultipleInvalid, Required, Schema, Self, ValueInvalid, Exclusive
 from ansible.constants import DOCUMENTABLE_PLUGINS
-from ansible.module_utils.six import string_types
 from ansible.module_utils.common.collections import is_iterable
 from ansible.module_utils.parsing.convert_bool import boolean
 from ansible.parsing.quoting import unquote
@@ -25,9 +24,8 @@ from antsibull_docs_parser.parser import parse, Context
 
 from .utils import parse_isodate
 
-list_string_types = list(string_types)
-tuple_string_types = tuple(string_types)
-any_string_types = Any(*string_types)
+list_string_types = [str]
+tuple_string_types = (str,)
 
 # Valid DOCUMENTATION.author lines
 # Based on Ansibulbot's extract_github_id()
@@ -57,7 +55,7 @@ FULLY_QUALIFIED_COLLECTION_RESOURCE_RE = re.compile(r'^\w+(?:\.\w+){2,}$')
 
 
 def collection_name(v, error_code=None):
-    if not isinstance(v, string_types):
+    if not isinstance(v, str):
         raise _add_ansible_error_code(
             Invalid('Collection name must be a string'), error_code or 'collection-invalid-name')
     m = COLLECTION_NAME_RE.match(v)
@@ -68,7 +66,7 @@ def collection_name(v, error_code=None):
 
 
 def fqcn(v, error_code=None):
-    if not isinstance(v, string_types):
+    if not isinstance(v, str):
         raise _add_ansible_error_code(
             Invalid('Module/plugin name must be a string'), error_code or 'invalid-documentation')
     m = FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(v)
@@ -87,8 +85,8 @@ def deprecation_versions():
 def version(for_collection=False):
     if for_collection:
         # We do not accept floats for versions in collections
-        return Any(*string_types)
-    return Any(float, *string_types)
+        return str
+    return Any(float, str)
 
 
 def date(error_code=None):
@@ -128,7 +126,7 @@ def _check_url(directive, content):
 
 def doc_string(v):
     """Match a documentation string."""
-    if not isinstance(v, string_types):
+    if not isinstance(v, str):
         raise _add_ansible_error_code(
             Invalid('Must be a string'), 'invalid-documentation')
     errors = []
@@ -216,12 +214,12 @@ seealso_schema = Schema(
             'description': doc_string,
         },
         {
-            Required('ref'): Any(*string_types),
+            Required('ref'): str,
             Required('description'): doc_string,
         },
         {
-            Required('name'): Any(*string_types),
-            Required('link'): Any(*string_types),
+            Required('name'): str,
+            Required('link'): str,
             Required('description'): doc_string,
         },
     ),
@@ -238,7 +236,7 @@ argument_spec_modifiers = {
     'required_together': sequence_of_sequences(min=2),
     'required_one_of': sequence_of_sequences(min=2),
     'required_if': sequence_of_sequences(min=3, max=4),
-    'required_by': Schema({str: Any(list_string_types, tuple_string_types, *string_types)}),
+    'required_by': Schema({str: Any(list_string_types, tuple_string_types, str)}),
 }
 
 
@@ -263,7 +261,7 @@ def options_with_apply_defaults(v):
 def check_removal_version(v, version_field, collection_name_field, error_code='invalid-removal-version'):
     version = v.get(version_field)
     collection_name = v.get(collection_name_field)
-    if not isinstance(version, string_types) or not isinstance(collection_name, string_types):
+    if not isinstance(version, str) or not isinstance(collection_name, str):
         # If they are not strings, schema validation will have already complained.
         return v
     if collection_name == 'ansible.builtin':
@@ -313,9 +311,8 @@ def option_deprecation(v):
 
 
 def argument_spec_schema(for_collection):
-    any_string_types = Any(*string_types)
     schema = {
-        any_string_types: {
+        str: {
             'type': Any(is_callable, *argument_spec_types),
             'elements': Any(*argument_spec_types),
             'default': object,
@@ -336,12 +333,12 @@ def argument_spec_schema(for_collection):
             'deprecated_aliases': Any([All(
                 Any(
                     {
-                        Required('name'): Any(*string_types),
+                        Required('name'): str,
                         Required('date'): date(),
                         Required('collection_name'): collection_name,
                     },
                     {
-                        Required('name'): Any(*string_types),
+                        Required('name'): str,
                         Required('version'): version(for_collection),
                         Required('collection_name'): collection_name,
                     },
@@ -353,13 +350,13 @@ def argument_spec_schema(for_collection):
             )]),
         }
     }
-    schema[any_string_types].update(argument_spec_modifiers)
+    schema[str].update(argument_spec_modifiers)
     schemas = All(
         schema,
-        Schema({any_string_types: no_required_with_default}),
-        Schema({any_string_types: elements_with_list}),
-        Schema({any_string_types: options_with_apply_defaults}),
-        Schema({any_string_types: option_deprecation}),
+        Schema({str: no_required_with_default}),
+        Schema({str: elements_with_list}),
+        Schema({str: options_with_apply_defaults}),
+        Schema({str: option_deprecation}),
     )
     return Schema(schemas)
 
@@ -385,14 +382,15 @@ json_value = Schema(Any(
     int,
     float,
     [Self],
-    *(list({str_type: Self} for str_type in string_types) + list(string_types))
+    {str: Self},
+    str,
 ))
 
 
 def version_added(v, error_code='version-added-invalid', accept_historical=False):
     if 'version_added' in v:
         version_added = v.get('version_added')
-        if isinstance(version_added, string_types):
+        if isinstance(version_added, str):
             # If it is not a string, schema validation will have already complained
             # - or we have a float and we are in ansible/ansible, in which case we're
             # also happy.
@@ -451,7 +449,7 @@ def get_type_checker(v):
         elt_checker, elt_name = get_type_checker({'type': v.get('elements')})
 
         def list_checker(value):
-            if isinstance(value, string_types):
+            if isinstance(value, str):
                 value = [unquote(x.strip()) for x in value.split(',')]
             if not isinstance(value, list):
                 raise ValueError('Value must be a list')
@@ -482,14 +480,14 @@ def get_type_checker(v):
 
     if v_type in ('str', 'string', 'path', 'tmp', 'temppath', 'tmppath'):
         def str_checker(value):
-            if not isinstance(value, string_types):
+            if not isinstance(value, str):
                 raise ValueError('Value must be string')
 
         return str_checker, v_type
 
     if v_type in ('pathspec', 'pathlist'):
         def path_list_checker(value):
-            if not isinstance(value, string_types) and not is_iterable(value):
+            if not isinstance(value, str) and not is_iterable(value):
                 raise ValueError('Value must be string or list of strings')
 
         return path_list_checker, v_type
@@ -588,7 +586,7 @@ def list_dict_option_schema(for_collection, plugin_type):
         'elements': element_types,
     }
     if plugin_type != 'module':
-        basic_option_schema['name'] = Any(*string_types)
+        basic_option_schema['name'] = str
     deprecated_schema = All(
         Schema(
             All(
@@ -605,10 +603,10 @@ def list_dict_option_schema(for_collection, plugin_type):
                 },
                 {
                     # This definition makes sure that everything we require is there
-                    Required('why'): Any(*string_types),
-                    Required(Any('alternatives', 'alternative')): Any(*string_types),
-                    Required(Any('removed_at_date', 'version')): Any(*string_types),
-                    Required('collection_name'): Any(*string_types),
+                    Required('why'): str,
+                    Required(Any('alternatives', 'alternative')): str,
+                    Required(Any('removed_at_date', 'version')): str,
+                    Required('collection_name'): str,
                 },
             ),
             extra=PREVENT_EXTRA
@@ -620,7 +618,7 @@ def list_dict_option_schema(for_collection, plugin_type):
     )
     env_schema = All(
         Schema({
-            Required('name'): Any(*string_types),
+            Required('name'): str,
             'deprecated': deprecated_schema,
             'version_added': version(for_collection),
             'version_added_collection': collection_name,
@@ -629,8 +627,8 @@ def list_dict_option_schema(for_collection, plugin_type):
     )
     ini_schema = All(
         Schema({
-            Required('key'): Any(*string_types),
-            Required('section'): Any(*string_types),
+            Required('key'): str,
+            Required('section'): str,
             'deprecated': deprecated_schema,
             'version_added': version(for_collection),
             'version_added_collection': collection_name,
@@ -639,7 +637,7 @@ def list_dict_option_schema(for_collection, plugin_type):
     )
     vars_schema = All(
         Schema({
-            Required('name'): Any(*string_types),
+            Required('name'): str,
             'deprecated': deprecated_schema,
             'version_added': version(for_collection),
             'version_added_collection': collection_name,
@@ -648,8 +646,8 @@ def list_dict_option_schema(for_collection, plugin_type):
     )
     cli_schema = All(
         Schema({
-            Required('name'): Any(*string_types),
-            'option': Any(*string_types),
+            Required('name'): str,
+            'option': str,
             'deprecated': deprecated_schema,
             'version_added': version(for_collection),
             'version_added_collection': collection_name,
@@ -658,7 +656,7 @@ def list_dict_option_schema(for_collection, plugin_type):
     )
     keyword_schema = All(
         Schema({
-            Required('name'): Any(*string_types),
+            Required('name'): str,
             'deprecated': deprecated_schema,
             'version_added': version(for_collection),
             'version_added_collection': collection_name,
@@ -677,7 +675,7 @@ def list_dict_option_schema(for_collection, plugin_type):
     suboption_schema = dict(basic_option_schema)
     suboption_schema.update({
         # Recursive suboptions
-        'suboptions': Any(None, *list({str_type: Self} for str_type in string_types)),
+        'suboptions': Any(None, {str: Self}),
     })
     suboption_schema = Schema(All(
         suboption_schema,
@@ -686,13 +684,9 @@ def list_dict_option_schema(for_collection, plugin_type):
         check_option_default,
     ), extra=PREVENT_EXTRA)
 
-    # This generates list of dicts with keys from string_types and suboption_schema value
-    # for example in Python 3: {str: suboption_schema}
-    list_dict_suboption_schema = [{str_type: suboption_schema} for str_type in string_types]
-
     option_schema = dict(basic_option_schema)
     option_schema.update({
-        'suboptions': Any(None, *list_dict_suboption_schema),
+        'suboptions': Any(None, {str: suboption_schema}),
     })
     option_schema = Schema(All(
         option_schema,
@@ -703,20 +697,18 @@ def list_dict_option_schema(for_collection, plugin_type):
 
     option_version_added = Schema(
         All({
-            'suboptions': Any(None, *[{str_type: Self} for str_type in string_types]),
+            'suboptions': Any(None, {str: Self}),
         }, partial(version_added, error_code='option-invalid-version-added')),
         extra=ALLOW_EXTRA
     )
 
-    # This generates list of dicts with keys from string_types and option_schema value
-    # for example in Python 3: {str: option_schema}
-    return [{str_type: All(option_schema, option_version_added)} for str_type in string_types]
+    return [{str: All(option_schema, option_version_added)}]
 
 
 def return_contains(v):
     schema = Schema(
         {
-            Required('contains'): Any(dict, list, *string_types)
+            Required('contains'): Any(dict, list, str)
         },
         extra=ALLOW_EXTRA
     )
@@ -752,7 +744,7 @@ def return_schema(for_collection, plugin_type='module'):
 
     inner_return_option_schema = dict(basic_return_option_schema)
     inner_return_option_schema.update({
-        'contains': Any(None, *list({str_type: Self} for str_type in string_types)),
+        'contains': Any(None, {str: Self}),
    })
     return_contains_schema = Any(
         All(
@@ -763,27 +755,23 @@ def return_schema(for_collection, plugin_type='module'):
         Schema(type(None)),
     )
 
-    # This generates list of dicts with keys from string_types and return_contains_schema value
-    # for example in Python 3: {str: return_contains_schema}
-    list_dict_return_contains_schema = [{str_type: return_contains_schema} for str_type in string_types]
-
     return_option_schema = dict(basic_return_option_schema)
     return_option_schema.update({
-        'contains': Any(None, *list_dict_return_contains_schema),
+        'contains': Any(None, {str: return_contains_schema}),
     })
     if plugin_type == 'module':
         # 'returned' is required on top-level
         del return_option_schema['returned']
-        return_option_schema[Required('returned')] = Any(*string_types)
+        return_option_schema[Required('returned')] = str
     return Any(
         All(
             Schema(
                 {
-                    any_string_types: return_option_schema
+                    str: return_option_schema
                 }
             ),
-            Schema({any_string_types: return_contains}),
-            Schema({any_string_types: partial(version_added, error_code='option-invalid-version-added')}),
+            Schema({str: return_contains}),
+            Schema({str: partial(version_added, error_code='option-invalid-version-added')}),
         ),
         Schema(type(None)),
     )
@@ -840,7 +828,7 @@ def author(value):
         value = [value]
 
     for line in value:
-        if not isinstance(line, string_types):
+        if not isinstance(line, str):
             continue  # let schema checks handle
         m = author_line.search(line)
         if not m:
@@ -868,14 +856,14 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi
         'requirements': [doc_string],
         'todo': Any(None, doc_string_or_strings),
         'options': Any(None, *list_dict_option_schema(for_collection, plugin_type)),
-        'extends_documentation_fragment': Any(list_string_types, *string_types),
+        'extends_documentation_fragment': Any(list_string_types, str),
         'version_added_collection': collection_name,
     }
     if plugin_type == 'module':
-        doc_schema_dict[Required('author')] = All(Any(None, list_string_types, *string_types), author)
+        doc_schema_dict[Required('author')] = All(Any(None, list_string_types, str), author)
     else:
         # author is optional for plugins (for now)
-        doc_schema_dict['author'] = All(Any(None, list_string_types, *string_types), author)
+        doc_schema_dict['author'] = All(Any(None, list_string_types, str), author)
     if plugin_type == 'callback':
         doc_schema_dict[Required('type')] = Any('aggregate', 'notification', 'stdout')
 
@@ -896,9 +884,9 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi
         schema = {
             'description': doc_string_or_strings,
             'details': doc_string_or_strings,
-            'support': any_string_types,
-            'version_added_collection': any_string_types,
-            'version_added': any_string_types,
+            'support': str,
+            'version_added_collection': str,
+            'version_added': str,
         }
         if more:
             schema.update(more)
@@ -907,7 +895,7 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi
     doc_schema_dict['attributes'] = Schema(
         All(
             Schema({
-                any_string_types: {
+                str: {
                     Required('description'): doc_string_or_strings,
                     Required('support'): Any('full', 'partial', 'none', 'N/A'),
                     'details': doc_string_or_strings,
@@ -917,12 +905,12 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi
             }, extra=ALLOW_EXTRA),
             partial(version_added, error_code='attribute-invalid-version-added', accept_historical=False),
             Schema({
-                any_string_types: add_default_attributes(),
+                str: add_default_attributes(),
                 'action_group': add_default_attributes({
                     Required('membership'): list_string_types,
                 }),
                 'platform': add_default_attributes({
-                    Required('platforms'): Any(list_string_types, *string_types)
+                    Required('platforms'): Any(list_string_types, str)
                 }),
             }, extra=PREVENT_EXTRA),
         )
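Note: throughout the schema module above, Any(*string_types) and the {str_type: ...} comprehensions reduce to plain str, which voluptuous accepts both as a value validator (an isinstance check) and as a typed dict key. A compact, hypothetical example of both uses; the field names are made up for illustration:

from voluptuous import Any, Required, Schema

# str as a value validator and as a typed dict key, as in the hunks above.
entry_schema = Schema({
    Required('name'): str,
    'link': Any(None, str),
})
options_schema = Schema({str: entry_schema})

options_schema({'copy': {'name': 'ansible.builtin.copy', 'link': None}})
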
@@ -31,7 +31,6 @@ import yaml.reader
 from ansible.module_utils.common.text.converters import to_text
 from ansible.module_utils.basic import AnsibleModule
 from ansible.module_utils.common.yaml import SafeLoader
-from ansible.module_utils.six import string_types
 from ansible.parsing.yaml.loader import AnsibleLoader
 
 
@@ -211,7 +210,7 @@ def parse_isodate(v, allow_date):
         msg = 'Expected ISO 8601 date string (YYYY-MM-DD) or YAML date'
     else:
         msg = 'Expected ISO 8601 date string (YYYY-MM-DD)'
-    if not isinstance(v, string_types):
+    if not isinstance(v, str):
         raise ValueError(msg)
     # From Python 3.7 in, there is datetime.date.fromisoformat(). For older versions,
     # we have to do things manually.

@@ -18,8 +18,6 @@ from __future__ import annotations
 
 import re
 
-from ansible.module_utils.six import text_type
-
 
 _UNSAFE_C = re.compile(u'[\\s\t"]')
 _UNSAFE_CMD = re.compile(u'[\\s\\(\\)\\^\\|%!"<>&]')
@@ -30,7 +28,7 @@ _UNSAFE_CMD = re.compile(u'[\\s\\(\\)\\^\\|%!"<>&]')
 _UNSAFE_PWSH = re.compile(u"(['\u2018\u2019\u201a\u201b])")
 
 
-def quote_c(s):  # type: (text_type) -> text_type
+def quote_c(s):  # type: (str) -> str
     """Quotes a value for the raw Win32 process command line.
 
     Quotes a value to be safely used by anything that calls the Win32
@@ -40,7 +38,7 @@ def quote_c(s):  # type: (text_type) -> text_type
         s: The string to quote.
 
     Returns:
-        (text_type): The quoted string value.
+        (str): The quoted string value.
     """
     # https://docs.microsoft.com/en-us/archive/blogs/twistylittlepassagesallalike/everyone-quotes-command-line-arguments-the-wrong-way
     if not s:
@@ -62,7 +60,7 @@ def quote_c(s):  # type: (text_type) -> text_type
     return u'"{0}"'.format(s)
 
 
-def quote_cmd(s):  # type: (text_type) -> text_type
+def quote_cmd(s):  # type: (str) -> str
     """Quotes a value for cmd.
 
     Quotes a value to be safely used by a command prompt call.
@@ -71,7 +69,7 @@ def quote_cmd(s):  # type: (text_type) -> text_type
     s: The string to quote.
 
     Returns:
-        (text_type): The quoted string value.
+        (str): The quoted string value.
     """
     # https://docs.microsoft.com/en-us/archive/blogs/twistylittlepassagesallalike/everyone-quotes-command-line-arguments-the-wrong-way#a-better-method-of-quoting
     if not s:
@@ -92,7 +90,7 @@ def quote_cmd(s):  # type: (text_type) -> text_type
     return u'^"{0}^"'.format(s)
 
 
-def quote_pwsh(s):  # type: (text_type) -> text_type
+def quote_pwsh(s):  # type: (str) -> str
     """Quotes a value for PowerShell.
 
     Quotes a value to be safely used by a PowerShell expression. The input
@@ -102,7 +100,7 @@ def quote_pwsh(s):  # type: (text_type) -> text_type
     s: The string to quote.
 
     Returns:
-        (text_type): The quoted string value.
+        (str): The quoted string value.
     """
     # https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_quoting_rules?view=powershell-5.1
     if not s:

@@ -26,7 +26,6 @@ from ansible.plugins.loader import init_plugin_loader
 MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py',
                                       'ansible/module_utils/__init__.py',
                                       'ansible/module_utils/basic.py',
-                                      'ansible/module_utils/six/__init__.py',
                                       'ansible/module_utils/_internal/__init__.py',
                                       'ansible/module_utils/_internal/_ansiballz/__init__.py',
                                       'ansible/module_utils/_internal/_ansiballz/_loader.py',
@@ -46,6 +45,7 @@ MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py',
                                       'ansible/module_utils/_internal/_traceback.py',
                                       'ansible/module_utils/_internal/_validation.py',
                                       'ansible/module_utils/_internal/_messages.py',
+                                      'ansible/module_utils/_internal/_no_six.py',
                                       'ansible/module_utils/_internal/_patches/_dataclass_annotation_patch.py',
                                       'ansible/module_utils/_internal/_patches/_socket_patch.py',
                                       'ansible/module_utils/_internal/_patches/_sys_intern_patch.py',
@@ -78,7 +78,6 @@ MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py',
                                       'ansible/module_utils/errors.py',
                                       'ansible/module_utils/parsing/__init__.py',
                                       'ansible/module_utils/parsing/convert_bool.py',
-                                      'ansible/module_utils/six/__init__.py',
                                       ))
 
 ONLY_BASIC_FILE = frozenset(('ansible/module_utils/basic.py',))