mirror of https://github.com/zebrajr/ansible.git (synced 2025-12-06 00:19:48 +01:00)
parent 45d62a726c
commit 0cc771dc3c
@ -107,7 +107,6 @@ from ansible import context
|
|||
from ansible.utils import display as _display
|
||||
from ansible.cli.arguments import option_helpers as opt_help
|
||||
from ansible.inventory.manager import InventoryManager
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||
from ansible.module_utils.common.collections import is_sequence
|
||||
from ansible.module_utils.common.file import is_executable
|
||||
|
|
@ -403,8 +402,8 @@ class CLI(ABC):
|
|||
options = super(MyCLI, self).post_process_args(options)
|
||||
if options.addition and options.subtraction:
|
||||
raise AnsibleOptionsError('Only one of --addition and --subtraction can be specified')
|
||||
if isinstance(options.listofhosts, string_types):
|
||||
options.listofhosts = string_types.split(',')
|
||||
if isinstance(options.listofhosts, str):
|
||||
options.listofhosts = options.listofhosts.split(',')
|
||||
return options
|
||||
"""
|
||||
|
||||
|
|
@ -440,7 +439,7 @@ class CLI(ABC):
|
|||
if options.inventory:
|
||||
|
||||
# should always be list
|
||||
if isinstance(options.inventory, string_types):
|
||||
if isinstance(options.inventory, str):
|
||||
options.inventory = [options.inventory]
|
||||
|
||||
# Ensure full paths when needed
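The CLI hunks above replace six's string_types with the built-in str when normalizing option values. A minimal standalone sketch of the same pattern (normalize_inventory is a hypothetical helper, not the Ansible code):

    def normalize_inventory(inventory):
        """Return the --inventory value as a list of source strings."""
        if isinstance(inventory, str):  # was: isinstance(inventory, string_types)
            inventory = [inventory]
        return list(inventory)

    assert normalize_inventory("hosts.ini") == ["hosts.ini"]
    assert normalize_inventory(["a.yml", "b.yml"]) == ["a.yml", "b.yml"]
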
@ -24,7 +24,6 @@ from ansible.config.manager import ConfigManager
|
|||
from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleRequiredOptionError
|
||||
from ansible.module_utils.common.text.converters import to_native, to_text, to_bytes
|
||||
from ansible._internal import _json
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.parsing.quoting import is_quoted
|
||||
from ansible.parsing.yaml.dumper import AnsibleDumper
|
||||
from ansible.utils.color import stringc
|
||||
|
|
@ -288,21 +287,21 @@ class ConfigCLI(CLI):
|
|||
default = '0'
|
||||
elif default:
|
||||
if stype == 'list':
|
||||
if not isinstance(default, string_types):
|
||||
if not isinstance(default, str):
|
||||
# python lists are not valid env ones
|
||||
try:
|
||||
default = ', '.join(default)
|
||||
except Exception as e:
|
||||
# list of other stuff
|
||||
default = '%s' % to_native(default)
|
||||
if isinstance(default, string_types) and not is_quoted(default):
|
||||
if isinstance(default, str) and not is_quoted(default):
|
||||
default = shlex.quote(default)
|
||||
elif default is None:
|
||||
default = ''
|
||||
|
||||
if subkey in settings[setting] and settings[setting][subkey]:
|
||||
entry = settings[setting][subkey][-1]['name']
|
||||
if isinstance(settings[setting]['description'], string_types):
|
||||
if isinstance(settings[setting]['description'], str):
|
||||
desc = settings[setting]['description']
|
||||
else:
|
||||
desc = '\n#'.join(settings[setting]['description'])
|
||||
|
|
@ -343,7 +342,7 @@ class ConfigCLI(CLI):
|
|||
sections[s] = new_sections[s]
|
||||
continue
|
||||
|
||||
if isinstance(opt['description'], string_types):
|
||||
if isinstance(opt['description'], str):
|
||||
desc = '# (%s) %s' % (opt.get('type', 'string'), opt['description'])
|
||||
else:
|
||||
desc = "# (%s) " % opt.get('type', 'string')
|
||||
|
|
@ -361,7 +360,7 @@ class ConfigCLI(CLI):
|
|||
seen[entry['section']].append(entry['key'])
|
||||
|
||||
default = self.config.template_default(opt.get('default', ''), get_constants())
|
||||
if opt.get('type', '') == 'list' and not isinstance(default, string_types):
|
||||
if opt.get('type', '') == 'list' and not isinstance(default, str):
|
||||
# python lists are not valid ini ones
|
||||
default = ', '.join(default)
|
||||
elif default is None:
@ -32,7 +32,6 @@ from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
|
|||
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||
from ansible.module_utils.common.collections import is_sequence
|
||||
from ansible.module_utils.common.yaml import yaml_dump
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.parsing.plugin_docs import read_docstub
|
||||
from ansible.parsing.yaml.dumper import AnsibleDumper
|
||||
from ansible.parsing.yaml.loader import AnsibleLoader
|
||||
|
|
@ -1274,7 +1273,7 @@ class DocCLI(CLI, RoleMixin):
|
|||
sub_indent = inline_indent + extra_indent
|
||||
if is_sequence(opt['description']):
|
||||
for entry_idx, entry in enumerate(opt['description'], 1):
|
||||
if not isinstance(entry, string_types):
|
||||
if not isinstance(entry, str):
|
||||
raise AnsibleError("Expected string in description of %s at index %s, got %s" % (o, entry_idx, type(entry)))
|
||||
if entry_idx == 1:
|
||||
text.append(key + DocCLI.warp_fill(DocCLI.tty_ify(entry), limit,
|
||||
|
|
@ -1282,7 +1281,7 @@ class DocCLI(CLI, RoleMixin):
|
|||
else:
|
||||
text.append(DocCLI.warp_fill(DocCLI.tty_ify(entry), limit, initial_indent=sub_indent, subsequent_indent=sub_indent))
|
||||
else:
|
||||
if not isinstance(opt['description'], string_types):
|
||||
if not isinstance(opt['description'], str):
|
||||
raise AnsibleError("Expected string in description of %s, got %s" % (o, type(opt['description'])))
|
||||
text.append(key + DocCLI.warp_fill(DocCLI.tty_ify(opt['description']), limit,
|
||||
initial_indent=inline_indent, subsequent_indent=sub_indent, initial_extra=len(extra_indent)))
|
||||
|
|
@ -1466,7 +1465,7 @@ class DocCLI(CLI, RoleMixin):
|
|||
if k not in doc:
|
||||
continue
|
||||
text.append('')
|
||||
if isinstance(doc[k], string_types):
|
||||
if isinstance(doc[k], str):
|
||||
text.append('%s: %s' % (k.upper(), DocCLI.warp_fill(DocCLI.tty_ify(doc[k]),
|
||||
limit - (len(k) + 2), subsequent_indent=opt_indent)))
|
||||
elif isinstance(doc[k], (list, tuple)):
|
||||
|
|
@ -1478,7 +1477,7 @@ class DocCLI(CLI, RoleMixin):
|
|||
if doc.get('examples', False):
|
||||
text.append('')
|
||||
text.append(_format("EXAMPLES:", 'bold'))
|
||||
if isinstance(doc['examples'], string_types):
|
||||
if isinstance(doc['examples'], str):
|
||||
text.append(doc.pop('examples').strip())
|
||||
else:
|
||||
try:
|
||||
|
|
@ -1572,7 +1571,7 @@ class DocCLI(CLI, RoleMixin):
|
|||
continue
|
||||
text.append('')
|
||||
header = _format(k.upper(), 'bold')
|
||||
if isinstance(doc[k], string_types):
|
||||
if isinstance(doc[k], str):
|
||||
text.append('%s: %s' % (header, DocCLI.warp_fill(DocCLI.tty_ify(doc[k]), limit - (len(k) + 2), subsequent_indent=opt_indent)))
|
||||
elif isinstance(doc[k], (list, tuple)):
|
||||
text.append('%s: %s' % (header, ', '.join(doc[k])))
|
||||
|
|
@ -1584,7 +1583,7 @@ class DocCLI(CLI, RoleMixin):
|
|||
if doc.get('plainexamples', False):
|
||||
text.append('')
|
||||
text.append(_format("EXAMPLES:", 'bold'))
|
||||
if isinstance(doc['plainexamples'], string_types):
|
||||
if isinstance(doc['plainexamples'], str):
|
||||
text.append(doc.pop('plainexamples').strip())
|
||||
else:
|
||||
try:
|
||||
|
|
@ -1621,7 +1620,7 @@ def _do_yaml_snippet(doc):
|
|||
|
||||
for o in sorted(doc['options'].keys()):
|
||||
opt = doc['options'][o]
|
||||
if isinstance(opt['description'], string_types):
|
||||
if isinstance(opt['description'], str):
|
||||
desc = DocCLI.tty_ify(opt['description'])
|
||||
else:
|
||||
desc = DocCLI.tty_ify(" ".join(opt['description']))
@ -54,7 +54,6 @@ from ansible.module_utils.common.collections import is_iterable
|
|||
from ansible.module_utils.common.yaml import yaml_dump, yaml_load
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||
from ansible._internal._datatag._tags import TrustedAsTemplate
|
||||
from ansible.module_utils import six
|
||||
from ansible.parsing.dataloader import DataLoader
|
||||
from ansible.playbook.role.requirement import RoleRequirement
|
||||
from ansible._internal._templating._engine import TemplateEngine
|
||||
|
|
@ -65,7 +64,6 @@ from ansible.utils.plugin_docs import get_versioned_doclink
|
|||
from ansible.utils.vars import load_extra_vars
|
||||
|
||||
display = Display()
|
||||
urlparse = six.moves.urllib.parse.urlparse
|
||||
|
||||
|
||||
def with_collection_artifacts_manager(wrapped_method):
@ -27,7 +27,6 @@ from ansible._internal._datatag._tags import TrustedAsTemplate
|
|||
from ansible.module_utils.parsing.convert_bool import boolean
|
||||
from ansible.module_utils.common.text.converters import to_text, to_native
|
||||
from ansible.module_utils.connection import write_to_stream
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.playbook.task import Task
|
||||
from ansible.plugins import get_plugin_class
|
||||
from ansible.plugins.loader import become_loader, cliconf_loader, connection_loader, httpapi_loader, netconf_loader, terminal_loader
|
||||
|
|
@ -340,7 +339,7 @@ class TaskExecutor:
|
|||
})
|
||||
|
||||
# if plugin is loaded, get resolved name, otherwise leave original task connection
|
||||
if self._connection and not isinstance(self._connection, string_types):
|
||||
if self._connection and not isinstance(self._connection, str):
|
||||
task_fields['connection'] = getattr(self._connection, 'ansible_name')
|
||||
|
||||
tr = _RawTaskResult(
@ -25,7 +25,6 @@ from ansible.errors import AnsibleError
|
|||
from ansible.galaxy.user_agent import user_agent
|
||||
from ansible.module_utils.api import retry_with_delays_and_condition
|
||||
from ansible.module_utils.api import generate_jittered_backoff
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||
from ansible.module_utils.urls import open_url, prepare_multipart
|
||||
from ansible.utils.display import Display
|
||||
|
|
@ -595,11 +594,11 @@ class GalaxyAPI:
|
|||
page_size = kwargs.get('page_size', None)
|
||||
author = kwargs.get('author', None)
|
||||
|
||||
if tags and isinstance(tags, string_types):
|
||||
if tags and isinstance(tags, str):
|
||||
tags = tags.split(',')
|
||||
search_url += '&tags_autocomplete=' + '+'.join(tags)
|
||||
|
||||
if platforms and isinstance(platforms, string_types):
|
||||
if platforms and isinstance(platforms, str):
|
||||
platforms = platforms.split(',')
|
||||
search_url += '&platforms_autocomplete=' + '+'.join(platforms)
@ -24,7 +24,6 @@ from ansible.galaxy.dependency_resolution.versioning import (
|
|||
is_pre_release,
|
||||
meets_requirements,
|
||||
)
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.utils.version import SemanticVersion, LooseVersion
|
||||
|
||||
try:
|
||||
|
|
@ -278,7 +277,7 @@ class CollectionDependencyProviderBase(AbstractProvider):
|
|||
# NOTE: Another known mistake is setting a minor part of the SemVer notation
|
||||
# NOTE: skipping the "patch" bit like "1.0" which is assumed non-compliant even
|
||||
# NOTE: after the conversion to string.
|
||||
if not isinstance(version, string_types):
|
||||
if not isinstance(version, str):
|
||||
raise ValueError(version_err)
|
||||
elif version != '*':
|
||||
try:
@ -33,7 +33,6 @@ from ansible._internal import _json, _wrapt
|
|||
from ansible._internal._json import EncryptedStringBehavior
|
||||
from ansible.errors import AnsibleError, AnsibleOptionsError
|
||||
from ansible.inventory.data import InventoryData
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||
from ansible.parsing.utils.addresses import parse_address
|
||||
from ansible.plugins.loader import inventory_loader
|
||||
|
|
@ -112,7 +111,7 @@ def split_host_pattern(pattern):
|
|||
results = (split_host_pattern(p) for p in pattern)
|
||||
# flatten the results
|
||||
return list(itertools.chain.from_iterable(results))
|
||||
elif not isinstance(pattern, string_types):
|
||||
elif not isinstance(pattern, str):
|
||||
pattern = to_text(pattern, errors='surrogate_or_strict')
|
||||
|
||||
# If it's got commas in it, we'll treat it as a straightforward
|
||||
|
|
@ -162,7 +161,7 @@ class InventoryManager(object):
|
|||
# the inventory dirs, files, script paths or lists of hosts
|
||||
if sources is None:
|
||||
self._sources = []
|
||||
elif isinstance(sources, string_types):
|
||||
elif isinstance(sources, str):
|
||||
self._sources = [sources]
|
||||
else:
|
||||
self._sources = sources
86  lib/ansible/module_utils/_internal/_no_six.py  (new file)

@@ -0,0 +1,86 @@
from __future__ import annotations

import sys
import types

from ansible.module_utils.common import warnings


# INLINED FROM THE SIX LIBRARY, see lib/ansible/module_utils/six/__init__.py
# Copyright (c) 2010-2024 Benjamin Peterson
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""

    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(type):

        def __new__(cls, name, this_bases, d):
            if sys.version_info[:2] >= (3, 7):
                # This version introduced PEP 560 that requires a bit
                # of extra care (we mimic what is done by __build_class__).
                resolved_bases = types.resolve_bases(bases)
                if resolved_bases is not bases:
                    d['__orig_bases__'] = bases
            else:
                resolved_bases = bases
            return meta(name, resolved_bases, d)

        @classmethod
        def __prepare__(cls, name, this_bases):
            return meta.__prepare__(name, bases)

    return type.__new__(metaclass, 'temporary_class', (), {})


def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""

    def wrapper(cls):
        orig_vars = cls.__dict__.copy()
        slots = orig_vars.get('__slots__')
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            for slots_var in slots:
                orig_vars.pop(slots_var)
        orig_vars.pop('__dict__', None)
        orig_vars.pop('__weakref__', None)
        if hasattr(cls, '__qualname__'):
            orig_vars['__qualname__'] = cls.__qualname__
        return metaclass(cls.__name__, cls.__bases__, orig_vars)

    return wrapper


def iteritems(d, **kw):
    return iter(d.items(**kw))


_mini_six = {
    "PY2": False,
    "PY3": True,
    "text_type": str,
    "binary_type": bytes,
    "string_types": (str,),
    "integer_types": (int,),
    "iteritems": iteritems,
    "add_metaclass": add_metaclass,
    "with_metaclass": with_metaclass,
}
# INLINED SIX END


def deprecate(importable_name: str, module_name: str, *deprecated_args) -> object:
    """Inject import-time deprecation warnings."""
    if not (importable_name in deprecated_args and (importable := _mini_six.get(importable_name, ...)) is not ...):
        raise AttributeError(f"module {module_name!r} has no attribute {importable_name!r}")

    # TODO Inspect and remove all calls to this function in 2.24
    warnings.deprecate(
        msg=f"Importing {importable_name!r} from {module_name!r} is deprecated.",
        version="2.24",
    )

    return importable

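The new _no_six module above is consumed through module-level __getattr__ hooks (PEP 562) added throughout module_utils. A minimal sketch of how a compat module wires itself up, mirroring the shims added later in this diff (the two re-exported names are illustrative):

    from ansible.module_utils._internal import _no_six

    def __getattr__(importable_name):
        # Warn and return the mini-six replacement for names this module used to
        # re-export; raise AttributeError for anything else.
        return _no_six.deprecate(importable_name, __name__, "string_types", "text_type")
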
@@ -1,15 +1,35 @@
 # Copyright (c), Toshio Kuratomi <tkuratomi@ansible.com> 2016
 # Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
 
-"""
-.. warn:: Use ansible.module_utils.common.text.converters instead.
-"""
 from __future__ import annotations
 
-# Backwards compat for people still calling it from this package
-# pylint: disable=unused-import
-import codecs
-
-from ansible.module_utils.six import PY3, text_type, binary_type
-
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils.common import warnings as _warnings
+
+_mini_six = {
+    "binary_type": bytes,
+    "text_type": str,
+    "PY3": True,
+}
+
+
+def __getattr__(importable_name: str) -> object:
+    """Inject import-time deprecation warnings."""
+    help_text: str | None = None
+    importable: object
+
+    if importable_name == "codecs":
+        import codecs
+        importable = codecs
+    elif importable_name in {"to_bytes", "to_native", "to_text"}:
+        from ansible.module_utils.common.text import converters
+        importable = getattr(converters, importable_name)
+        help_text = "Use ansible.module_utils.common.text.converters instead."
+    elif (importable := _mini_six.get(importable_name, ...)) is ...:
+        raise AttributeError(f"module {__name__!r} has no attribute {importable_name!r}")
+
+    _warnings.deprecate(
+        msg=f"Importing {importable_name!r} from {__name__!r} is deprecated.",
+        version="2.24",
+        help_text=help_text,
+    )
+    return importable

@ -46,6 +46,15 @@ import tempfile
|
|||
import time
|
||||
import traceback
|
||||
|
||||
from collections.abc import (
|
||||
KeysView,
|
||||
Mapping,
|
||||
MutableMapping,
|
||||
Sequence,
|
||||
MutableSequence,
|
||||
Set,
|
||||
MutableSet,
|
||||
)
|
||||
from functools import reduce
|
||||
|
||||
try:
|
||||
|
|
@ -123,13 +132,6 @@ def _get_available_hash_algorithms():
|
|||
AVAILABLE_HASH_ALGORITHMS = _get_available_hash_algorithms()
|
||||
|
||||
from ansible.module_utils.common import json as _json
|
||||
|
||||
from ansible.module_utils.six.moves.collections_abc import (
|
||||
KeysView,
|
||||
Mapping, MutableMapping,
|
||||
Sequence, MutableSequence,
|
||||
Set, MutableSet,
|
||||
)
|
||||
from ansible.module_utils.common.locale import get_best_parsable_locale
|
||||
from ansible.module_utils.common.process import get_bin_path
|
||||
from ansible.module_utils.common.file import (
|
||||
|
|
@ -2186,6 +2188,18 @@ def get_module_path():
|
|||
return os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
|
||||
_mini_six = {
|
||||
"b": lambda s: s.encode("latin-1"),
|
||||
"PY2": False,
|
||||
"PY3": True,
|
||||
"text_type": str,
|
||||
"binary_type": bytes,
|
||||
"string_types": (str,),
|
||||
"integer_types": (int,),
|
||||
"iteritems": lambda d, **kw: iter(d.items(**kw)),
|
||||
}
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
"""Inject import-time deprecation warnings."""
|
||||
if importable_name == 'datetime':
|
||||
|
|
@ -2203,24 +2217,12 @@ def __getattr__(importable_name):
|
|||
elif importable_name == 'repeat':
|
||||
from itertools import repeat
|
||||
importable = repeat
|
||||
elif importable_name in {
|
||||
'PY2', 'PY3', 'b', 'binary_type', 'integer_types',
|
||||
'iteritems', 'string_types', 'text_type',
|
||||
}:
|
||||
import importlib
|
||||
importable = getattr(
|
||||
importlib.import_module('ansible.module_utils.six'),
|
||||
importable_name
|
||||
)
|
||||
elif importable_name == 'map':
|
||||
importable = map
|
||||
elif importable_name == 'shlex_quote':
|
||||
importable = shlex.quote
|
||||
else:
|
||||
raise AttributeError(
|
||||
f'cannot import name {importable_name !r} '
|
||||
f"from '{__name__}' ({__file__ !s})"
|
||||
)
|
||||
elif (importable := _mini_six.get(importable_name, ...)) is ...:
|
||||
raise AttributeError(f"module {__name__!r} has no attribute {importable_name!r}")
|
||||
|
||||
deprecate(
|
||||
msg=f"Importing '{importable_name}' from '{__name__}' is deprecated.",
@ -2,7 +2,7 @@
|
|||
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
|
||||
"""Collections ABC import shim.
|
||||
|
||||
Use `ansible.module_utils.six.moves.collections_abc` instead, which has been available since ansible-core 2.11.
|
||||
Use `collections.abc` instead.
|
||||
This module exists only for backwards compatibility.
|
||||
"""
|
||||
|
||||
|
|
@ -10,7 +10,7 @@ from __future__ import annotations
|
|||
|
||||
# Although this was originally intended for internal use only, it has wide adoption in collections.
|
||||
# This is due in part to sanity tests previously recommending its use over `collections` imports.
|
||||
from ansible.module_utils.six.moves.collections_abc import ( # pylint: disable=unused-import
|
||||
from collections.abc import ( # pylint: disable=unused-import
|
||||
MappingView,
|
||||
ItemsView,
|
||||
KeysView,
|
||||
|
|
@ -25,3 +25,12 @@ from ansible.module_utils.six.moves.collections_abc import ( # pylint: disable=
|
|||
Iterable,
|
||||
Iterator,
|
||||
)
|
||||
|
||||
from ansible.module_utils.common import warnings as _warnings
|
||||
|
||||
|
||||
_warnings.deprecate(
|
||||
msg="The `ansible.module_utils.common._collections_compat` module is deprecated.",
|
||||
help_text="Use `collections.abc` from the Python standard library instead.",
|
||||
version="2.24",
|
||||
)
@ -6,9 +6,10 @@
|
|||
from __future__ import annotations
|
||||
|
||||
|
||||
from collections.abc import Hashable, Mapping, MutableMapping, Sequence # pylint: disable=unused-import
|
||||
|
||||
from ansible.module_utils._internal import _no_six
|
||||
from ansible.module_utils.common import warnings as _warnings
|
||||
from ansible.module_utils.six import binary_type, text_type
|
||||
from ansible.module_utils.six.moves.collections_abc import Hashable, Mapping, MutableMapping, Sequence # pylint: disable=unused-import
|
||||
|
||||
|
||||
class ImmutableDict(Hashable, Mapping):
|
||||
|
|
@ -67,7 +68,7 @@ class ImmutableDict(Hashable, Mapping):
|
|||
|
||||
def is_string(seq):
|
||||
"""Identify whether the input has a string-like type (including bytes)."""
|
||||
return isinstance(seq, (text_type, binary_type))
|
||||
return isinstance(seq, (str, bytes))
|
||||
|
||||
|
||||
def is_iterable(seq, include_strings=False):
|
||||
|
|
@ -114,3 +115,7 @@ def count(seq):
|
|||
for elem in seq:
|
||||
counters[elem] = counters.get(elem, 0) + 1
|
||||
return counters
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type")
@ -7,10 +7,9 @@ from __future__ import annotations
|
|||
|
||||
|
||||
import re
|
||||
from collections.abc import MutableMapping
|
||||
from copy import deepcopy
|
||||
|
||||
from ansible.module_utils.six.moves.collections_abc import MutableMapping
|
||||
|
||||
|
||||
def camel_dict_to_snake_dict(camel_dict, reversible=False, ignore_list=()):
|
||||
"""
@ -6,11 +6,13 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
|
||||
# backward compat
|
||||
from builtins import zip # pylint: disable=unused-import
|
||||
|
||||
from struct import pack
|
||||
from socket import inet_ntoa
|
||||
|
||||
from ansible.module_utils.six.moves import zip
|
||||
|
||||
|
||||
VALID_MASKS = [2**8 - 2**i for i in range(0, 9)]
@ -9,9 +9,19 @@ import os
|
|||
import typing as t
|
||||
|
||||
from collections import deque
|
||||
from itertools import chain
|
||||
from collections.abc import (
|
||||
KeysView,
|
||||
Set,
|
||||
Sequence,
|
||||
Mapping,
|
||||
MutableMapping,
|
||||
MutableSet,
|
||||
MutableSequence,
|
||||
)
|
||||
from itertools import chain # pylint: disable=unused-import
|
||||
|
||||
from ansible.module_utils.common.collections import is_iterable
|
||||
from ansible.module_utils._internal import _no_six
|
||||
from ansible.module_utils._internal._datatag import AnsibleSerializable, AnsibleTagHelper
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||
from ansible.module_utils.common.warnings import warn
|
||||
|
|
@ -33,26 +43,6 @@ from ansible.module_utils.errors import (
|
|||
SubParameterTypeError,
|
||||
)
|
||||
from ansible.module_utils.parsing.convert_bool import BOOLEANS_FALSE, BOOLEANS_TRUE
|
||||
|
||||
from ansible.module_utils.six.moves.collections_abc import (
|
||||
KeysView,
|
||||
Set,
|
||||
Sequence,
|
||||
Mapping,
|
||||
MutableMapping,
|
||||
MutableSet,
|
||||
MutableSequence,
|
||||
)
|
||||
|
||||
from ansible.module_utils.six import (
|
||||
binary_type,
|
||||
integer_types,
|
||||
string_types,
|
||||
text_type,
|
||||
PY2,
|
||||
PY3,
|
||||
)
|
||||
|
||||
from ansible.module_utils.common.validation import (
|
||||
check_mutually_exclusive,
|
||||
check_required_arguments,
|
||||
|
|
@ -243,7 +233,7 @@ def _handle_aliases(argument_spec, parameters, alias_warnings=None, alias_deprec
|
|||
if aliases is None:
|
||||
continue
|
||||
|
||||
if not is_iterable(aliases) or isinstance(aliases, (binary_type, text_type)):
|
||||
if not is_iterable(aliases) or isinstance(aliases, (bytes, str)):
|
||||
raise TypeError('internal error: aliases must be a list or tuple')
|
||||
|
||||
for alias in aliases:
|
||||
|
|
@ -346,7 +336,7 @@ def _list_no_log_values(argument_spec, params):
|
|||
for sub_param in sub_parameters:
|
||||
# Validate dict fields in case they came in as strings
|
||||
|
||||
if isinstance(sub_param, string_types):
|
||||
if isinstance(sub_param, str):
|
||||
sub_param = check_type_dict(sub_param)
|
||||
|
||||
if not isinstance(sub_param, Mapping):
|
||||
|
|
@ -362,7 +352,7 @@ def _return_datastructure_name(obj):
|
|||
""" Return native stringified values from datastructures.
|
||||
|
||||
For use with removing sensitive values pre-jsonification."""
|
||||
if isinstance(obj, (text_type, binary_type)):
|
||||
if isinstance(obj, (str, bytes)):
|
||||
if obj:
|
||||
yield to_native(obj, errors='surrogate_or_strict')
|
||||
return
|
||||
|
|
@ -375,7 +365,7 @@ def _return_datastructure_name(obj):
|
|||
elif obj is None or isinstance(obj, bool):
|
||||
# This must come before int because bools are also ints
|
||||
return
|
||||
elif isinstance(obj, tuple(list(integer_types) + [float])):
|
||||
elif isinstance(obj, (int, float)):
|
||||
yield to_native(obj, nonstring='simplerepr')
|
||||
else:
|
||||
raise TypeError('Unknown parameter type: %s' % (type(obj)))
|
||||
|
|
@ -413,26 +403,23 @@ def _remove_values_conditions(value, no_log_strings, deferred_removals):
|
|||
"""
|
||||
original_value = value
|
||||
|
||||
if isinstance(value, (text_type, binary_type)):
|
||||
if isinstance(value, (str, bytes)):
|
||||
# Need native str type
|
||||
native_str_value = value
|
||||
if isinstance(value, text_type):
|
||||
if isinstance(value, str):
|
||||
value_is_text = True
|
||||
if PY2:
|
||||
native_str_value = to_bytes(value, errors='surrogate_or_strict')
|
||||
elif isinstance(value, binary_type):
|
||||
elif isinstance(value, bytes):
|
||||
value_is_text = False
|
||||
if PY3:
|
||||
native_str_value = to_text(value, errors='surrogate_or_strict')
|
||||
native_str_value = to_text(value, errors='surrogate_or_strict')
|
||||
|
||||
if native_str_value in no_log_strings:
|
||||
return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
|
||||
for omit_me in no_log_strings:
|
||||
native_str_value = native_str_value.replace(omit_me, '*' * 8)
|
||||
|
||||
if value_is_text and isinstance(native_str_value, binary_type):
|
||||
if value_is_text and isinstance(native_str_value, bytes):
|
||||
value = to_text(native_str_value, encoding='utf-8', errors='surrogate_then_replace')
|
||||
elif not value_is_text and isinstance(native_str_value, text_type):
|
||||
elif not value_is_text and isinstance(native_str_value, str):
|
||||
value = to_bytes(native_str_value, encoding='utf-8', errors='surrogate_then_replace')
|
||||
else:
|
||||
value = native_str_value
|
||||
|
|
@ -514,7 +501,7 @@ def _set_defaults(argument_spec, parameters, set_default=True):
|
|||
|
||||
def _sanitize_keys_conditions(value, no_log_strings, ignore_keys, deferred_removals):
|
||||
""" Helper method to :func:`sanitize_keys` to build ``deferred_removals`` and avoid deep recursion. """
|
||||
if isinstance(value, (text_type, binary_type)):
|
||||
if isinstance(value, (str, bytes)):
|
||||
return value
|
||||
|
||||
if isinstance(value, Sequence):
|
||||
|
|
@ -541,7 +528,7 @@ def _sanitize_keys_conditions(value, no_log_strings, ignore_keys, deferred_remov
|
|||
deferred_removals.append((value, new_value))
|
||||
return new_value
|
||||
|
||||
if isinstance(value, tuple(chain(integer_types, (float, bool, NoneType)))):
|
||||
if isinstance(value, (int, float, bool, NoneType)):
|
||||
return value
|
||||
|
||||
if isinstance(value, (datetime.datetime, datetime.date, datetime.time)):
|
||||
|
|
@ -560,8 +547,8 @@ def _validate_elements(wanted_type, parameter, values, options_context=None, err
|
|||
# Get param name for strings so we can later display this value in a useful error message if needed
|
||||
# Only pass 'kwargs' to our checkers and ignore custom callable checkers
|
||||
kwargs = {}
|
||||
if wanted_element_type == 'str' and isinstance(wanted_type, string_types):
|
||||
if isinstance(parameter, string_types):
|
||||
if wanted_element_type == 'str' and isinstance(wanted_type, str):
|
||||
if isinstance(parameter, str):
|
||||
kwargs['param'] = parameter
|
||||
elif isinstance(parameter, dict):
|
||||
kwargs['param'] = list(parameter.keys())[0]
|
||||
|
|
@ -620,7 +607,7 @@ def _validate_argument_types(argument_spec, parameters, prefix='', options_conte
|
|||
# Get param name for strings so we can later display this value in a useful error message if needed
|
||||
# Only pass 'kwargs' to our checkers and ignore custom callable checkers
|
||||
kwargs = {}
|
||||
if wanted_name == 'str' and isinstance(wanted_type, string_types):
|
||||
if wanted_name == 'str' and isinstance(wanted_type, str):
|
||||
kwargs['param'] = list(parameters.keys())[0]
|
||||
|
||||
# Get the name of the parent key if this is a nested option
|
||||
|
|
@ -659,7 +646,7 @@ def _validate_argument_values(argument_spec, parameters, options_context=None, e
|
|||
if choices is None:
|
||||
continue
|
||||
|
||||
if isinstance(choices, (frozenset, KeysView, Sequence)) and not isinstance(choices, (binary_type, text_type)):
|
||||
if isinstance(choices, (frozenset, KeysView, Sequence)) and not isinstance(choices, (bytes, str)):
|
||||
if param in parameters:
|
||||
# Allow one or more when type='list' param with choices
|
||||
if isinstance(parameters[param], list):
|
||||
|
|
@ -745,7 +732,7 @@ def _validate_sub_spec(
|
|||
options_context.append(param)
|
||||
|
||||
# Make sure we can iterate over the elements
|
||||
if not isinstance(parameters[param], Sequence) or isinstance(parameters[param], string_types):
|
||||
if not isinstance(parameters[param], Sequence) or isinstance(parameters[param], str):
|
||||
elements = [parameters[param]]
|
||||
else:
|
||||
elements = parameters[param]
|
||||
|
|
@ -940,3 +927,7 @@ def remove_values(value, no_log_strings):
|
|||
raise TypeError('Unknown container type encountered when removing private values from output')
|
||||
|
||||
return new_value
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type", "integer_types", "string_types", "PY2", "PY3")
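With six removed, the no_log masking in parameters.py no longer needs PY2/PY3 branches: text is always str and bytes only needs one decode. A rough standalone sketch of the simplified logic (mask_no_log is a hypothetical name, not the Ansible function):

    def mask_no_log(value, no_log_strings):
        """Censor a str/bytes value against a set of secret strings."""
        if not isinstance(value, (str, bytes)):
            return value
        native = value if isinstance(value, str) else value.decode('utf-8', 'surrogateescape')
        if native in no_log_strings:
            return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
        for omit_me in no_log_strings:
            native = native.replace(omit_me, '*' * 8)
        return native

    print(mask_no_log('token=abc123', {'abc123'}))  # token=********
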
@ -8,11 +8,8 @@ from __future__ import annotations
|
|||
import codecs
|
||||
import json
|
||||
|
||||
from ansible.module_utils.six import (
|
||||
binary_type,
|
||||
iteritems,
|
||||
text_type,
|
||||
)
|
||||
from ansible.module_utils._internal import _no_six
|
||||
|
||||
|
||||
try:
|
||||
codecs.lookup_error('surrogateescape')
|
||||
|
|
@ -90,7 +87,7 @@ def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
|
|||
|
||||
Added the ``surrogate_then_replace`` error handler and made it the default error handler.
|
||||
"""
|
||||
if isinstance(obj, binary_type):
|
||||
if isinstance(obj, bytes):
|
||||
return obj
|
||||
|
||||
# We're given a text string
|
||||
|
|
@ -104,7 +101,7 @@ def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
|
|||
else:
|
||||
errors = 'replace'
|
||||
|
||||
if isinstance(obj, text_type):
|
||||
if isinstance(obj, str):
|
||||
try:
|
||||
# Try this first as it's the fastest
|
||||
return obj.encode(encoding, errors)
|
||||
|
|
@ -194,7 +191,7 @@ def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
|
|||
|
||||
Added the surrogate_then_replace error handler and made it the default error handler.
|
||||
"""
|
||||
if isinstance(obj, text_type):
|
||||
if isinstance(obj, str):
|
||||
return obj
|
||||
|
||||
if errors in _COMPOSED_ERROR_HANDLERS:
|
||||
|
|
@ -205,7 +202,7 @@ def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
|
|||
else:
|
||||
errors = 'replace'
|
||||
|
||||
if isinstance(obj, binary_type):
|
||||
if isinstance(obj, bytes):
|
||||
# Note: We don't need special handling for surrogate_then_replace
|
||||
# because all bytes will either be made into surrogates or are valid
|
||||
# to decode.
|
||||
|
|
@ -259,10 +256,10 @@ def container_to_bytes(d, encoding='utf-8', errors='surrogate_or_strict'):
|
|||
"""
|
||||
# DTFIX-FUTURE: deprecate
|
||||
|
||||
if isinstance(d, text_type):
|
||||
if isinstance(d, str):
|
||||
return to_bytes(d, encoding=encoding, errors=errors)
|
||||
elif isinstance(d, dict):
|
||||
return dict(container_to_bytes(o, encoding, errors) for o in iteritems(d))
|
||||
return dict(container_to_bytes(o, encoding, errors) for o in d.items())
|
||||
elif isinstance(d, list):
|
||||
return [container_to_bytes(o, encoding, errors) for o in d]
|
||||
elif isinstance(d, tuple):
|
||||
|
|
@ -279,14 +276,18 @@ def container_to_text(d, encoding='utf-8', errors='surrogate_or_strict'):
|
|||
"""
|
||||
# DTFIX-FUTURE: deprecate
|
||||
|
||||
if isinstance(d, binary_type):
|
||||
if isinstance(d, bytes):
|
||||
# Warning, can traceback
|
||||
return to_text(d, encoding=encoding, errors=errors)
|
||||
elif isinstance(d, dict):
|
||||
return dict(container_to_text(o, encoding, errors) for o in iteritems(d))
|
||||
return dict(container_to_text(o, encoding, errors) for o in d.items())
|
||||
elif isinstance(d, list):
|
||||
return [container_to_text(o, encoding, errors) for o in d]
|
||||
elif isinstance(d, tuple):
|
||||
return tuple(container_to_text(o, encoding, errors) for o in d)
|
||||
else:
|
||||
return d
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type", "iteritems")
@ -6,7 +6,7 @@ from __future__ import annotations
|
|||
|
||||
import re
|
||||
|
||||
from ansible.module_utils.six import iteritems
|
||||
from ansible.module_utils._internal import _no_six
|
||||
|
||||
SIZE_RANGES = {
|
||||
'Y': 1 << 80,
|
||||
|
|
@ -117,7 +117,7 @@ def bytes_to_human(size, isbits=False, unit=None):
|
|||
base = 'bits'
|
||||
suffix = ''
|
||||
|
||||
for suffix, limit in sorted(iteritems(SIZE_RANGES), key=lambda item: -item[1]):
|
||||
for suffix, limit in sorted(SIZE_RANGES.items(), key=lambda item: -item[1]):
|
||||
if (unit is None and size >= limit) or unit is not None and unit.upper() == suffix[0]:
|
||||
break
|
||||
|
||||
|
|
@ -127,3 +127,7 @@ def bytes_to_human(size, isbits=False, unit=None):
|
|||
suffix = base
|
||||
|
||||
return '%.2f %s' % (size / limit, suffix)
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "iteritems")
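The formatters change replaces six.iteritems(SIZE_RANGES) with the built-in dict view; iteration order and results are unchanged. A small sketch of the same loop (the values here are illustrative):

    SIZE_RANGES = {'G': 1 << 30, 'M': 1 << 20, 'K': 1 << 10}

    # dict.items() replaces six.iteritems(); visit the largest unit first
    for suffix, limit in sorted(SIZE_RANGES.items(), key=lambda item: -item[1]):
        print(suffix, limit)
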
@ -10,15 +10,13 @@ import os
|
|||
import re
|
||||
|
||||
from ast import literal_eval
|
||||
from ansible.module_utils._internal import _no_six
|
||||
from ansible.module_utils.common import json as _common_json
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.module_utils.common.collections import is_iterable
|
||||
from ansible.module_utils.common.text.formatters import human_to_bytes
|
||||
from ansible.module_utils.common.warnings import deprecate
|
||||
from ansible.module_utils.parsing.convert_bool import boolean
|
||||
from ansible.module_utils.six import (
|
||||
string_types,
|
||||
)
|
||||
|
||||
|
||||
def count_terms(terms, parameters):
|
||||
|
|
@ -43,7 +41,7 @@ def safe_eval(value, locals=None, include_exceptions=False):
|
|||
version="2.21",
|
||||
)
|
||||
# do not allow method calls to modules
|
||||
if not isinstance(value, string_types):
|
||||
if not isinstance(value, str):
|
||||
# already templated to a datavaluestructure, perhaps?
|
||||
if include_exceptions:
|
||||
return (value, None)
|
||||
|
|
@ -194,7 +192,7 @@ def check_required_by(requirements, parameters, options_context=None):
|
|||
if key not in parameters or parameters[key] is None:
|
||||
continue
|
||||
# Support strings (single-item lists)
|
||||
if isinstance(value, string_types):
|
||||
if isinstance(value, str):
|
||||
value = [value]
|
||||
|
||||
if missing := [required for required in value if required not in parameters or parameters[required] is None]:
|
||||
|
|
@ -373,7 +371,7 @@ def check_type_str(value, allow_conversion=True, param=None, prefix=''):
|
|||
:returns: Original value if it is a string, the value converted to a string
|
||||
if allow_conversion=True, or raises a TypeError if allow_conversion=False.
|
||||
"""
|
||||
if isinstance(value, string_types):
|
||||
if isinstance(value, str):
|
||||
return value
|
||||
|
||||
if allow_conversion and value is not None:
|
||||
|
|
@ -403,7 +401,7 @@ def check_type_list(value):
|
|||
return value
|
||||
|
||||
# DTFIX-FUTURE: deprecate legacy comma split functionality, eventually replace with `_check_type_list_strict`
|
||||
if isinstance(value, string_types):
|
||||
if isinstance(value, str):
|
||||
return value.split(",")
|
||||
elif isinstance(value, int) or isinstance(value, float):
|
||||
return [str(value)]
|
||||
|
|
@ -431,7 +429,7 @@ def check_type_dict(value):
|
|||
if isinstance(value, dict):
|
||||
return value
|
||||
|
||||
if isinstance(value, string_types):
|
||||
if isinstance(value, str):
|
||||
if value.startswith("{"):
|
||||
try:
|
||||
return json.loads(value)
|
||||
|
|
@ -494,7 +492,7 @@ def check_type_bool(value):
|
|||
if isinstance(value, bool):
|
||||
return value
|
||||
|
||||
if isinstance(value, string_types) or isinstance(value, (int, float)):
|
||||
if isinstance(value, str) or isinstance(value, (int, float)):
|
||||
return boolean(value)
|
||||
|
||||
raise TypeError('%s cannot be converted to a bool' % type(value))
|
||||
|
|
@ -594,3 +592,7 @@ def check_type_jsonarg(value):
|
|||
return json.dumps(value, cls=_common_json._get_legacy_encoder(), _decode_bytes=True)
|
||||
|
||||
raise TypeError('%s cannot be converted to a json string' % type(value))
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "string_types")
@ -36,9 +36,10 @@ import struct
|
|||
import uuid
|
||||
|
||||
from functools import partial
|
||||
|
||||
from ansible.module_utils._internal import _no_six
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||
from ansible.module_utils.common.json import _get_legacy_encoder
|
||||
from ansible.module_utils.six import iteritems
|
||||
|
||||
|
||||
def write_to_stream(stream, obj):
|
||||
|
|
@ -95,7 +96,7 @@ class ConnectionError(Exception):
|
|||
|
||||
def __init__(self, message, *args, **kwargs):
|
||||
super(ConnectionError, self).__init__(message)
|
||||
for k, v in iteritems(kwargs):
|
||||
for k, v in kwargs.items():
|
||||
setattr(self, k, v)
|
||||
|
||||
|
||||
|
|
@ -149,7 +150,7 @@ class Connection(object):
|
|||
raise ConnectionError(
|
||||
"Unable to decode JSON from response to {0}. Received '{1}'.".format(name, out)
|
||||
)
|
||||
params = [repr(arg) for arg in args] + ['{0}={1!r}'.format(k, v) for k, v in iteritems(kwargs)]
|
||||
params = [repr(arg) for arg in args] + ['{0}={1!r}'.format(k, v) for k, v in kwargs.items()]
|
||||
params = ', '.join(params)
|
||||
raise ConnectionError(
|
||||
"Unable to decode JSON from response to {0}({1}). Received '{2}'.".format(name, params, out)
|
||||
|
|
@ -200,3 +201,7 @@ class Connection(object):
|
|||
sf.close()
|
||||
|
||||
return to_text(response, errors='surrogate_or_strict')
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "iteritems")
@ -24,13 +24,13 @@ import re
|
|||
import sys
|
||||
import time
|
||||
|
||||
from ansible.module_utils._internal import _no_six
|
||||
from ansible.module_utils._internal._concurrent import _futures
|
||||
from ansible.module_utils.common.locale import get_best_parsable_locale
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
from ansible.module_utils.common.text.formatters import bytes_to_human
|
||||
from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
|
||||
from ansible.module_utils.facts.utils import get_file_content, get_file_lines, get_mount_size
|
||||
from ansible.module_utils.six import iteritems
|
||||
|
||||
# import this as a module to ensure we get the same module instance
|
||||
from ansible.module_utils.facts import timeout
|
||||
|
|
@ -653,7 +653,7 @@ class LinuxHardware(Hardware):
|
|||
retval[target].add(entry)
|
||||
except OSError:
|
||||
continue
|
||||
return dict((k, list(sorted(v))) for (k, v) in iteritems(retval))
|
||||
return dict((k, list(sorted(v))) for (k, v) in retval.items())
|
||||
except OSError:
|
||||
return {}
|
||||
|
||||
|
|
@ -665,7 +665,7 @@ class LinuxHardware(Hardware):
|
|||
device = elements[3]
|
||||
target = elements[5]
|
||||
retval[target].add(device)
|
||||
return dict((k, list(sorted(v))) for (k, v) in iteritems(retval))
|
||||
return dict((k, list(sorted(v))) for (k, v) in retval.items())
|
||||
except OSError:
|
||||
return {}
|
||||
|
||||
|
|
@ -750,7 +750,7 @@ class LinuxHardware(Hardware):
|
|||
d = {}
|
||||
d['virtual'] = virtual
|
||||
d['links'] = {}
|
||||
for (link_type, link_values) in iteritems(links):
|
||||
for (link_type, link_values) in links.items():
|
||||
d['links'][link_type] = link_values.get(block, [])
|
||||
diskname = os.path.basename(sysdir)
|
||||
for key in ['vendor', 'model', 'sas_address', 'sas_device_handle']:
|
||||
|
|
@ -801,7 +801,7 @@ class LinuxHardware(Hardware):
|
|||
part_sysdir = sysdir + "/" + partname
|
||||
|
||||
part['links'] = {}
|
||||
for (link_type, link_values) in iteritems(links):
|
||||
for (link_type, link_values) in links.items():
|
||||
part['links'][link_type] = link_values.get(partname, [])
|
||||
|
||||
part['start'] = get_file_content(part_sysdir + "/start", 0)
|
||||
|
|
@ -925,3 +925,7 @@ class LinuxHardwareCollector(HardwareCollector):
|
|||
_fact_class = LinuxHardware
|
||||
|
||||
required_facts = set(['platform'])
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "iteritems")
@ -19,7 +19,7 @@ import os
|
|||
import re
|
||||
import time
|
||||
|
||||
from ansible.module_utils.six.moves import reduce
|
||||
from functools import reduce
|
||||
|
||||
from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
|
||||
from ansible.module_utils.facts.timeout import TimeoutError, timeout
@ -18,12 +18,13 @@ from __future__ import annotations
|
|||
import re
|
||||
import time
|
||||
|
||||
from functools import reduce
|
||||
|
||||
from ansible.module_utils.common.locale import get_best_parsable_locale
|
||||
from ansible.module_utils.common.text.formatters import bytes_to_human
|
||||
from ansible.module_utils.facts.utils import get_file_content, get_mount_size
|
||||
from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
|
||||
from ansible.module_utils.facts import timeout
|
||||
from ansible.module_utils.six.moves import reduce
|
||||
|
||||
|
||||
class SunOSHardware(Hardware):
@ -7,7 +7,7 @@ import ansible.module_utils.compat.typing as t
|
|||
|
||||
from abc import ABCMeta, abstractmethod
|
||||
|
||||
from ansible.module_utils.six import with_metaclass
|
||||
from ansible.module_utils._internal import _no_six
|
||||
from ansible.module_utils.basic import missing_required_lib
|
||||
from ansible.module_utils.common.process import get_bin_path
|
||||
from ansible.module_utils.common.respawn import has_respawned, probe_interpreters_for_module, respawn_module
|
||||
|
|
@ -19,7 +19,7 @@ def get_all_pkg_managers():
|
|||
return {obj.__name__.lower(): obj for obj in get_all_subclasses(PkgMgr) if obj not in (CLIMgr, LibMgr, RespawningLibMgr)}
|
||||
|
||||
|
||||
class PkgMgr(with_metaclass(ABCMeta, object)): # type: ignore[misc]
|
||||
class PkgMgr(metaclass=ABCMeta):
|
||||
|
||||
@abstractmethod
|
||||
def is_available(self, handle_exceptions):
|
||||
|
|
@ -125,3 +125,7 @@ class CLIMgr(PkgMgr):
|
|||
if not handle_exceptions:
|
||||
raise
|
||||
return found
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "with_metaclass")
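The packages fact collector drops six.with_metaclass in favour of the native metaclass keyword; the two spellings below are equivalent (sketch only, trimmed to the abstract method shown in the hunk):

    from abc import ABCMeta, abstractmethod

    # was: class PkgMgr(with_metaclass(ABCMeta, object)):
    class PkgMgr(metaclass=ABCMeta):

        @abstractmethod
        def is_available(self, handle_exceptions):
            ...
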
@ -18,8 +18,7 @@ from __future__ import annotations
|
|||
import os
|
||||
import typing as t
|
||||
|
||||
from ansible.module_utils.six import iteritems
|
||||
|
||||
from ansible.module_utils._internal import _no_six
|
||||
from ansible.module_utils.facts.collector import BaseFactCollector
|
||||
|
||||
|
||||
|
|
@ -31,7 +30,11 @@ class EnvFactCollector(BaseFactCollector):
|
|||
env_facts = {}
|
||||
env_facts['env'] = {}
|
||||
|
||||
for k, v in iteritems(os.environ):
|
||||
for k, v in os.environ.items():
|
||||
env_facts['env'][k] = v
|
||||
|
||||
return env_facts
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "iteritems")
@ -3,16 +3,18 @@
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
import configparser
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
import stat
|
||||
import typing as t
|
||||
|
||||
from io import StringIO
|
||||
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
from ansible.module_utils.facts.utils import get_file_content
|
||||
from ansible.module_utils.facts.collector import BaseFactCollector
|
||||
from ansible.module_utils.six.moves import configparser, StringIO
|
||||
|
||||
|
||||
class LocalFactCollector(BaseFactCollector):
@ -5,7 +5,7 @@ from __future__ import annotations
|
|||
|
||||
import collections.abc as c
|
||||
|
||||
from ansible.module_utils.six import binary_type, text_type
|
||||
from ansible.module_utils._internal import _no_six
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
|
||||
|
||||
|
|
@ -20,7 +20,7 @@ def boolean(value, strict=True):
|
|||
|
||||
normalized_value = value
|
||||
|
||||
if isinstance(value, (text_type, binary_type)):
|
||||
if isinstance(value, (str, bytes)):
|
||||
normalized_value = to_text(value, errors='surrogate_or_strict').lower().strip()
|
||||
|
||||
if not isinstance(value, c.Hashable):
|
||||
|
|
@ -32,3 +32,7 @@ def boolean(value, strict=True):
|
|||
return False
|
||||
|
||||
raise TypeError("The value '%s' is not a valid boolean. Valid booleans include: %s" % (to_text(value), ', '.join(repr(i) for i in BOOLEANS)))
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type")
@ -36,7 +36,6 @@ import select
|
|||
import shlex
|
||||
import subprocess
|
||||
|
||||
from ansible.module_utils.six import b
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||
|
||||
|
||||
|
|
@ -200,7 +199,7 @@ def daemonize(module, cmd):
|
|||
fds = [p.stdout, p.stderr]
|
||||
|
||||
# loop reading output till it is done
|
||||
output = {p.stdout: b(""), p.stderr: b("")}
|
||||
output = {p.stdout: b"", p.stderr: b""}
|
||||
while fds:
|
||||
rfd, wfd, efd = select.select(fds, [], fds, 1)
|
||||
if (rfd + wfd + efd) or p.poll() is None:
|
||||
|
|
@ -234,7 +233,7 @@ def daemonize(module, cmd):
|
|||
os.waitpid(pid, 0)
|
||||
|
||||
# Grab response data after child finishes
|
||||
return_data = b("")
|
||||
return_data = b""
|
||||
while True:
|
||||
rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
|
||||
if pipe[0] in rfd:
@ -383,7 +383,6 @@ from ansible.module_utils.common.file import S_IRWXU_RXG_RXO
|
|||
from ansible.module_utils.common.locale import get_best_parsable_locale
|
||||
from ansible.module_utils.common.respawn import has_respawned, probe_interpreters_for_module, respawn_module
|
||||
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.urls import fetch_file
|
||||
|
||||
DPKG_OPTIONS = 'force-confdef,force-confold'
|
||||
|
|
@ -633,7 +632,7 @@ def expand_pkgspec_from_fnmatches(m, pkgspec, cache):
|
|||
if pkgspec:
|
||||
for pkgspec_pattern in pkgspec:
|
||||
|
||||
if not isinstance(pkgspec_pattern, string_types):
|
||||
if not isinstance(pkgspec_pattern, str):
|
||||
m.fail_json(msg="Invalid type for package name, expected string but got %s" % type(pkgspec_pattern))
|
||||
|
||||
pkgname_pattern, version_cmp, version = package_split(pkgspec_pattern)
@ -131,7 +131,6 @@ import re
|
|||
import tempfile
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six import b, indexbytes
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
|
||||
|
||||
|
|
@ -141,6 +140,7 @@ def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, igno
|
|||
tmp = os.fdopen(tmpfd, 'wb')
|
||||
delimit_me = False
|
||||
add_newline = False
|
||||
b_linesep = os.linesep.encode()
|
||||
|
||||
for f in sorted(os.listdir(src_path)):
|
||||
if compiled_regexp and not compiled_regexp.search(f):
|
||||
|
|
@ -153,7 +153,7 @@ def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, igno
|
|||
|
||||
# always put a newline between fragments if the previous fragment didn't end with a newline.
|
||||
if add_newline:
|
||||
tmp.write(b('\n'))
|
||||
tmp.write(b_linesep)
|
||||
|
||||
# delimiters should only appear between fragments
|
||||
if delimit_me:
|
||||
|
|
@ -163,16 +163,12 @@ def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, igno
|
|||
tmp.write(delimiter)
|
||||
# always make sure there's a newline after the
|
||||
# delimiter, so lines don't run together
|
||||
|
||||
# byte indexing differs on Python 2 and 3,
|
||||
# use indexbytes for compat
|
||||
# chr(10) == '\n'
|
||||
if indexbytes(delimiter, -1) != 10:
|
||||
tmp.write(b('\n'))
|
||||
if not delimiter.endswith(b_linesep):
|
||||
tmp.write(b_linesep)
|
||||
|
||||
tmp.write(fragment_content)
|
||||
delimit_me = True
|
||||
if fragment_content.endswith(b('\n')):
|
||||
if fragment_content.endswith(b_linesep):
|
||||
add_newline = False
|
||||
else:
|
||||
add_newline = True
@ -192,7 +192,6 @@ EXAMPLES = r"""
|
|||
import re
|
||||
import os
|
||||
import tempfile
|
||||
from ansible.module_utils.six import b
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
||||
|
||||
|
|
@ -291,7 +290,8 @@ def main():
|
|||
block = to_bytes(params['block'])
|
||||
marker = to_bytes(params['marker'])
|
||||
present = params['state'] == 'present'
|
||||
blank_line = [b(os.linesep)]
|
||||
b_linesep = os.linesep.encode()
|
||||
blank_line = [b_linesep]
|
||||
|
||||
if not present and not path_exists:
|
||||
module.exit_json(changed=False, msg="File %s not present" % path)
|
||||
|
|
@ -306,11 +306,11 @@ def main():
|
|||
else:
|
||||
insertre = None
|
||||
|
||||
marker0 = re.sub(b(r'{mark}'), b(params['marker_begin']), marker) + b(os.linesep)
|
||||
marker1 = re.sub(b(r'{mark}'), b(params['marker_end']), marker) + b(os.linesep)
|
||||
marker0 = re.sub(r'{mark}'.encode(), to_bytes(params['marker_begin']), marker) + b_linesep
|
||||
marker1 = re.sub(r'{mark}'.encode(), to_bytes(params['marker_end']), marker) + b_linesep
|
||||
if present and block:
|
||||
if not block.endswith(b(os.linesep)):
|
||||
block += b(os.linesep)
|
||||
if not block.endswith(b_linesep):
|
||||
block += b_linesep
|
||||
blocklines = [marker0] + block.splitlines(True) + [marker1]
|
||||
else:
|
||||
blocklines = []
|
||||
|
|
@ -352,15 +352,15 @@ def main():
|
|||
|
||||
# Ensure there is a line separator before the block of lines to be inserted
|
||||
if n0 > 0:
|
||||
if not lines[n0 - 1].endswith(b(os.linesep)):
|
||||
lines[n0 - 1] += b(os.linesep)
|
||||
if not lines[n0 - 1].endswith(b_linesep):
|
||||
lines[n0 - 1] += b_linesep
|
||||
|
||||
# Before the block: check if we need to prepend a blank line
|
||||
# If yes, we need to add the blank line if we are not at the beginning of the file
|
||||
# and the previous line is not a blank line
|
||||
# In both cases, we need to shift by one on the right the inserting position of the block
|
||||
if params['prepend_newline'] and present:
|
||||
if n0 != 0 and lines[n0 - 1] != b(os.linesep):
|
||||
if n0 != 0 and lines[n0 - 1] != b_linesep:
|
||||
lines[n0:n0] = blank_line
|
||||
n0 += 1
|
||||
|
||||
|
|
@ -372,7 +372,7 @@ def main():
|
|||
# and the line right after is not a blank line
|
||||
if params['append_newline'] and present:
|
||||
line_after_block = n0 + len(blocklines)
|
||||
if line_after_block < len(lines) and lines[line_after_block] != b(os.linesep):
|
||||
if line_after_block < len(lines) and lines[line_after_block] != b_linesep:
|
||||
lines[line_after_block:line_after_block] = blank_line
|
||||
|
||||
if lines:
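blockinfile now builds its byte markers from os.linesep.encode() instead of six.b(). A quick sketch of the marker construction, using placeholder marker text rather than the module's real parameters:

    import os
    import re

    b_linesep = os.linesep.encode()          # replaces six.b(os.linesep)
    marker = b'# {mark} ANSIBLE MANAGED BLOCK'
    marker0 = re.sub(rb'{mark}', b'BEGIN', marker) + b_linesep
    marker1 = re.sub(rb'{mark}', b'END', marker) + b_linesep
    print(marker0, marker1)
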
@ -219,13 +219,13 @@ import os
|
|||
import platform
|
||||
import pwd
|
||||
import re
|
||||
import shlex
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.common.file import S_IRWU_RWG_RWO
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
||||
from ansible.module_utils.six.moves import shlex_quote
|
||||
|
||||
|
||||
class CronTabError(Exception):
|
||||
|
|
@ -529,13 +529,13 @@ class CronTab(object):
|
|||
user = ''
|
||||
if self.user:
|
||||
if platform.system() == 'SunOS':
|
||||
return "su %s -c '%s -l'" % (shlex_quote(self.user), shlex_quote(self.cron_cmd))
|
||||
return "su %s -c '%s -l'" % (shlex.quote(self.user), shlex.quote(self.cron_cmd))
|
||||
elif platform.system() == 'AIX':
|
||||
return "%s -l %s" % (shlex_quote(self.cron_cmd), shlex_quote(self.user))
|
||||
return "%s -l %s" % (shlex.quote(self.cron_cmd), shlex.quote(self.user))
|
||||
elif platform.system() == 'HP-UX':
|
||||
return "%s %s %s" % (self.cron_cmd, '-l', shlex_quote(self.user))
|
||||
return "%s %s %s" % (self.cron_cmd, '-l', shlex.quote(self.user))
|
||||
elif pwd.getpwuid(os.getuid())[0] != self.user:
|
||||
user = '-u %s' % shlex_quote(self.user)
|
||||
user = '-u %s' % shlex.quote(self.user)
|
||||
return "%s %s %s" % (self.cron_cmd, user, '-l')
|
||||
|
||||
def _write_execute(self, path):
|
||||
|
|
@ -546,10 +546,10 @@ class CronTab(object):
|
|||
if self.user:
|
||||
if platform.system() in ['SunOS', 'HP-UX', 'AIX']:
|
||||
return "chown %s %s ; su '%s' -c '%s %s'" % (
|
||||
shlex_quote(self.user), shlex_quote(path), shlex_quote(self.user), self.cron_cmd, shlex_quote(path))
|
||||
shlex.quote(self.user), shlex.quote(path), shlex.quote(self.user), self.cron_cmd, shlex.quote(path))
|
||||
elif pwd.getpwuid(os.getuid())[0] != self.user:
|
||||
user = '-u %s' % shlex_quote(self.user)
|
||||
return "%s %s %s" % (self.cron_cmd, user, shlex_quote(path))
|
||||
user = '-u %s' % shlex.quote(self.user)
|
||||
return "%s %s %s" % (self.cron_cmd, user, shlex.quote(path))
|
||||
|
||||
|
||||
def main():
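The cron module switches from six.moves.shlex_quote to the standard library shlex.quote; both produce identical shell quoting. Minimal sketch with a made-up username:

    import shlex

    user = 'deploy user'                             # hypothetical value
    print("crontab -l -u %s" % shlex.quote(user))    # crontab -l -u 'deploy user'
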
@ -250,7 +250,6 @@ from ansible.module_utils.common.file import S_IRWXU_RXG_RXO, S_IRWU_RG_RO
|
|||
from ansible.module_utils.common.respawn import has_respawned, probe_interpreters_for_module, respawn_module
|
||||
from ansible.module_utils.common.text.converters import to_bytes
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.module_utils.six import raise_from # type: ignore[attr-defined]
|
||||
from ansible.module_utils.urls import generic_urlparse
|
||||
from ansible.module_utils.urls import open_url
|
||||
from ansible.module_utils.urls import get_user_agent
|
||||
|
|
@ -339,7 +338,7 @@ def write_signed_by_key(module, v, slug):
|
|||
try:
|
||||
r = open_url(v, http_agent=get_user_agent())
|
||||
except Exception as exc:
|
||||
raise_from(RuntimeError(to_native(exc)), exc)
|
||||
raise RuntimeError('Could not fetch signed_by key.') from exc
|
||||
else:
|
||||
b_data = r.read()
|
||||
else:
|
||||
|
|
@ -587,14 +586,9 @@ def main():
|
|||
elif is_sequence(value):
|
||||
value = format_list(value)
|
||||
elif key == 'signed_by':
|
||||
try:
|
||||
key_changed, signed_by_filename, signed_by_data = write_signed_by_key(module, value, slug)
|
||||
value = signed_by_filename or signed_by_data
|
||||
changed |= key_changed
|
||||
except RuntimeError as exc:
|
||||
module.fail_json(
|
||||
msg='Could not fetch signed_by key: %s' % to_native(exc)
|
||||
)
|
||||
key_changed, signed_by_filename, signed_by_data = write_signed_by_key(module, value, slug)
|
||||
value = signed_by_filename or signed_by_data
|
||||
changed |= key_changed
|
||||
|
||||
if value.count('\n') > 0:
|
||||
value = format_multiline(value)
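# Sketch of the exception-chaining change above: six.raise_from(new, cause) becomes the
# native "raise ... from" statement, keeping the original exception as __cause__.
# fetch_key is a stand-in for the real open_url call.
def fetch_key(url):
    raise OSError('connection refused')

def write_signed_by_key(url):
    try:
        return fetch_key(url)
    except Exception as exc:
        # preserves the original exception as __cause__, exactly like raise_from() did
        raise RuntimeError('Could not fetch signed_by key.') from exc
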
@@ -291,7 +291,6 @@ import time

from ansible.module_utils.common.text.converters import to_text, to_native
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import string_types


class _Object:

@@ -496,7 +495,7 @@ def main():

params = module.params

if params['mode'] and not isinstance(params['mode'], string_types):
if params['mode'] and not isinstance(params['mode'], str):
module.fail_json(
msg="argument 'mode' is not a string and conversion is not allowed, value is of type %s" % params['mode'].__class__.__name__
)
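# Sketch of the type-check pattern swapped throughout this change: on Python 3,
# six.string_types is simply (str,), so isinstance checks can name str directly.
def check_mode(mode):
    # mirrors the module's guard; the error text here is illustrative
    if mode and not isinstance(mode, str):
        raise TypeError("argument 'mode' is not a string, got %s" % mode.__class__.__name__)
    return mode

check_mode('0644')        # passes
# check_mode(0o644)       # would raise TypeError
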
@@ -374,9 +374,9 @@ import shutil
import tempfile

from datetime import datetime, timezone
from urllib.parse import urlsplit

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import urlsplit
from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.urls import fetch_url, url_argument_spec

@@ -343,7 +343,6 @@ from ansible.module_utils.common.text.converters import to_native, to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.locale import get_best_parsable_locale
from ansible.module_utils.common.process import get_bin_path
from ansible.module_utils.six import b, string_types


def relocate_repo(module, result, repo_dir, old_repo_dir, worktree_dir):

@@ -443,12 +442,12 @@ def write_ssh_wrapper(module):
fd, wrapper_path = tempfile.mkstemp()

# use existing git_ssh/ssh_command, fallback to 'ssh'
template = b("""#!/bin/sh
template = """#!/bin/sh
%s $GIT_SSH_OPTS "$@"
""" % os.environ.get('GIT_SSH', os.environ.get('GIT_SSH_COMMAND', 'ssh')))
""" % os.environ.get('GIT_SSH', os.environ.get('GIT_SSH_COMMAND', 'ssh'))

# write it
with os.fdopen(fd, 'w+b') as fh:
with os.fdopen(fd, 'w') as fh:
fh.write(template)

# set execute
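# Sketch of the write_ssh_wrapper change above: the template is now a plain str and the
# temp file is opened in text mode, so no b()/bytes round-trip is needed before writing.
import os
import stat
import tempfile

fd, wrapper_path = tempfile.mkstemp()
template = """#!/bin/sh
%s $GIT_SSH_OPTS "$@"
""" % os.environ.get('GIT_SSH', os.environ.get('GIT_SSH_COMMAND', 'ssh'))

with os.fdopen(fd, 'w') as fh:   # text mode matches the str template
    fh.write(template)
os.chmod(wrapper_path, os.stat(wrapper_path).st_mode | stat.S_IEXEC)
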
@@ -1257,7 +1256,7 @@ def main():

# evaluate and set the umask before doing anything else
if umask is not None:
if not isinstance(umask, string_types):
if not isinstance(umask, str):
module.fail_json(msg="umask must be defined as a quoted octal integer")
try:
umask = int(umask, 8)

@@ -180,7 +180,6 @@ from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.locale import get_best_parsable_locale
from ansible.module_utils.common.sys_info import get_platform_subclass
from ansible.module_utils.service import fail_if_missing, is_systemd_managed
from ansible.module_utils.six import b


class Service(object):

@@ -292,8 +291,8 @@ class Service(object):
# chkconfig localizes messages and we're screen scraping so make
# sure we use the C locale
p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=lang_env, preexec_fn=lambda: os.close(pipe[1]))
stdout = b("")
stderr = b("")
stdout = b""
stderr = b""
fds = [p.stdout, p.stderr]
# Wait for all output, or until the main process is dead and its output is done.
while fds:

@@ -322,7 +321,7 @@ class Service(object):
os.close(pipe[1])
os.waitpid(pid, 0)
# Wait for data from daemon process and process it.
data = b("")
data = b""
while True:
rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
if pipe[0] in rfd:
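# Sketch of the literal swap above: six's b('') helper built a bytes object at runtime;
# a b'' literal is the direct Python 3 spelling and accumulates output the same way.
stdout = b""
stderr = b""

for chunk in (b"loaded ", b"modules\n"):
    stdout += chunk

assert stdout == b"loaded modules\n" and stderr == b""
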
@@ -438,13 +438,12 @@ import os
import re
import shutil
import tempfile
from collections.abc import Mapping, Sequence
from datetime import datetime, timezone
from urllib.parse import urlencode, urljoin

from ansible.module_utils.basic import AnsibleModule, sanitize_keys
from ansible.module_utils.six import binary_type, iteritems, string_types
from ansible.module_utils.six.moves.urllib.parse import urlencode, urljoin
from ansible.module_utils.common.text.converters import to_native, to_text
from ansible.module_utils.six.moves.collections_abc import Mapping, Sequence
from ansible.module_utils.urls import (
fetch_url,
get_response_filename,

@@ -479,7 +478,7 @@ def write_file(module, dest, content, resp):
try:
fd, tmpsrc = tempfile.mkstemp(dir=module.tmpdir)
with os.fdopen(fd, 'wb') as f:
if isinstance(content, binary_type):
if isinstance(content, bytes):
f.write(content)
else:
shutil.copyfileobj(content, f)

@@ -521,14 +520,14 @@ def kv_list(data):

def form_urlencoded(body):
""" Convert data into a form-urlencoded string """
if isinstance(body, string_types):
if isinstance(body, str):
return body

if isinstance(body, (Mapping, Sequence)):
result = []
# Turn a list of lists into a list of tuples that urlencode accepts
for key, values in kv_list(body):
if isinstance(values, string_types) or not isinstance(values, (Mapping, Sequence)):
if isinstance(values, str) or not isinstance(values, (Mapping, Sequence)):
values = [values]
for value in values:
if value is not None:

@@ -641,12 +640,12 @@ def main():

if body_format == 'json':
# Encode the body unless its a string, then assume it is pre-formatted JSON
if not isinstance(body, string_types):
if not isinstance(body, str):
body = json.dumps(body)
if 'content-type' not in [header.lower() for header in dict_headers]:
dict_headers['Content-Type'] = 'application/json'
elif body_format == 'form-urlencoded':
if not isinstance(body, string_types):
if not isinstance(body, str):
try:
body = form_urlencoded(body)
except ValueError as e:

@@ -747,7 +746,7 @@ def main():
# In python3, the headers are title cased. Lowercase them to be
# compatible with the python2 behaviour.
uresp = {}
for key, value in iteritems(resp):
for key, value in resp.items():
ukey = key.replace("-", "_").lower()
uresp[ukey] = value
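# Sketch of the dict-iteration change above: six.iteritems(d) is replaced by d.items(),
# which on Python 3 already returns a lightweight view rather than a copied list.
resp = {'Content-Type': 'application/json', 'X-Request-Id': 'abc123'}

uresp = {}
for key, value in resp.items():            # previously: iteritems(resp)
    uresp[key.replace("-", "_").lower()] = value

assert uresp['content_type'] == 'application/json'
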
@@ -755,7 +754,7 @@ def main():
uresp['location'] = urljoin(url, uresp['location'])

# Default content_encoding to try
if isinstance(content, binary_type):
if isinstance(content, bytes):
u_content = to_text(content, encoding=content_encoding)
if maybe_json:
try:

@@ -19,7 +19,6 @@ from ansible._internal._errors import _error_utils
from ansible.module_utils.basic import is_executable
from ansible._internal._datatag._tags import Origin, TrustedAsTemplate, SourceWasEncrypted
from ansible.module_utils._internal._datatag import AnsibleTagHelper
from ansible.module_utils.six import binary_type, text_type
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.parsing.quoting import unquote
from ansible.parsing.utils.yaml import from_yaml

@@ -418,7 +417,7 @@ class DataLoader:
Temporary files are cleanup in the destructor
"""

if not file_path or not isinstance(file_path, (binary_type, text_type)):
if not file_path or not isinstance(file_path, (bytes, str)):
raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path))

b_file_path = to_bytes(file_path, errors='surrogate_or_strict')

@@ -59,7 +59,6 @@ except ImportError:

from ansible.errors import AnsibleError, AnsibleAssertionError
from ansible import constants as C
from ansible.module_utils.six import binary_type
from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
from ansible.utils.display import Display
from ansible.utils.path import makedirs_safe, unfrackpath

@@ -1237,7 +1236,7 @@ class VaultAES256:

It would be nice if there were a library for this but hey.
"""
if not (isinstance(b_a, binary_type) and isinstance(b_b, binary_type)):
if not (isinstance(b_a, bytes) and isinstance(b_b, bytes)):
raise TypeError('_is_equal can only be used to compare two byte strings')

# http://codahale.com/a-lesson-in-timing-attacks/

@@ -19,7 +19,6 @@ from ansible import context
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleAssertionError, AnsibleValueOmittedError, AnsibleFieldAttributeError
from ansible.module_utils.datatag import native_type_name
from ansible._internal._datatag._tags import Origin
from ansible.module_utils.six import string_types
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.common.sentinel import Sentinel
from ansible.module_utils.common.text.converters import to_text

@@ -37,7 +36,7 @@ display = Display()
def _validate_action_group_metadata(action, found_group_metadata, fq_group_name):
valid_metadata = {
'extend_group': {
'types': (list, string_types,),
'types': (list, str,),
'errortype': 'list',
},
}

@@ -204,7 +203,7 @@ class FieldAttributeBase:
value = self.set_to_context(attr.name)

valid_values = frozenset(('always', 'on_failed', 'on_unreachable', 'on_skipped', 'never'))
if value and isinstance(value, string_types) and value not in valid_values:
if value and isinstance(value, str) and value not in valid_values:
raise AnsibleParserError("'%s' is not a valid value for debugger. Must be one of %s" % (value, ', '.join(valid_values)), obj=self.get_ds())
return value

@@ -350,14 +349,14 @@ class FieldAttributeBase:
found_group_metadata = False
for action in action_group:
# Everything should be a string except the metadata entry
if not isinstance(action, string_types):
if not isinstance(action, str):
_validate_action_group_metadata(action, found_group_metadata, fq_group_name)

if isinstance(action['metadata'], dict):
found_group_metadata = True

include_groups = action['metadata'].get('extend_group', [])
if isinstance(include_groups, string_types):
if isinstance(include_groups, str):
include_groups = [include_groups]
if not isinstance(include_groups, list):
# Bad entries may be a warning above, but prevent tracebacks by setting it back to the acceptable type.

@@ -472,7 +471,7 @@ class FieldAttributeBase:
elif attribute.isa == 'percent':
# special value, which may be an integer or float
# with an optional '%' at the end
if isinstance(value, string_types) and '%' in value:
if isinstance(value, str) and '%' in value:
value = value.replace('%', '')
value = float(value)
elif attribute.isa == 'list':

@@ -3,7 +3,6 @@

from __future__ import annotations

from ansible.module_utils.six import string_types
from ansible.playbook.attribute import FieldAttribute
from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.display import Display

@@ -32,7 +31,7 @@ def _ensure_default_collection(collection_list=None):
class CollectionSearch:

# this needs to be populated before we can resolve tasks/roles/etc
collections = FieldAttribute(isa='list', listof=string_types, priority=100, default=_ensure_default_collection, always_post_validate=True, static=True)
collections = FieldAttribute(isa='list', listof=(str,), priority=100, default=_ensure_default_collection, always_post_validate=True, static=True)

def _load_collections(self, attr, ds):
# We are always a mixin with Base, so we can validate this untemplated

@@ -20,12 +20,11 @@ from __future__ import annotations
from ansible.errors import AnsibleAssertionError
from ansible.playbook.attribute import NonInheritableFieldAttribute
from ansible.playbook.task import Task
from ansible.module_utils.six import string_types


class Handler(Task):

listen = NonInheritableFieldAttribute(isa='list', default=list, listof=string_types, static=True)
listen = NonInheritableFieldAttribute(isa='list', default=list, listof=(str,), static=True)

def __init__(self, block=None, role=None, task_include=None):
self.notified_hosts = []
@@ -22,7 +22,6 @@ from ansible import context
from ansible.errors import AnsibleError
from ansible.errors import AnsibleParserError, AnsibleAssertionError
from ansible.module_utils.common.collections import is_sequence
from ansible.module_utils.six import binary_type, string_types, text_type
from ansible.playbook.attribute import NonInheritableFieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.block import Block

@@ -53,11 +52,11 @@ class Play(Base, Taggable, CollectionSearch):
"""

# =================================================================================
hosts = NonInheritableFieldAttribute(isa='list', required=True, listof=string_types, always_post_validate=True, priority=-2)
hosts = NonInheritableFieldAttribute(isa='list', required=True, listof=(str,), always_post_validate=True, priority=-2)

# Facts
gather_facts = NonInheritableFieldAttribute(isa='bool', default=None, always_post_validate=True)
gather_subset = NonInheritableFieldAttribute(isa='list', default=None, listof=string_types, always_post_validate=True)
gather_subset = NonInheritableFieldAttribute(isa='list', default=None, listof=(str,), always_post_validate=True)
gather_timeout = NonInheritableFieldAttribute(isa='int', default=None, always_post_validate=True)
fact_path = NonInheritableFieldAttribute(isa='string', default=None)

@@ -120,10 +119,10 @@ class Play(Base, Taggable, CollectionSearch):
for entry in value:
if entry is None:
raise AnsibleParserError("Hosts list cannot contain values of 'None'. Please check your playbook")
elif not isinstance(entry, (binary_type, text_type)):
elif not isinstance(entry, (bytes, str)):
raise AnsibleParserError("Hosts list contains an invalid host value: '{host!s}'".format(host=entry))

elif not isinstance(value, (binary_type, text_type, EncryptedString)):
elif not isinstance(value, (bytes, str, EncryptedString)):
raise AnsibleParserError("Hosts list must be a sequence or string. Please check your playbook.")

def get_name(self):
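# Sketch of the host-entry guard above: the six aliases binary_type/text_type map
# directly to bytes/str, so the tuple form of isinstance keeps the same semantics.
def validate_host_entry(entry):
    if entry is None:
        raise ValueError("Hosts list cannot contain values of 'None'.")
    if not isinstance(entry, (bytes, str)):
        raise ValueError("Hosts list contains an invalid host value: %r" % (entry,))
    return entry

validate_host_entry('web01.example.com')   # str accepted
validate_host_entry(b'web02')              # bytes still accepted, as before
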
|
||||
|
|
|
|||
|
|
@ -27,7 +27,6 @@ from ansible import constants as C
|
|||
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleAssertionError
|
||||
from ansible.module_utils.common.sentinel import Sentinel
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
from ansible.module_utils.six import binary_type, text_type
|
||||
from ansible.playbook.base import Base
|
||||
from ansible.playbook.collectionsearch import CollectionSearch
|
||||
from ansible.playbook.conditional import Conditional
|
||||
|
|
@ -74,7 +73,7 @@ def hash_params(params):
|
|||
# Any container is unhashable if it contains unhashable items (for
|
||||
# instance, tuple() is a Hashable subclass but if it contains a dict, it
|
||||
# cannot be hashed)
|
||||
if isinstance(params, Container) and not isinstance(params, (text_type, binary_type)):
|
||||
if isinstance(params, Container) and not isinstance(params, (str, bytes)):
|
||||
if isinstance(params, Mapping):
|
||||
try:
|
||||
# Optimistically hope the contents are all hashable
|
||||
|
|
|
|||
|
|
@ -22,7 +22,6 @@ import os
|
|||
from ansible import constants as C
|
||||
from ansible.errors import AnsibleError, AnsibleAssertionError
|
||||
from ansible.module_utils._internal._datatag import AnsibleTagHelper
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.playbook.attribute import NonInheritableFieldAttribute
|
||||
from ansible.playbook.base import Base
|
||||
from ansible.playbook.collectionsearch import CollectionSearch
|
||||
|
|
@ -70,7 +69,7 @@ class RoleDefinition(Base, Conditional, Taggable, CollectionSearch):
|
|||
if isinstance(ds, int):
|
||||
ds = "%s" % ds
|
||||
|
||||
if not isinstance(ds, dict) and not isinstance(ds, string_types):
|
||||
if not isinstance(ds, dict) and not isinstance(ds, str):
|
||||
raise AnsibleAssertionError()
|
||||
|
||||
if isinstance(ds, dict):
|
||||
|
|
@ -113,11 +112,11 @@ class RoleDefinition(Base, Conditional, Taggable, CollectionSearch):
|
|||
string), just that string
|
||||
"""
|
||||
|
||||
if isinstance(ds, string_types):
|
||||
if isinstance(ds, str):
|
||||
return ds
|
||||
|
||||
role_name = ds.get('role', ds.get('name'))
|
||||
if not role_name or not isinstance(role_name, string_types):
|
||||
if not role_name or not isinstance(role_name, str):
|
||||
raise AnsibleError('role definitions must contain a role name', obj=ds)
|
||||
|
||||
# if we have the required datastructures, and if the role_name
|
||||
|
|
|
|||
|
|
@ -18,7 +18,6 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from ansible.errors import AnsibleError, AnsibleParserError
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.playbook.delegatable import Delegatable
|
||||
from ansible.playbook.role.definition import RoleDefinition
|
||||
|
||||
|
|
@ -40,10 +39,10 @@ class RoleInclude(RoleDefinition, Delegatable):
|
|||
@staticmethod
|
||||
def load(data, play, current_role_path=None, parent_role=None, variable_manager=None, loader=None, collection_list=None):
|
||||
|
||||
if not (isinstance(data, string_types) or isinstance(data, dict)):
|
||||
if not (isinstance(data, str) or isinstance(data, dict)):
|
||||
raise AnsibleParserError("Invalid role definition.", obj=data)
|
||||
|
||||
if isinstance(data, string_types) and ',' in data:
|
||||
if isinstance(data, str) and ',' in data:
|
||||
raise AnsibleError("Invalid old style role requirement: %s" % data)
|
||||
|
||||
ri = RoleInclude(play=play, role_basedir=current_role_path, variable_manager=variable_manager, loader=loader, collection_list=collection_list)
|
||||
|
|
|
|||
|
|
@ -20,7 +20,6 @@ from __future__ import annotations
|
|||
import os
|
||||
|
||||
from ansible.errors import AnsibleParserError, AnsibleError
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.playbook.attribute import NonInheritableFieldAttribute
|
||||
from ansible.playbook.base import Base
|
||||
from ansible.playbook.collectionsearch import CollectionSearch
|
||||
|
|
@ -70,7 +69,7 @@ class RoleMetadata(Base, CollectionSearch):
|
|||
|
||||
for role_def in ds:
|
||||
# FIXME: consolidate with ansible-galaxy to keep this in sync
|
||||
if isinstance(role_def, string_types) or 'role' in role_def or 'name' in role_def:
|
||||
if isinstance(role_def, str) or 'role' in role_def or 'name' in role_def:
|
||||
roles.append(role_def)
|
||||
continue
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -18,7 +18,6 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.playbook.role.definition import RoleDefinition
|
||||
from ansible.utils.display import Display
|
||||
from ansible.utils.galaxy import scm_archive_resource
|
||||
|
|
@ -65,7 +64,7 @@ class RoleRequirement(RoleDefinition):
|
|||
@staticmethod
|
||||
def role_yaml_parse(role):
|
||||
|
||||
if isinstance(role, string_types):
|
||||
if isinstance(role, str):
|
||||
name = None
|
||||
scm = None
|
||||
src = None
|
||||
|
|
|
|||
|
|
@ -23,7 +23,6 @@ from ansible.playbook.task_include import TaskInclude
|
|||
from ansible.playbook.role import Role
|
||||
from ansible.playbook.role.include import RoleInclude
|
||||
from ansible.utils.display import Display
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible._internal._templating._engine import TemplateEngine
|
||||
|
||||
__all__ = ['IncludeRole']
|
||||
|
|
@ -137,7 +136,7 @@ class IncludeRole(TaskInclude):
|
|||
for key in my_arg_names.intersection(IncludeRole.FROM_ARGS):
|
||||
from_key = key.removesuffix('_from')
|
||||
args_value = ir.args.get(key)
|
||||
if not isinstance(args_value, string_types):
|
||||
if not isinstance(args_value, str):
|
||||
raise AnsibleParserError('Expected a string for %s but got %s instead' % (key, type(args_value)))
|
||||
ir._from_files[from_key] = args_value
|
||||
|
||||
|
|
|
|||
|
|
@ -25,7 +25,6 @@ from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVar
|
|||
from ansible.executor.module_common import _get_action_arg_defaults
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.module_utils._internal._datatag import AnsibleTagHelper
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.parsing.mod_args import ModuleArgsParser, RAW_PARAM_MODULES
|
||||
from ansible.plugins.action import ActionBase
|
||||
from ansible.plugins.loader import action_loader, module_loader, lookup_loader
|
||||
|
|
@ -161,7 +160,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch, Notifiable, Delegatabl
|
|||
def _merge_kv(self, ds):
|
||||
if ds is None:
|
||||
return ""
|
||||
elif isinstance(ds, string_types):
|
||||
elif isinstance(ds, str):
|
||||
return ds
|
||||
elif isinstance(ds, dict):
|
||||
buf = ""
|
||||
|
|
|
|||
|
|
@ -29,7 +29,6 @@ from ansible.module_utils.common.arg_spec import ArgumentSpecValidator
|
|||
from ansible.module_utils.errors import UnsupportedError
|
||||
from ansible.module_utils.json_utils import _filter_non_json_lines
|
||||
from ansible.module_utils.common.json import Direction, get_module_encoder, get_module_decoder
|
||||
from ansible.module_utils.six import binary_type, string_types, text_type
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||
from ansible.release import __version__
|
||||
from ansible.utils.collection_loader import resource_from_fqcr
|
||||
|
|
@ -52,7 +51,7 @@ if t.TYPE_CHECKING:
|
|||
|
||||
|
||||
def _validate_utf8_json(d):
|
||||
if isinstance(d, text_type):
|
||||
if isinstance(d, str):
|
||||
# Purposefully not using to_bytes here for performance reasons
|
||||
d.encode(encoding='utf-8', errors='strict')
|
||||
elif isinstance(d, dict):
|
||||
|
|
@ -874,7 +873,7 @@ class ActionBase(ABC, _AnsiblePluginInfoMixin):
|
|||
# happens sometimes when it is a dir and not on bsd
|
||||
if 'checksum' not in mystat['stat']:
|
||||
mystat['stat']['checksum'] = ''
|
||||
elif not isinstance(mystat['stat']['checksum'], string_types):
|
||||
elif not isinstance(mystat['stat']['checksum'], str):
|
||||
raise AnsibleError("Invalid checksum returned by stat: expected a string type but got %s" % type(mystat['stat']['checksum']))
|
||||
|
||||
return mystat['stat']
|
||||
|
|
@ -1084,7 +1083,7 @@ class ActionBase(ABC, _AnsiblePluginInfoMixin):
|
|||
# the remote system, which can be read and parsed by the module
|
||||
args_data = ""
|
||||
for k, v in module_args.items():
|
||||
args_data += '%s=%s ' % (k, shlex.quote(text_type(v)))
|
||||
args_data += '%s=%s ' % (k, shlex.quote(str(v)))
|
||||
self._transfer_data(args_file_path, args_data)
|
||||
elif module_style in ('non_native_want_json', 'binary'):
|
||||
profile_encoder = get_module_encoder(module_bits.serialization_profile, Direction.CONTROLLER_TO_MODULE)
|
||||
|
|
@ -1169,7 +1168,7 @@ class ActionBase(ABC, _AnsiblePluginInfoMixin):
|
|||
self._cleanup_remote_tmp = False
|
||||
|
||||
# NOTE: dnf returns results .. but that made it 'compatible' with squashing, so we allow mappings, for now
|
||||
if 'results' in data and (not isinstance(data['results'], Sequence) or isinstance(data['results'], string_types)):
|
||||
if 'results' in data and (not isinstance(data['results'], Sequence) or isinstance(data['results'], str)):
|
||||
data['ansible_module_results'] = data['results']
|
||||
del data['results']
|
||||
display.warning("Found internal 'results' key in module return, renamed to 'ansible_module_results'.")
|
||||
|
|
@ -1322,16 +1321,16 @@ class ActionBase(ABC, _AnsiblePluginInfoMixin):
|
|||
|
||||
# stdout and stderr may be either a file-like or a bytes object.
|
||||
# Convert either one to a text type
|
||||
if isinstance(stdout, binary_type):
|
||||
if isinstance(stdout, bytes):
|
||||
out = to_text(stdout, errors=encoding_errors)
|
||||
elif not isinstance(stdout, text_type):
|
||||
elif not isinstance(stdout, str):
|
||||
out = to_text(b''.join(stdout.readlines()), errors=encoding_errors)
|
||||
else:
|
||||
out = stdout
|
||||
|
||||
if isinstance(stderr, binary_type):
|
||||
if isinstance(stderr, bytes):
|
||||
err = to_text(stderr, errors=encoding_errors)
|
||||
elif not isinstance(stderr, text_type):
|
||||
elif not isinstance(stderr, str):
|
||||
err = to_text(b''.join(stderr.readlines()), errors=encoding_errors)
|
||||
else:
|
||||
err = stderr
|
||||
|
|
|
|||
|
|
@ -21,7 +21,6 @@ from __future__ import annotations
|
|||
from collections.abc import Mapping
|
||||
|
||||
from ansible.errors import AnsibleActionFail
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.plugins.action import ActionBase
|
||||
from ansible.parsing.utils.addresses import parse_address
|
||||
from ansible.utils.display import Display
|
||||
|
|
@ -74,7 +73,7 @@ class ActionModule(ActionBase):
|
|||
if groups:
|
||||
if isinstance(groups, list):
|
||||
group_list = groups
|
||||
elif isinstance(groups, string_types):
|
||||
elif isinstance(groups, str):
|
||||
group_list = groups.split(",")
|
||||
else:
|
||||
raise AnsibleActionFail("Groups must be specified as a list.", obj=groups)
|
||||
|
|
|
|||
|
|
@ -20,7 +20,6 @@ import os
|
|||
import base64
|
||||
from ansible.errors import AnsibleConnectionFailure, AnsibleError, AnsibleActionFail, AnsibleActionSkip
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.parsing.convert_bool import boolean
|
||||
from ansible.plugins.action import ActionBase
|
||||
from ansible.utils.display import Display
|
||||
|
|
@ -52,10 +51,10 @@ class ActionModule(ActionBase):
|
|||
|
||||
msg = ''
|
||||
# FIXME: validate source and dest are strings; use basic.py and module specs
|
||||
if not isinstance(source, string_types):
|
||||
if not isinstance(source, str):
|
||||
msg = "Invalid type supplied for source option, it must be a string"
|
||||
|
||||
if not isinstance(dest, string_types):
|
||||
if not isinstance(dest, str):
|
||||
msg = "Invalid type supplied for dest option, it must be a string"
|
||||
|
||||
if source is None or dest is None:
|
||||
|
|
|
|||
|
|
@ -17,7 +17,6 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from ansible.plugins.action import ActionBase
|
||||
from ansible.module_utils.six import string_types
|
||||
|
||||
|
||||
class ActionModule(ActionBase):
|
||||
|
|
@ -42,7 +41,7 @@ class ActionModule(ActionBase):
|
|||
|
||||
group_name = self._task.args.get('key')
|
||||
parent_groups = self._task.args.get('parents', ['all'])
|
||||
if isinstance(parent_groups, string_types):
|
||||
if isinstance(parent_groups, str):
|
||||
parent_groups = [parent_groups]
|
||||
|
||||
result['changed'] = False
|
||||
|
|
|
|||
|
|
@ -10,7 +10,6 @@ import pathlib
|
|||
import ansible.constants as C
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible._internal._datatag._tags import SourceWasEncrypted
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.plugins.action import ActionBase
|
||||
from ansible.utils.vars import combine_vars
|
||||
|
|
@ -38,7 +37,7 @@ class ActionModule(ActionBase):
|
|||
if not self.ignore_files:
|
||||
self.ignore_files = list()
|
||||
|
||||
if isinstance(self.ignore_files, string_types):
|
||||
if isinstance(self.ignore_files, str):
|
||||
self.ignore_files = self.ignore_files.split()
|
||||
|
||||
elif isinstance(self.ignore_files, dict):
|
||||
|
|
@ -66,7 +65,7 @@ class ActionModule(ActionBase):
|
|||
self.valid_extensions = self._task.args.get('extensions', self.VALID_FILE_EXTENSIONS)
|
||||
|
||||
# convert/validate extensions list
|
||||
if isinstance(self.valid_extensions, string_types):
|
||||
if isinstance(self.valid_extensions, str):
|
||||
self.valid_extensions = list(self.valid_extensions)
|
||||
if not isinstance(self.valid_extensions, list):
|
||||
raise AnsibleError('Invalid type for "extensions" option, it must be a list')
|
||||
|
|
|
|||
|
|
@ -23,7 +23,6 @@ from ansible.config.manager import ensure_type
|
|||
from ansible.errors import AnsibleError, AnsibleActionFail
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
|
||||
from ansible.module_utils.parsing.convert_bool import boolean
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.plugins.action import ActionBase
|
||||
from ansible.template import trust_as_template
|
||||
from ansible._internal._templating import _template_vars
|
||||
|
|
@ -49,7 +48,7 @@ class ActionModule(ActionBase):
|
|||
'block_end_string', 'comment_start_string', 'comment_end_string'):
|
||||
if s_type in self._task.args:
|
||||
value = ensure_type(self._task.args[s_type], 'string')
|
||||
if value is not None and not isinstance(value, string_types):
|
||||
if value is not None and not isinstance(value, str):
|
||||
raise AnsibleActionFail("%s is expected to be a string, but got %s instead" % (s_type, type(value)))
|
||||
self._task.args[s_type] = value
|
||||
|
||||
|
|
|
|||
|
|
@ -47,7 +47,6 @@ import typing as t
|
|||
|
||||
import ansible.constants as C
|
||||
from ansible.errors import AnsibleError, AnsibleFileNotFound, AnsibleConnectionFailure
|
||||
from ansible.module_utils.six import text_type, binary_type
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||
from ansible.plugins.connection import ConnectionBase
|
||||
from ansible.utils.display import Display
|
||||
|
|
@ -100,7 +99,7 @@ class Connection(ConnectionBase):
|
|||
display.vvv(u"EXEC {0}".format(to_text(cmd)), host=self._play_context.remote_addr)
|
||||
display.debug("opening command with Popen()")
|
||||
|
||||
if isinstance(cmd, (text_type, binary_type)):
|
||||
if isinstance(cmd, (str, bytes)):
|
||||
cmd = to_text(cmd)
|
||||
else:
|
||||
cmd = map(to_text, cmd)
|
||||
|
|
@ -119,7 +118,7 @@ class Connection(ConnectionBase):
|
|||
|
||||
p = subprocess.Popen(
|
||||
cmd,
|
||||
shell=isinstance(cmd, (text_type, binary_type)),
|
||||
shell=isinstance(cmd, (str, bytes)),
|
||||
executable=executable,
|
||||
cwd=self.cwd,
|
||||
stdin=stdin,
|
||||
|
|
|
|||
|
|
@ -441,7 +441,6 @@ from ansible.errors import (
|
|||
AnsibleError,
|
||||
AnsibleFileNotFound,
|
||||
)
|
||||
from ansible.module_utils.six import text_type, binary_type
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
||||
from ansible.plugins.shell.powershell import _replace_stderr_clixml
|
||||
|
|
@ -1122,7 +1121,7 @@ class Connection(ConnectionBase):
|
|||
|
||||
p = None
|
||||
|
||||
if isinstance(cmd, (text_type, binary_type)):
|
||||
if isinstance(cmd, (str, bytes)):
|
||||
cmd = to_bytes(cmd)
|
||||
else:
|
||||
cmd = list(map(to_bytes, cmd))
|
||||
|
|
|
|||
|
|
@ -29,7 +29,6 @@ from ansible._internal._templating import _lazy_containers
|
|||
from ansible.errors import AnsibleFilterError, AnsibleTypeError, AnsibleTemplatePluginError
|
||||
from ansible.module_utils.datatag import native_type_name
|
||||
from ansible.module_utils.common.json import get_encoder, get_decoder
|
||||
from ansible.module_utils.six import string_types, integer_types, text_type
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||
from ansible.module_utils.common.collections import is_sequence
|
||||
from ansible.parsing.yaml.dumper import AnsibleDumper
|
||||
|
|
@ -278,7 +277,7 @@ def rand(environment, end, start=None, step=None, seed=None):
|
|||
r = SystemRandom()
|
||||
else:
|
||||
r = Random(seed)
|
||||
if isinstance(end, integer_types):
|
||||
if isinstance(end, int):
|
||||
if not start:
|
||||
start = 0
|
||||
if not step:
|
||||
|
|
@ -555,7 +554,7 @@ def subelements(obj, subelements, skip_missing=False):
|
|||
|
||||
if isinstance(subelements, list):
|
||||
subelement_list = subelements[:]
|
||||
elif isinstance(subelements, string_types):
|
||||
elif isinstance(subelements, str):
|
||||
subelement_list = subelements.split('.')
|
||||
else:
|
||||
raise AnsibleTypeError('subelements must be a list or a string')
|
||||
|
|
@ -617,7 +616,7 @@ def list_of_dict_key_value_elements_to_dict(mylist, key_name='key', value_name='
|
|||
def path_join(paths):
|
||||
""" takes a sequence or a string, and return a concatenation
|
||||
of the different members """
|
||||
if isinstance(paths, string_types):
|
||||
if isinstance(paths, str):
|
||||
return os.path.join(paths)
|
||||
if is_sequence(paths):
|
||||
return os.path.join(*paths)
|
||||
|
|
@ -809,7 +808,7 @@ class FilterModule(object):
|
|||
'dict2items': dict_to_list_of_dict_key_value_elements,
|
||||
'items2dict': list_of_dict_key_value_elements_to_dict,
|
||||
'subelements': subelements,
|
||||
'split': partial(unicode_wrap, text_type.split),
|
||||
'split': partial(unicode_wrap, str.split),
|
||||
# FDI038 - replace this with a standard type compat shim
|
||||
'groupby': _cleansed_groupby,
|
||||
|
||||
|
|
|
|||
|
|
@ -29,7 +29,6 @@ from jinja2.filters import pass_environment
|
|||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils.common.text import formatters
|
||||
from ansible.module_utils.six import binary_type, text_type
|
||||
from ansible.utils.display import Display
|
||||
|
||||
try:
|
||||
|
|
@ -180,7 +179,7 @@ def rekey_on_member(data, key, duplicates='error'):
|
|||
|
||||
if isinstance(data, Mapping):
|
||||
iterate_over = data.values()
|
||||
elif isinstance(data, Iterable) and not isinstance(data, (text_type, binary_type)):
|
||||
elif isinstance(data, Iterable) and not isinstance(data, (str, bytes)):
|
||||
iterate_over = data
|
||||
else:
|
||||
raise AnsibleError("Type is not a valid list, set, or dict")
|
||||
|
|
|
|||
|
|
@ -34,7 +34,6 @@ from ansible.parsing.dataloader import DataLoader
|
|||
from ansible.plugins import AnsiblePlugin, _ConfigurablePlugin
|
||||
from ansible.plugins.cache import CachePluginAdjudicator
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.utils.display import Display
|
||||
from ansible.utils.vars import combine_vars, load_extra_vars
|
||||
|
||||
|
|
@ -439,7 +438,7 @@ class Constructable(_BaseInventoryPlugin):
|
|||
new_raw_group_names = []
|
||||
if use_default:
|
||||
new_raw_group_names.append(default_value_name)
|
||||
elif isinstance(key, string_types):
|
||||
elif isinstance(key, str):
|
||||
new_raw_group_names.append(key)
|
||||
elif isinstance(key, list):
|
||||
for name in key:
|
||||
|
|
|
|||
|
|
@ -90,7 +90,6 @@ from collections.abc import MutableMapping, MutableSequence
|
|||
|
||||
from ansible.errors import AnsibleFileNotFound, AnsibleParserError
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.plugins.inventory import BaseFileInventoryPlugin
|
||||
from ansible.utils.display import Display
|
||||
|
||||
|
|
@ -147,7 +146,7 @@ class InventoryModule(BaseFileInventoryPlugin):
|
|||
)
|
||||
|
||||
def _load_file(self, file_name):
|
||||
if not file_name or not isinstance(file_name, string_types):
|
||||
if not file_name or not isinstance(file_name, str):
|
||||
raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_name))
|
||||
|
||||
b_file_name = to_bytes(self.loader.path_dwim(file_name))
|
||||
|
|
|
|||
|
|
@ -70,7 +70,6 @@ import os
|
|||
from collections.abc import MutableMapping
|
||||
|
||||
from ansible.errors import AnsibleError, AnsibleParserError
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||
from ansible.plugins.inventory import BaseFileInventoryPlugin
|
||||
|
||||
|
|
@ -136,7 +135,7 @@ class InventoryModule(BaseFileInventoryPlugin):
|
|||
for section in ['vars', 'children', 'hosts']:
|
||||
if section in group_data:
|
||||
# convert strings to dicts as these are allowed
|
||||
if isinstance(group_data[section], string_types):
|
||||
if isinstance(group_data[section], str):
|
||||
group_data[section] = {group_data[section]: None}
|
||||
|
||||
if not isinstance(group_data[section], (MutableMapping, NoneType)): # type: ignore[misc]
|
||||
|
|
|
|||
|
|
@ -27,7 +27,6 @@ from ansible import _internal, constants as C
|
|||
from ansible.errors import AnsibleError, AnsiblePluginCircularRedirect, AnsiblePluginRemovedError, AnsibleCollectionUnsupportedVersionError
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
|
||||
from ansible.module_utils.datatag import deprecator_from_collection_name
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.parsing.yaml.loader import AnsibleLoader
|
||||
from ansible._internal._yaml._loader import AnsibleInstrumentedLoader
|
||||
from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE, AnsibleJinja2Plugin
|
||||
|
|
@ -96,7 +95,7 @@ def get_shell_plugin(shell_type=None, executable=None):
|
|||
|
||||
# mostly for backwards compat
|
||||
if executable:
|
||||
if isinstance(executable, string_types):
|
||||
if isinstance(executable, str):
|
||||
shell_filename = os.path.basename(executable)
|
||||
try:
|
||||
shell = shell_loader.get(shell_filename)
|
||||
|
|
|
|||
|
|
@ -134,7 +134,6 @@ import hashlib
|
|||
|
||||
from ansible.errors import AnsibleError, AnsibleAssertionError
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.parsing.splitter import parse_kv
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
from ansible.utils.encrypt import BaseHash, do_encrypt, random_password, random_salt
|
||||
|
|
@ -335,7 +334,7 @@ class LookupModule(LookupBase):
|
|||
|
||||
# chars still might need more
|
||||
chars = params.get('chars', self.get_option('chars'))
|
||||
if chars and isinstance(chars, string_types):
|
||||
if chars and isinstance(chars, str):
|
||||
tmp_chars = []
|
||||
if u',,' in chars:
|
||||
tmp_chars.append(u',')
|
||||
|
|
|
|||
|
|
@ -83,7 +83,6 @@ _list:
|
|||
"""
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.parsing.convert_bool import boolean
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
|
||||
|
|
@ -104,7 +103,7 @@ class LookupModule(LookupBase):
|
|||
_raise_terms_error()
|
||||
|
||||
# first term should be a list (or dict), second a string holding the subkey
|
||||
if not isinstance(terms[0], (list, dict)) or not isinstance(terms[1], string_types):
|
||||
if not isinstance(terms[0], (list, dict)) or not isinstance(terms[1], str):
|
||||
_raise_terms_error("first a dict or a list, second a string pointing to the subkey")
|
||||
subelements = terms[1].split(".")
|
||||
|
||||
|
|
@ -122,7 +121,7 @@ class LookupModule(LookupBase):
|
|||
flags = {}
|
||||
if len(terms) == 3:
|
||||
flags = terms[2]
|
||||
if not isinstance(flags, dict) and not all(isinstance(key, string_types) and key in FLAGS for key in flags):
|
||||
if not isinstance(flags, dict) and not all(isinstance(key, str) and key in FLAGS for key in flags):
|
||||
_raise_terms_error("the optional third item must be a dict with flags %s" % FLAGS)
|
||||
|
||||
# build_items
|
||||
|
|
|
|||
|
|
@ -52,7 +52,6 @@ import re
|
|||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
|
||||
|
||||
|
|
@ -69,7 +68,7 @@ class LookupModule(LookupBase):
|
|||
variable_names = list(variables.keys())
|
||||
for term in terms:
|
||||
|
||||
if not isinstance(term, string_types):
|
||||
if not isinstance(term, str):
|
||||
raise AnsibleError('Invalid setting identifier, "%s" is not a string, it is a %s' % (term, type(term)))
|
||||
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -24,11 +24,8 @@ import secrets
|
|||
import shlex
|
||||
import time
|
||||
|
||||
from collections.abc import Mapping, Sequence
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.module_utils.six import text_type, string_types
|
||||
from ansible.plugins import AnsiblePlugin
|
||||
|
||||
_USER_HOME_PATH_RE = re.compile(r'^~[_.A-Za-z0-9][-_.A-Za-z0-9]*$')
|
||||
|
|
@ -84,7 +81,7 @@ class ShellBase(AnsiblePlugin):
|
|||
return 'ansible-tmp-%s-%s-%s' % (time.time(), os.getpid(), secrets.randbelow(2**48))
|
||||
|
||||
def env_prefix(self, **kwargs):
|
||||
return ' '.join(['%s=%s' % (k, self.quote(text_type(v))) for k, v in kwargs.items()])
|
||||
return ' '.join(['%s=%s' % (k, self.quote(str(v))) for k, v in kwargs.items()])
|
||||
|
||||
def join_path(self, *args):
|
||||
return os.path.join(*args)
|
||||
|
|
|
|||
|
|
@@ -9,14 +9,14 @@ from __future__ import annotations
from abc import ABCMeta
from collections.abc import Container, Mapping, Sequence, Set

from ansible.module_utils._internal import _no_six
from ansible.module_utils.common.collections import ImmutableDict
from ansible.module_utils.six import add_metaclass, binary_type, text_type
from ansible.utils.singleton import Singleton


def _make_immutable(obj):
"""Recursively convert a container and objects inside of it into immutable data types"""
if isinstance(obj, (text_type, binary_type)):
if isinstance(obj, (str, bytes)):
# Strings first because they are also sequences
return obj
elif isinstance(obj, Mapping):

@@ -79,11 +79,14 @@ class CLIArgs(ImmutableDict):
return cls(vars(options))


@add_metaclass(_ABCSingleton)
class GlobalCLIArgs(CLIArgs):
class GlobalCLIArgs(CLIArgs, metaclass=_ABCSingleton):
"""
Globally hold a parsed copy of cli arguments.

Only one of these exist per program as it is for global context
"""
pass


def __getattr__(importable_name):
return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type", "add_metaclass")
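# Sketch of the metaclass change above: six.add_metaclass(Meta) decorating a class is
# equivalent to declaring "metaclass=Meta" in the class statement on Python 3.
# _Singleton below is a simplified stand-in for ansible's _ABCSingleton combination.
from abc import ABCMeta

class _Singleton(ABCMeta):
    _instance = None

    def __call__(cls, *args, **kwargs):
        if cls._instance is None:
            cls._instance = super().__call__(*args, **kwargs)
        return cls._instance

class GlobalArgs(dict, metaclass=_Singleton):   # was: @add_metaclass(_Singleton)
    pass

assert GlobalArgs() is GlobalArgs()
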
|
||||
|
|
|
|||
|
|
@ -53,11 +53,10 @@ from ansible.constants import config
|
|||
from ansible.errors import AnsibleAssertionError, AnsiblePromptInterrupt, AnsiblePromptNoninteractive, AnsibleError
|
||||
from ansible._internal._errors import _error_utils, _error_factory
|
||||
from ansible._internal import _event_formatting
|
||||
from ansible.module_utils._internal import _ambient_context, _deprecator, _messages
|
||||
from ansible.module_utils._internal import _ambient_context, _deprecator, _messages, _no_six
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||
from ansible.module_utils.datatag import deprecator_from_collection_name
|
||||
from ansible._internal._datatag._tags import TrustedAsTemplate
|
||||
from ansible.module_utils.six import text_type
|
||||
from ansible.module_utils._internal import _traceback, _errors
|
||||
from ansible.utils.color import stringc
|
||||
from ansible.utils.multiprocessing import context as multiprocessing_context
|
||||
|
|
@ -106,7 +105,7 @@ def get_text_width(text: str) -> int:
|
|||
character and using wcwidth individually, falling back to a value of 0
|
||||
for non-printable wide characters.
|
||||
"""
|
||||
if not isinstance(text, text_type):
|
||||
if not isinstance(text, str):
|
||||
raise TypeError('get_text_width requires text, not %s' % type(text))
|
||||
|
||||
try:
|
||||
|
|
@ -1282,3 +1281,7 @@ def _report_config_warnings(deprecator: _messages.PluginInfo) -> None:
|
|||
# emit any warnings or deprecations
|
||||
# in the event config fails before display is up, we'll lose warnings -- but that's OK, since everything is broken anyway
|
||||
_report_config_warnings(_deprecator.ANSIBLE_CORE_DEPRECATOR)
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "text_type")
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils._internal import _no_six
|
||||
|
||||
|
||||
def pct_to_int(value, num_items, min_value=1):
|
||||
|
|
@ -25,7 +25,7 @@ def pct_to_int(value, num_items, min_value=1):
|
|||
Converts a given value to a percentage if specified as "x%",
|
||||
otherwise converts the given value to an integer.
|
||||
"""
|
||||
if isinstance(value, string_types) and value.endswith('%'):
|
||||
if isinstance(value, str) and value.endswith('%'):
|
||||
value_pct = int(value.replace("%", ""))
|
||||
return int((value_pct / 100.0) * num_items) or min_value
|
||||
else:
|
||||
|
|
@ -47,3 +47,7 @@ def deduplicate_list(original_list):
|
|||
"""
|
||||
seen = set()
|
||||
return [x for x in original_list if x not in seen and not seen.add(x)]
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "string_types")
|
||||
|
|
|
|||
|
|
@ -7,9 +7,9 @@ import json
|
|||
import pickle
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils._internal import _no_six
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
from ansible.module_utils.connection import ConnectionError
|
||||
from ansible.module_utils.six import binary_type, text_type
|
||||
from ansible.utils.display import Display
|
||||
|
||||
display = Display()
|
||||
|
|
@ -79,9 +79,9 @@ class JsonRpcServer(object):
|
|||
|
||||
def response(self, result=None):
|
||||
response = self.header()
|
||||
if isinstance(result, binary_type):
|
||||
if isinstance(result, bytes):
|
||||
result = to_text(result)
|
||||
if not isinstance(result, text_type):
|
||||
if not isinstance(result, str):
|
||||
response["result_type"] = "pickle"
|
||||
result = to_text(pickle.dumps(result), errors='surrogateescape')
|
||||
response['result'] = result
|
||||
|
|
@ -110,3 +110,7 @@ class JsonRpcServer(object):
|
|||
|
||||
def internal_error(self, data=None):
|
||||
return self.error(-32603, 'Internal error', data)
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type")
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ import yaml
|
|||
from ansible import constants as C
|
||||
from ansible.release import __version__ as ansible_version
|
||||
from ansible.errors import AnsibleError, AnsibleParserError, AnsiblePluginNotFound
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils._internal import _no_six
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.parsing.plugin_docs import read_docstring
|
||||
from ansible.parsing.yaml.loader import AnsibleLoader
|
||||
|
|
@ -133,7 +133,7 @@ def add_fragments(doc, filename, fragment_loader, is_module=False, section='DOCU
|
|||
|
||||
fragments = doc.pop('extends_documentation_fragment', [])
|
||||
|
||||
if isinstance(fragments, string_types):
|
||||
if isinstance(fragments, str):
|
||||
fragments = fragments.split(',')
|
||||
|
||||
unknown_fragments = []
|
||||
|
|
@ -355,3 +355,7 @@ def get_plugin_docs(plugin, plugin_type, loader, fragment_loader, verbose):
|
|||
docs[0]['plugin_name'] = context.resolved_fqcn
|
||||
|
||||
return docs
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "string_types")
|
||||
|
|
|
|||
|
|
@ -7,10 +7,10 @@ from __future__ import annotations
|
|||
|
||||
from collections.abc import Mapping, Set
|
||||
|
||||
from ansible.module_utils._internal import _no_six
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||
from ansible.module_utils.common.collections import is_sequence
|
||||
from ansible._internal._datatag._tags import TrustedAsTemplate
|
||||
from ansible.module_utils.six import binary_type, text_type
|
||||
|
||||
__all__ = ['AnsibleUnsafe', 'wrap_var']
|
||||
|
||||
|
|
@ -62,9 +62,9 @@ def wrap_var(v):
|
|||
v = _wrap_set(v)
|
||||
elif is_sequence(v):
|
||||
v = _wrap_sequence(v)
|
||||
elif isinstance(v, binary_type):
|
||||
elif isinstance(v, bytes):
|
||||
v = AnsibleUnsafeBytes(v)
|
||||
elif isinstance(v, text_type):
|
||||
elif isinstance(v, str):
|
||||
v = AnsibleUnsafeText(v)
|
||||
|
||||
return v
|
||||
|
|
@ -76,3 +76,7 @@ def to_unsafe_bytes(*args, **kwargs):
|
|||
|
||||
def to_unsafe_text(*args, **kwargs):
|
||||
return wrap_var(to_text(*args, **kwargs))
|
||||
|
||||
|
||||
def __getattr__(importable_name):
|
||||
return _no_six.deprecate(importable_name, __name__, "binary_type", "text_type")
|
||||
|
|
|
|||
|
|
@ -10,7 +10,6 @@ from collections.abc import MutableMapping, MutableSequence
|
|||
|
||||
from ansible import constants as C
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils import six
|
||||
from ansible.plugins.loader import connection_loader
|
||||
from ansible.utils.display import Display
|
||||
|
||||
|
|
@ -48,7 +47,7 @@ def module_response_deepcopy(v):
|
|||
"""
|
||||
if isinstance(v, dict):
|
||||
ret = v.copy()
|
||||
items = six.iteritems(ret)
|
||||
items = ret.items()
|
||||
elif isinstance(v, list):
|
||||
ret = v[:]
|
||||
items = enumerate(ret)
|
||||
|
|
@ -80,7 +79,7 @@ def strip_internal_keys(dirty, exceptions=None):
|
|||
|
||||
# listify to avoid updating dict while iterating over it
|
||||
for k in list(dirty.keys()):
|
||||
if isinstance(k, six.string_types):
|
||||
if isinstance(k, str):
|
||||
if k.startswith('_ansible_') and k not in exceptions:
|
||||
del dirty[k]
|
||||
continue
|
||||
|
|
|
|||
|
|
@ -33,7 +33,6 @@ from ansible.inventory.host import Host
|
|||
from ansible.inventory.helpers import sort_groups, get_group_vars
|
||||
from ansible.inventory.manager import InventoryManager
|
||||
from ansible.module_utils.datatag import native_type_name
|
||||
from ansible.module_utils.six import text_type
|
||||
from ansible.parsing.dataloader import DataLoader
|
||||
from ansible._internal._templating._engine import TemplateEngine
|
||||
from ansible.plugins.loader import cache_loader
|
||||
|
|
@ -467,7 +466,7 @@ class VariableManager:
|
|||
if task._role:
|
||||
variables['role_name'] = task._role.get_name(include_role_fqcn=False)
|
||||
variables['role_path'] = task._role._role_path
|
||||
variables['role_uuid'] = text_type(task._role._uuid)
|
||||
variables['role_uuid'] = str(task._role._uuid)
|
||||
variables['ansible_collection_name'] = task._role._role_collection
|
||||
variables['ansible_role_name'] = task._role.get_name()
|
||||
|
||||
|
|
|
|||
|
|
@@ -6,12 +6,6 @@ import os
import pexpect
import sys

from ansible.module_utils.six import PY2

if PY2:
log_buffer = sys.stdout
else:
log_buffer = sys.stdout.buffer

env_vars = {
'ANSIBLE_ROLES_PATH': './roles',

@@ -36,7 +30,7 @@ def run_test(playbook, test_spec, args=None, timeout=10, env=None):
env=env,
)

vars_prompt_test.logfile = log_buffer
vars_prompt_test.logfile = sys.stdout.buffer
for item in test_spec[0]:
vars_prompt_test.expect(item[0])
if item[1]:

@@ -7,7 +7,6 @@ import pexpect
import sys
import termios

from ansible.module_utils.six import PY2

args = sys.argv[1:]

@@ -22,11 +21,6 @@ try:
except Exception:
backspace = b'\x7f'

if PY2:
log_buffer = sys.stdout
else:
log_buffer = sys.stdout.buffer

os.environ.update(env_vars)

# -- Plain pause -- #
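# Sketch of the cleanup above: with Python 2 support gone, the PY2 branch that picked
# sys.stdout vs sys.stdout.buffer collapses, and the tests use the binary buffer that
# pexpect logging expects directly.
import sys

# previously:
#     log_buffer = sys.stdout if PY2 else sys.stdout.buffer
log_buffer = sys.stdout.buffer   # the tests now assign sys.stdout.buffer directly

log_buffer.write(b"pexpect output is logged as bytes\n")
log_buffer.flush()
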
|
||||
|
|
@ -40,7 +34,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Press enter to continue, Ctrl\+C to interrupt:')
|
||||
pause_test.send('\r')
|
||||
pause_test.expect('Task after pause')
|
||||
|
|
@ -56,7 +50,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Press enter to continue, Ctrl\+C to interrupt:')
|
||||
pause_test.send('\x03')
|
||||
pause_test.expect("Press 'C' to continue the play or 'A' to abort")
|
||||
|
|
@ -74,7 +68,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Press enter to continue, Ctrl\+C to interrupt:')
|
||||
pause_test.send('\x03')
|
||||
pause_test.expect("Press 'C' to continue the play or 'A' to abort")
|
||||
|
|
@ -94,7 +88,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Custom prompt:')
|
||||
pause_test.send('\r')
|
||||
pause_test.expect('Task after pause')
|
||||
|
|
@ -110,7 +104,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Custom prompt:')
|
||||
pause_test.send('\x03')
|
||||
pause_test.expect("Press 'C' to continue the play or 'A' to abort")
|
||||
|
|
@ -128,7 +122,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Custom prompt:')
|
||||
pause_test.send('\x03')
|
||||
pause_test.expect("Press 'C' to continue the play or 'A' to abort")
|
||||
|
|
@ -149,7 +143,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Pausing for \d+ seconds')
|
||||
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
|
||||
pause_test.expect('Task after pause')
|
||||
|
|
@ -164,7 +158,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Pausing for \d+ seconds')
|
||||
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
|
||||
pause_test.send('\n') # test newline does not stop the prompt - waiting for a timeout or ctrl+C
|
||||
|
|
@ -184,7 +178,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Pausing for \d+ seconds')
|
||||
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
|
||||
pause_test.send('\x03')
|
||||
|
|
@ -206,7 +200,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Pausing for \d+ seconds')
|
||||
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
|
||||
pause_test.expect(r"Waiting for two seconds:")
|
||||
|
|
@ -222,7 +216,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Pausing for \d+ seconds')
|
||||
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
|
||||
pause_test.expect(r"Waiting for two seconds:")
|
||||
|
|
@ -242,7 +236,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Pausing for \d+ seconds')
|
||||
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
|
||||
pause_test.expect(r"Waiting for two seconds:")
|
||||
|
|
@ -264,7 +258,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Enter some text:')
|
||||
pause_test.send('hello there')
|
||||
pause_test.send('\r')
|
||||
|
|
@ -290,7 +284,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r'Wait for three seconds:')
|
||||
pause_test.send('ignored user input')
|
||||
pause_test.expect('Task after pause')
|
||||
|
|
@ -307,7 +301,7 @@ pause_test = pexpect.spawn(
|
|||
env=os.environ
|
||||
)
|
||||
|
||||
pause_test.logfile = log_buffer
|
||||
pause_test.logfile = sys.stdout.buffer
|
||||
pause_test.expect(r"\(ctrl\+C then 'C' = continue early, ctrl\+C then 'A' = abort\)")
|
||||
pause_test.send('\r')
|
||||
pause_test.expect(pexpect.EOF)
|
||||
|
|
|
|||
|
|
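Every hunk in this file follows the same shape: assign the binary stdout stream as the pexpect log target, then drive the pause prompt. A condensed sketch of that pattern (the playbook name below is hypothetical, not one of the test playbooks):

    import sys
    import pexpect

    # Spawn in bytes mode, so the log target must accept bytes.
    child = pexpect.spawn('ansible-playbook pause-demo.yml', timeout=10)
    child.logfile = sys.stdout.buffer
    child.expect(r'Press enter to continue, Ctrl\+C to interrupt:')
    child.send('\r')
    child.expect(pexpect.EOF)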
@@ -62,7 +62,6 @@ import os
 from collections.abc import MutableMapping
 
 from ansible.errors import AnsibleError, AnsibleParserError
-from ansible.module_utils.six import string_types
 from ansible.module_utils.common.text.converters import to_native, to_text
 from ansible.plugins.inventory import BaseFileInventoryPlugin
 

@@ -126,7 +125,7 @@ class InventoryModule(BaseFileInventoryPlugin):
             for section in ['vars', 'children', 'hosts']:
                 if section in group_data:
                     # convert strings to dicts as these are allowed
-                    if isinstance(group_data[section], string_types):
+                    if isinstance(group_data[section], str):
                         group_data[section] = {group_data[section]: None}
 
                     if not isinstance(group_data[section], (MutableMapping, NoneType)):
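A small standalone sketch (not the plugin itself) of the scalar-to-dict promotion the second hunk touches; with six gone the check is a plain isinstance(..., str):

    group_data = {'hosts': 'web01'}  # hypothetical group data

    for section in ('vars', 'children', 'hosts'):
        if section in group_data and isinstance(group_data[section], str):
            # convert strings to dicts as these are allowed
            group_data[section] = {group_data[section]: None}

    print(group_data)  # {'hosts': {'web01': None}}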
@@ -17,14 +17,13 @@ from voluptuous import Required, Schema, Invalid
 from voluptuous.humanize import humanize_error
 
 from ansible.module_utils.compat.version import StrictVersion, LooseVersion
-from ansible.module_utils.six import string_types
 from ansible.utils.collection_loader import AnsibleCollectionRef
 from ansible.utils.version import SemanticVersion
 
 
 def fqcr(value):
     """Validate a FQCR."""
-    if not isinstance(value, string_types):
+    if not isinstance(value, str):
         raise Invalid('Must be a string that is a FQCR')
     if not AnsibleCollectionRef.is_valid_fqcr(value):
         raise Invalid('Must be a FQCR')

@@ -33,7 +32,7 @@ def fqcr(value):
 
 def fqcr_or_shortname(value):
     """Validate a FQCR or a shortname."""
-    if not isinstance(value, string_types):
+    if not isinstance(value, str):
         raise Invalid('Must be a string that is a FQCR or a short name')
     if '.' in value and not AnsibleCollectionRef.is_valid_fqcr(value):
         raise Invalid('Must be a FQCR or a short name')

@@ -48,7 +47,7 @@ def isodate(value, check_deprecation_date=False, is_tombstone=False):
     else:
         # make sure we have a string
         msg = 'Expected ISO 8601 date string (YYYY-MM-DD), or YAML date'
-        if not isinstance(value, string_types):
+        if not isinstance(value, str):
             raise Invalid(msg)
         # From Python 3.7 in, there is datetime.date.fromisoformat(). For older versions,
         # we have to do things manually.

@@ -80,7 +79,7 @@ def removal_version(value, is_ansible, current_version=None, is_tombstone=False)
         'Removal version must be a string' if is_ansible else
         'Removal version must be a semantic version (https://semver.org/)'
     )
-    if not isinstance(value, string_types):
+    if not isinstance(value, str):
         raise Invalid(msg)
     try:
         if is_ansible:

@@ -191,7 +190,7 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
                 'removal_version': partial(removal_version, is_ansible=is_ansible,
                                            current_version=current_version),
                 'removal_date': partial(isodate, check_deprecation_date=check_deprecation_dates),
-                'warning_text': Any(*string_types),
+                'warning_text': str,
             }
         ),
         avoid_additional_data

@@ -204,7 +203,7 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
                 'removal_version': partial(removal_version, is_ansible=is_ansible,
                                            current_version=current_version, is_tombstone=True),
                 'removal_date': partial(isodate, is_tombstone=True),
-                'warning_text': Any(*string_types),
+                'warning_text': str,
             }
         ),
         avoid_additional_data

@@ -228,18 +227,15 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
     # Adjusted schema for module_utils
     plugin_routing_schema_mu = Any(
         plugins_routing_common_schema.extend({
-            ('redirect'): Any(*string_types)}
+            ('redirect'): str}
         ),
     )
 
-    list_dict_plugin_routing_schema = [{str_type: plugin_routing_schema}
-                                       for str_type in string_types]
+    list_dict_plugin_routing_schema = [{str: plugin_routing_schema}]
 
-    list_dict_plugin_routing_schema_mu = [{str_type: plugin_routing_schema_mu}
-                                          for str_type in string_types]
+    list_dict_plugin_routing_schema_mu = [{str: plugin_routing_schema_mu}]
 
-    list_dict_plugin_routing_schema_modules = [{str_type: plugin_routing_schema_modules}
-                                               for str_type in string_types]
+    list_dict_plugin_routing_schema_modules = [{str: plugin_routing_schema_modules}]
 
     plugin_schema = Schema({
         ('action'): Any(None, *list_dict_plugin_routing_schema),

@@ -267,13 +263,12 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
 
     import_redirection_schema = Any(
         Schema({
-            ('redirect'): Any(*string_types),
+            ('redirect'): str,
             # import_redirect doesn't currently support deprecation
         }, extra=PREVENT_EXTRA)
     )
 
-    list_dict_import_redirection_schema = [{str_type: import_redirection_schema}
-                                           for str_type in string_types]
+    list_dict_import_redirection_schema = [{str: import_redirection_schema}]
 
     # action_groups schema
 

@@ -289,7 +284,7 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
         }, extra=PREVENT_EXTRA)
     }, extra=PREVENT_EXTRA)
     action_group_schema = All([metadata_dict, fqcr_or_shortname], at_most_one_dict)
-    list_dict_action_groups_schema = [{str_type: action_group_schema} for str_type in string_types]
+    list_dict_action_groups_schema = [{str: action_group_schema}]
 
     # top level schema
 

@@ -298,7 +293,7 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
         ('plugin_routing'): Any(plugin_schema),
         ('import_redirection'): Any(None, *list_dict_import_redirection_schema),
         # requires_ansible: In the future we should validate this with SpecifierSet
-        ('requires_ansible'): Any(*string_types),
+        ('requires_ansible'): str,
         ('action_groups'): Any(*list_dict_action_groups_schema),
     }, extra=PREVENT_EXTRA)
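Since six.string_types is just (str,) on Python 3, Any(*string_types) collapses to plain str and the per-string-type list of dicts collapses to a one-element list. A toy voluptuous schema in the spirit of the hunks above (the key and field names are illustrative only):

    from voluptuous import MultipleInvalid, Schema

    # str as a mapping key means "any string key"; str as a value means "must be a string".
    redirect_schema = Schema({str: {'redirect': str}})

    redirect_schema({'old_name': {'redirect': 'ns.col.new_name'}})  # validates

    try:
        redirect_schema({'old_name': {'redirect': 123}})  # rejected: value is not a str
    except MultipleInvalid as exc:
        print(exc)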
@@ -28,7 +28,6 @@ from contextlib import contextmanager
 
 from ansible.executor.powershell.module_manifest import PSModuleDepFinder
 from ansible.module_utils.basic import FILE_COMMON_ARGUMENTS, AnsibleModule
-from ansible.module_utils.six import reraise
 from ansible.module_utils.common.text.converters import to_bytes, to_text
 
 from .utils import CaptureStd, find_executable, get_module_name_from_filename

@@ -153,7 +152,7 @@ def get_py_argument_spec(filename, collection):
             pass
         except BaseException as e:
             # we want to catch all exceptions here, including sys.exit
-            reraise(AnsibleModuleImportError, AnsibleModuleImportError('%s' % e), sys.exc_info()[2])
+            raise AnsibleModuleImportError from e
 
     if not fake.called:
         raise AnsibleModuleNotInitialized()
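The reraise() helper existed to re-raise with an explicit traceback on Python 2; on Python 3 the raise ... from ... syntax chains the original exception natively. A minimal sketch of the replacement pattern (the exception class here is a stand-in, not the real AnsibleModuleImportError):

    class ModuleImportError(Exception):
        pass


    def load_module():
        try:
            raise ValueError('boom')  # stands in for a failing module import
        except BaseException as e:
            # __cause__ is set to e and the original traceback is preserved,
            # which is what six.reraise() was used for.
            raise ModuleImportError(str(e)) from e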
@@ -13,7 +13,6 @@ from urllib.parse import urlparse
 
 from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, MultipleInvalid, Required, Schema, Self, ValueInvalid, Exclusive
 from ansible.constants import DOCUMENTABLE_PLUGINS
-from ansible.module_utils.six import string_types
 from ansible.module_utils.common.collections import is_iterable
 from ansible.module_utils.parsing.convert_bool import boolean
 from ansible.parsing.quoting import unquote

@@ -25,9 +24,8 @@ from antsibull_docs_parser.parser import parse, Context
 
 from .utils import parse_isodate
 
-list_string_types = list(string_types)
-tuple_string_types = tuple(string_types)
-any_string_types = Any(*string_types)
+list_string_types = [str]
+tuple_string_types = (str,)
 
 # Valid DOCUMENTATION.author lines
 # Based on Ansibulbot's extract_github_id()

@@ -57,7 +55,7 @@ FULLY_QUALIFIED_COLLECTION_RESOURCE_RE = re.compile(r'^\w+(?:\.\w+){2,}$')
 
 
 def collection_name(v, error_code=None):
-    if not isinstance(v, string_types):
+    if not isinstance(v, str):
         raise _add_ansible_error_code(
             Invalid('Collection name must be a string'), error_code or 'collection-invalid-name')
     m = COLLECTION_NAME_RE.match(v)

@@ -68,7 +66,7 @@ def collection_name(v, error_code=None):
 
 
 def fqcn(v, error_code=None):
-    if not isinstance(v, string_types):
+    if not isinstance(v, str):
         raise _add_ansible_error_code(
             Invalid('Module/plugin name must be a string'), error_code or 'invalid-documentation')
     m = FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(v)

@@ -87,8 +85,8 @@ def deprecation_versions():
 def version(for_collection=False):
     if for_collection:
         # We do not accept floats for versions in collections
-        return Any(*string_types)
-    return Any(float, *string_types)
+        return str
+    return Any(float, str)
 
 
 def date(error_code=None):

@@ -128,7 +126,7 @@ def _check_url(directive, content):
 
 def doc_string(v):
     """Match a documentation string."""
-    if not isinstance(v, string_types):
+    if not isinstance(v, str):
         raise _add_ansible_error_code(
             Invalid('Must be a string'), 'invalid-documentation')
     errors = []

@@ -216,12 +214,12 @@ seealso_schema = Schema(
             'description': doc_string,
         },
         {
-            Required('ref'): Any(*string_types),
+            Required('ref'): str,
             Required('description'): doc_string,
         },
        {
-            Required('name'): Any(*string_types),
-            Required('link'): Any(*string_types),
+            Required('name'): str,
+            Required('link'): str,
             Required('description'): doc_string,
         },
     ),

@@ -238,7 +236,7 @@ argument_spec_modifiers = {
     'required_together': sequence_of_sequences(min=2),
     'required_one_of': sequence_of_sequences(min=2),
     'required_if': sequence_of_sequences(min=3, max=4),
-    'required_by': Schema({str: Any(list_string_types, tuple_string_types, *string_types)}),
+    'required_by': Schema({str: Any(list_string_types, tuple_string_types, str)}),
 }
 
 

@@ -263,7 +261,7 @@ def options_with_apply_defaults(v):
 def check_removal_version(v, version_field, collection_name_field, error_code='invalid-removal-version'):
     version = v.get(version_field)
     collection_name = v.get(collection_name_field)
-    if not isinstance(version, string_types) or not isinstance(collection_name, string_types):
+    if not isinstance(version, str) or not isinstance(collection_name, str):
         # If they are not strings, schema validation will have already complained.
         return v
     if collection_name == 'ansible.builtin':

@@ -313,9 +311,8 @@ def option_deprecation(v):
 
 
 def argument_spec_schema(for_collection):
-    any_string_types = Any(*string_types)
     schema = {
-        any_string_types: {
+        str: {
             'type': Any(is_callable, *argument_spec_types),
             'elements': Any(*argument_spec_types),
             'default': object,

@@ -336,12 +333,12 @@ def argument_spec_schema(for_collection):
             'deprecated_aliases': Any([All(
                 Any(
                     {
-                        Required('name'): Any(*string_types),
+                        Required('name'): str,
                         Required('date'): date(),
                         Required('collection_name'): collection_name,
                     },
                     {
-                        Required('name'): Any(*string_types),
+                        Required('name'): str,
                         Required('version'): version(for_collection),
                         Required('collection_name'): collection_name,
                     },

@@ -353,13 +350,13 @@ def argument_spec_schema(for_collection):
             )]),
         }
     }
-    schema[any_string_types].update(argument_spec_modifiers)
+    schema[str].update(argument_spec_modifiers)
     schemas = All(
         schema,
-        Schema({any_string_types: no_required_with_default}),
-        Schema({any_string_types: elements_with_list}),
-        Schema({any_string_types: options_with_apply_defaults}),
-        Schema({any_string_types: option_deprecation}),
+        Schema({str: no_required_with_default}),
+        Schema({str: elements_with_list}),
+        Schema({str: options_with_apply_defaults}),
+        Schema({str: option_deprecation}),
     )
     return Schema(schemas)
 

@@ -385,14 +382,15 @@ json_value = Schema(Any(
     int,
     float,
     [Self],
-    *(list({str_type: Self} for str_type in string_types) + list(string_types))
+    {str: Self},
+    str,
 ))
 
 
 def version_added(v, error_code='version-added-invalid', accept_historical=False):
     if 'version_added' in v:
         version_added = v.get('version_added')
-        if isinstance(version_added, string_types):
+        if isinstance(version_added, str):
             # If it is not a string, schema validation will have already complained
             # - or we have a float and we are in ansible/ansible, in which case we're
             # also happy.

@@ -451,7 +449,7 @@ def get_type_checker(v):
         elt_checker, elt_name = get_type_checker({'type': v.get('elements')})
 
         def list_checker(value):
-            if isinstance(value, string_types):
+            if isinstance(value, str):
                 value = [unquote(x.strip()) for x in value.split(',')]
             if not isinstance(value, list):
                 raise ValueError('Value must be a list')

@@ -482,14 +480,14 @@ def get_type_checker(v):
 
     if v_type in ('str', 'string', 'path', 'tmp', 'temppath', 'tmppath'):
         def str_checker(value):
-            if not isinstance(value, string_types):
+            if not isinstance(value, str):
                 raise ValueError('Value must be string')
 
         return str_checker, v_type
 
     if v_type in ('pathspec', 'pathlist'):
         def path_list_checker(value):
-            if not isinstance(value, string_types) and not is_iterable(value):
+            if not isinstance(value, str) and not is_iterable(value):
                 raise ValueError('Value must be string or list of strings')
 
         return path_list_checker, v_type

@@ -588,7 +586,7 @@ def list_dict_option_schema(for_collection, plugin_type):
         'elements': element_types,
     }
     if plugin_type != 'module':
-        basic_option_schema['name'] = Any(*string_types)
+        basic_option_schema['name'] = str
     deprecated_schema = All(
         Schema(
             All(

@@ -605,10 +603,10 @@ def list_dict_option_schema(for_collection, plugin_type):
                 },
                 {
                     # This definition makes sure that everything we require is there
-                    Required('why'): Any(*string_types),
-                    Required(Any('alternatives', 'alternative')): Any(*string_types),
-                    Required(Any('removed_at_date', 'version')): Any(*string_types),
-                    Required('collection_name'): Any(*string_types),
+                    Required('why'): str,
+                    Required(Any('alternatives', 'alternative')): str,
+                    Required(Any('removed_at_date', 'version')): str,
+                    Required('collection_name'): str,
                 },
             ),
             extra=PREVENT_EXTRA

@@ -620,7 +618,7 @@ def list_dict_option_schema(for_collection, plugin_type):
     )
     env_schema = All(
         Schema({
-            Required('name'): Any(*string_types),
+            Required('name'): str,
             'deprecated': deprecated_schema,
             'version_added': version(for_collection),
             'version_added_collection': collection_name,

@@ -629,8 +627,8 @@ def list_dict_option_schema(for_collection, plugin_type):
     )
     ini_schema = All(
         Schema({
-            Required('key'): Any(*string_types),
-            Required('section'): Any(*string_types),
+            Required('key'): str,
+            Required('section'): str,
             'deprecated': deprecated_schema,
             'version_added': version(for_collection),
             'version_added_collection': collection_name,

@@ -639,7 +637,7 @@ def list_dict_option_schema(for_collection, plugin_type):
     )
     vars_schema = All(
         Schema({
-            Required('name'): Any(*string_types),
+            Required('name'): str,
             'deprecated': deprecated_schema,
             'version_added': version(for_collection),
             'version_added_collection': collection_name,

@@ -648,8 +646,8 @@ def list_dict_option_schema(for_collection, plugin_type):
     )
     cli_schema = All(
         Schema({
-            Required('name'): Any(*string_types),
-            'option': Any(*string_types),
+            Required('name'): str,
+            'option': str,
             'deprecated': deprecated_schema,
             'version_added': version(for_collection),
             'version_added_collection': collection_name,

@@ -658,7 +656,7 @@ def list_dict_option_schema(for_collection, plugin_type):
     )
     keyword_schema = All(
         Schema({
-            Required('name'): Any(*string_types),
+            Required('name'): str,
             'deprecated': deprecated_schema,
             'version_added': version(for_collection),
             'version_added_collection': collection_name,

@@ -677,7 +675,7 @@ def list_dict_option_schema(for_collection, plugin_type):
     suboption_schema = dict(basic_option_schema)
     suboption_schema.update({
         # Recursive suboptions
-        'suboptions': Any(None, *list({str_type: Self} for str_type in string_types)),
+        'suboptions': Any(None, {str: Self}),
     })
     suboption_schema = Schema(All(
         suboption_schema,

@@ -686,13 +684,9 @@ def list_dict_option_schema(for_collection, plugin_type):
         check_option_default,
     ), extra=PREVENT_EXTRA)
 
-    # This generates list of dicts with keys from string_types and suboption_schema value
-    # for example in Python 3: {str: suboption_schema}
-    list_dict_suboption_schema = [{str_type: suboption_schema} for str_type in string_types]
-
     option_schema = dict(basic_option_schema)
     option_schema.update({
-        'suboptions': Any(None, *list_dict_suboption_schema),
+        'suboptions': Any(None, {str: suboption_schema}),
     })
     option_schema = Schema(All(
         option_schema,

@@ -703,20 +697,18 @@ def list_dict_option_schema(for_collection, plugin_type):
 
     option_version_added = Schema(
         All({
-            'suboptions': Any(None, *[{str_type: Self} for str_type in string_types]),
+            'suboptions': Any(None, {str: Self}),
         }, partial(version_added, error_code='option-invalid-version-added')),
         extra=ALLOW_EXTRA
     )
 
-    # This generates list of dicts with keys from string_types and option_schema value
-    # for example in Python 3: {str: option_schema}
-    return [{str_type: All(option_schema, option_version_added)} for str_type in string_types]
+    return [{str: All(option_schema, option_version_added)}]
 
 
 def return_contains(v):
     schema = Schema(
         {
-            Required('contains'): Any(dict, list, *string_types)
+            Required('contains'): Any(dict, list, str)
         },
         extra=ALLOW_EXTRA
     )

@@ -752,7 +744,7 @@ def return_schema(for_collection, plugin_type='module'):
 
     inner_return_option_schema = dict(basic_return_option_schema)
     inner_return_option_schema.update({
-        'contains': Any(None, *list({str_type: Self} for str_type in string_types)),
+        'contains': Any(None, {str: Self}),
     })
     return_contains_schema = Any(
         All(

@@ -763,27 +755,23 @@ def return_schema(for_collection, plugin_type='module'):
         Schema(type(None)),
     )
 
-    # This generates list of dicts with keys from string_types and return_contains_schema value
-    # for example in Python 3: {str: return_contains_schema}
-    list_dict_return_contains_schema = [{str_type: return_contains_schema} for str_type in string_types]
-
     return_option_schema = dict(basic_return_option_schema)
     return_option_schema.update({
-        'contains': Any(None, *list_dict_return_contains_schema),
+        'contains': Any(None, {str: return_contains_schema}),
     })
     if plugin_type == 'module':
         # 'returned' is required on top-level
         del return_option_schema['returned']
-        return_option_schema[Required('returned')] = Any(*string_types)
+        return_option_schema[Required('returned')] = str
     return Any(
         All(
             Schema(
                 {
-                    any_string_types: return_option_schema
+                    str: return_option_schema
                 }
             ),
-            Schema({any_string_types: return_contains}),
-            Schema({any_string_types: partial(version_added, error_code='option-invalid-version-added')}),
+            Schema({str: return_contains}),
+            Schema({str: partial(version_added, error_code='option-invalid-version-added')}),
         ),
         Schema(type(None)),
     )

@@ -840,7 +828,7 @@ def author(value):
         value = [value]
 
     for line in value:
-        if not isinstance(line, string_types):
+        if not isinstance(line, str):
             continue  # let schema checks handle
         m = author_line.search(line)
         if not m:

@@ -868,14 +856,14 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi
         'requirements': [doc_string],
         'todo': Any(None, doc_string_or_strings),
         'options': Any(None, *list_dict_option_schema(for_collection, plugin_type)),
-        'extends_documentation_fragment': Any(list_string_types, *string_types),
+        'extends_documentation_fragment': Any(list_string_types, str),
         'version_added_collection': collection_name,
     }
     if plugin_type == 'module':
-        doc_schema_dict[Required('author')] = All(Any(None, list_string_types, *string_types), author)
+        doc_schema_dict[Required('author')] = All(Any(None, list_string_types, str), author)
     else:
         # author is optional for plugins (for now)
-        doc_schema_dict['author'] = All(Any(None, list_string_types, *string_types), author)
+        doc_schema_dict['author'] = All(Any(None, list_string_types, str), author)
     if plugin_type == 'callback':
         doc_schema_dict[Required('type')] = Any('aggregate', 'notification', 'stdout')
 

@@ -896,9 +884,9 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi
         schema = {
             'description': doc_string_or_strings,
             'details': doc_string_or_strings,
-            'support': any_string_types,
-            'version_added_collection': any_string_types,
-            'version_added': any_string_types,
+            'support': str,
+            'version_added_collection': str,
+            'version_added': str,
         }
         if more:
             schema.update(more)

@@ -907,7 +895,7 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi
     doc_schema_dict['attributes'] = Schema(
         All(
             Schema({
-                any_string_types: {
+                str: {
                     Required('description'): doc_string_or_strings,
                     Required('support'): Any('full', 'partial', 'none', 'N/A'),
                     'details': doc_string_or_strings,

@@ -917,12 +905,12 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi
             }, extra=ALLOW_EXTRA),
             partial(version_added, error_code='attribute-invalid-version-added', accept_historical=False),
             Schema({
-                any_string_types: add_default_attributes(),
+                str: add_default_attributes(),
                 'action_group': add_default_attributes({
                     Required('membership'): list_string_types,
                 }),
                 'platform': add_default_attributes({
-                    Required('platforms'): Any(list_string_types, *string_types)
+                    Required('platforms'): Any(list_string_types, str)
                 }),
             }, extra=PREVENT_EXTRA),
         )
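The recurring replacement in this file is a single {str: <schema>} mapping (with voluptuous Self where recursion is needed) in place of a generator over six.string_types. A paraphrase of the json_value schema above, restricted to the members visible in that hunk:

    from voluptuous import Any, Schema, Self

    json_value = Schema(Any(
        int,
        float,
        [Self],
        {str: Self},
        str,
    ))

    json_value({'outer': [1, 2.5, 'three']})  # nested values re-validate against the same schema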
@@ -31,7 +31,6 @@ import yaml.reader
 from ansible.module_utils.common.text.converters import to_text
 from ansible.module_utils.basic import AnsibleModule
 from ansible.module_utils.common.yaml import SafeLoader
-from ansible.module_utils.six import string_types
 from ansible.parsing.yaml.loader import AnsibleLoader
 
 

@@ -211,7 +210,7 @@ def parse_isodate(v, allow_date):
         msg = 'Expected ISO 8601 date string (YYYY-MM-DD) or YAML date'
     else:
         msg = 'Expected ISO 8601 date string (YYYY-MM-DD)'
-    if not isinstance(v, string_types):
+    if not isinstance(v, str):
         raise ValueError(msg)
     # From Python 3.7 in, there is datetime.date.fromisoformat(). For older versions,
     # we have to do things manually.
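The comment retained in the hunk above refers to datetime.date.fromisoformat(), available since Python 3.7. A minimal sketch of that approach (the function name here is illustrative, not the real parse_isodate):

    import datetime


    def parse_iso_date(value):
        if not isinstance(value, str):
            raise ValueError('Expected ISO 8601 date string (YYYY-MM-DD)')
        return datetime.date.fromisoformat(value)


    print(parse_iso_date('2025-01-31'))  # 2025-01-31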
@@ -18,8 +18,6 @@ from __future__ import annotations
 
 import re
 
-from ansible.module_utils.six import text_type
-
 
 _UNSAFE_C = re.compile(u'[\\s\t"]')
 _UNSAFE_CMD = re.compile(u'[\\s\\(\\)\\^\\|%!"<>&]')

@@ -30,7 +28,7 @@ _UNSAFE_CMD = re.compile(u'[\\s\\(\\)\\^\\|%!"<>&]')
 _UNSAFE_PWSH = re.compile(u"(['\u2018\u2019\u201a\u201b])")
 
 
-def quote_c(s):  # type: (text_type) -> text_type
+def quote_c(s):  # type: (str) -> str
     """Quotes a value for the raw Win32 process command line.
 
     Quotes a value to be safely used by anything that calls the Win32

@@ -40,7 +38,7 @@ def quote_c(s):  # type: (text_type) -> text_type
         s: The string to quote.
 
     Returns:
-        (text_type): The quoted string value.
+        (str): The quoted string value.
     """
     # https://docs.microsoft.com/en-us/archive/blogs/twistylittlepassagesallalike/everyone-quotes-command-line-arguments-the-wrong-way
     if not s:

@@ -62,7 +60,7 @@ def quote_c(s):  # type: (text_type) -> text_type
     return u'"{0}"'.format(s)
 
 
-def quote_cmd(s):  # type: (text_type) -> text_type
+def quote_cmd(s):  # type: (str) -> str
     """Quotes a value for cmd.
 
     Quotes a value to be safely used by a command prompt call.

@@ -71,7 +69,7 @@ def quote_cmd(s):  # type: (text_type) -> text_type
         s: The string to quote.
 
     Returns:
-        (text_type): The quoted string value.
+        (str): The quoted string value.
     """
     # https://docs.microsoft.com/en-us/archive/blogs/twistylittlepassagesallalike/everyone-quotes-command-line-arguments-the-wrong-way#a-better-method-of-quoting
     if not s:

@@ -92,7 +90,7 @@ def quote_cmd(s):  # type: (text_type) -> text_type
     return u'^"{0}^"'.format(s)
 
 
-def quote_pwsh(s):  # type: (text_type) -> text_type
+def quote_pwsh(s):  # type: (str) -> str
     """Quotes a value for PowerShell.
 
     Quotes a value to be safely used by a PowerShell expression. The input

@@ -102,7 +100,7 @@ def quote_pwsh(s):  # type: (text_type) -> text_type
         s: The string to quote.
 
     Returns:
-        (text_type): The quoted string value.
+        (str): The quoted string value.
     """
     # https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_quoting_rules?view=powershell-5.1
     if not s:
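With six's text_type gone, the type comments simply use str, which describes the same contract on Python 3. A hypothetical helper showing only the updated annotation style; it does not reproduce the real quote_c/quote_cmd/quote_pwsh escaping rules:

    def quote_example(s):  # type: (str) -> str
        # Naive quoting, for illustration of the signature style only.
        if not s:
            return '""'
        return '"{0}"'.format(s.replace('"', '\\"'))


    print(quote_example('hello "world"'))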
@@ -26,7 +26,6 @@ from ansible.plugins.loader import init_plugin_loader
 MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py',
                                       'ansible/module_utils/__init__.py',
                                       'ansible/module_utils/basic.py',
-                                      'ansible/module_utils/six/__init__.py',
                                       'ansible/module_utils/_internal/__init__.py',
                                       'ansible/module_utils/_internal/_ansiballz/__init__.py',
                                       'ansible/module_utils/_internal/_ansiballz/_loader.py',

@@ -46,6 +45,7 @@ MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py',
                                       'ansible/module_utils/_internal/_traceback.py',
                                       'ansible/module_utils/_internal/_validation.py',
                                       'ansible/module_utils/_internal/_messages.py',
+                                      'ansible/module_utils/_internal/_no_six.py',
                                       'ansible/module_utils/_internal/_patches/_dataclass_annotation_patch.py',
                                       'ansible/module_utils/_internal/_patches/_socket_patch.py',
                                       'ansible/module_utils/_internal/_patches/_sys_intern_patch.py',

@@ -78,7 +78,6 @@ MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py',
                                       'ansible/module_utils/errors.py',
                                       'ansible/module_utils/parsing/__init__.py',
                                       'ansible/module_utils/parsing/convert_bool.py',
-                                      'ansible/module_utils/six/__init__.py',
                                       ))
 
 ONLY_BASIC_FILE = frozenset(('ansible/module_utils/basic.py',))