From e449cc62be020d6fd47a15bc8ccde73698d15474 Mon Sep 17 00:00:00 2001
From: Sorin Sbarnea
Date: Fri, 25 Jun 2021 15:14:56 +0100
Subject: [PATCH] Import shared code from ansiblelint (#5)

This change moves all the shared code from ansiblelint here and allows
its removal from molecule and ansible-lint. Both will import the code
from us.
---
 .flake8                           |  61 +++++
 .pre-commit-config.yaml           |  16 +-
 .pylintrc                         |  10 +
 constraints.txt                   |   8 +
 examples/reqs_v1/requirements.yml |   2 +
 examples/reqs_v2/requirements.yml |   5 +
 mypy.ini                          |  10 +
 setup.cfg                         |   5 +-
 src/ansible_compat/config.py      |  81 ++++++
 src/ansible_compat/constants.py   |  28 ++
 src/ansible_compat/loaders.py     |  10 +
 src/ansible_compat/prerun.py      | 430 ++++++++++++++++++++++++++++++
 test/test_prerun.py               | 226 ++++++++++++++++
 tox.ini                           |  11 +-
 14 files changed, 897 insertions(+), 6 deletions(-)
 create mode 100644 .flake8
 create mode 100644 examples/reqs_v1/requirements.yml
 create mode 100644 examples/reqs_v2/requirements.yml
 create mode 100644 src/ansible_compat/config.py
 create mode 100644 src/ansible_compat/constants.py
 create mode 100644 src/ansible_compat/loaders.py
 create mode 100644 src/ansible_compat/prerun.py
 create mode 100644 test/test_prerun.py

diff --git a/.flake8 b/.flake8
new file mode 100644
index 00000000..aaff9797
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,61 @@
+[flake8]
+
+# Don't even try to analyze these:
+exclude =
+    # No need to traverse egg files
+    *.egg,
+    # No need to traverse egg info dir
+    *.egg-info,
+    # No need to traverse eggs directory
+    .eggs,
+    # No need to traverse our git directory
+    .git,
+    # GitHub configs
+    .github,
+    # Cache files of MyPy
+    .mypy_cache,
+    # Cache files of pytest
+    .pytest_cache,
+    # Temp dir of pytest-testmon
+    .tmontmp,
+    # Countless third-party libs in venvs
+    .tox,
+    # Occasional virtualenv dir
+    .venv
+    # VS Code
+    .vscode,
+    # There's no value in checking cache directories
+    __pycache__,
+    # Temporary build dir
+    build,
+    # This contains sdists and wheels of ansible-lint that we don't want to check
+    dist,
+    # Occasional virtualenv dir
+    env,
+    # Metadata of `pip wheel` cmd is autogenerated
+    pip-wheel-metadata,
+
+# Let's not overcomplicate the code:
+max-complexity = 10
+
+# Accessibility/large fonts and PEP8 friendly:
+#max-line-length = 79
+# Accessibility/large fonts and PEP8 unfriendly:
+max-line-length = 100
+
+# The only allowed ignores are related to black and isort
+# https://black.readthedocs.io/en/stable/the_black_code_style.html#line-length
+# "H" are generated by hacking plugin, which is not black compatible
+ignore = E203,E501,W503,H
+
+# Allow certain violations in certain files:
+# per-file-ignores =
+
+# flake8-pytest-style
+# PT001:
+pytest-fixture-no-parentheses = true
+# PT006:
+pytest-parametrize-names-type = tuple
+# PT007:
+pytest-parametrize-values-type = tuple
+pytest-parametrize-values-row-type = tuple
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e944fe3c..62e89ac7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -72,14 +72,24 @@ repos:
           - flake8-docstrings>=1.5.0
           - flake8-pytest-style>=1.2.2
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.902
+    rev: v0.910
     hooks:
       - id: mypy
         # empty args needed in order to match mypy cli behavior
         args: ["--strict"]
-        additional_dependencies: []
+        additional_dependencies:
+          - flaky
+          - packaging
+          - pytest
+          - tenacity
+          - types-PyYAML
   - repo: https://github.com/pre-commit/mirrors-pylint
     rev: v3.0.0a3
     hooks:
       - id: pylint
-        additional_dependencies: []
+        additional_dependencies:
+          - flaky
+          - pytest
+          - PyYAML
+          - tenacity
+          - typing_extensions
diff --git a/.pylintrc b/.pylintrc
index a26883c8..774d482f 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -1,3 +1,13 @@
+[BASIC]
+good-names=f,  # filename
+           i,
+           j,
+           k,
+           ns,  # namespace
+           ex,
+           Run,
+           _
+
 [IMPORTS]
 preferred-modules =
   unittest:pytest,
diff --git a/constraints.txt b/constraints.txt
index 3007143a..393c5c54 100644
--- a/constraints.txt
+++ b/constraints.txt
@@ -8,6 +8,8 @@ attrs==21.2.0
     # via pytest
 coverage==5.5
     # via pytest-cov
+flaky==3.7.0
+    # via ansible-compat (setup.py)
 iniconfig==1.1.1
     # via pytest
 packaging==20.9
@@ -24,6 +26,12 @@ pytest==6.2.4
     # via
     #   ansible-compat (setup.py)
     #   pytest-cov
+pyyaml==5.4.1
+    # via ansible-compat (setup.py)
+six==1.16.0
+    # via tenacity
+tenacity==7.0.0
+    # via ansible-compat (setup.py)
 toml==0.10.2
     # via
     #   pytest
diff --git a/examples/reqs_v1/requirements.yml b/examples/reqs_v1/requirements.yml
new file mode 100644
index 00000000..e9c70415
--- /dev/null
+++ b/examples/reqs_v1/requirements.yml
@@ -0,0 +1,2 @@
+# v1 requirements test file
+- src: geerlingguy.mysql
diff --git a/examples/reqs_v2/requirements.yml b/examples/reqs_v2/requirements.yml
new file mode 100644
index 00000000..e8a836c5
--- /dev/null
+++ b/examples/reqs_v2/requirements.yml
@@ -0,0 +1,5 @@
+---
+roles:
+  - name: geerlingguy.mysql
+collections:
+  - name: ssbarnea.molecule
diff --git a/mypy.ini b/mypy.ini
index 275ad98e..f043eeb2 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -11,5 +11,15 @@ disallow_any_generics = True
 ; warn_unused_configs = True
 exclude = test/local-content
 
+[mypy-ansible.release]
+ignore_missing_imports = True
+
+[mypy-flaky]
+# https://github.com/box/flaky/issues/170
+ignore_missing_imports = True
+
+[mypy-pytest]
+ignore_missing_imports = True
+
 [mypy-setuptools]
 ignore_missing_imports = True
diff --git a/setup.cfg b/setup.cfg
index 14820939..ff66df4b 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -60,10 +60,13 @@ setup_requires =
   setuptools_scm_git_archive>=1.0
 
 # These are required in actual runtime:
-; install_requires =
+install_requires =
+    tenacity
+    PyYAML
 
 [options.extras_require]
 test =
+    flaky
     pytest
     pytest-cov
 
diff --git a/src/ansible_compat/config.py b/src/ansible_compat/config.py
new file mode 100644
index 00000000..d846a847
--- /dev/null
+++ b/src/ansible_compat/config.py
@@ -0,0 +1,81 @@
+"""Store configuration options as a singleton."""
+import os
+import re
+import subprocess
+import sys
+from functools import lru_cache
+from typing import List, Optional, Tuple
+
+from packaging.version import Version
+
+from ansible_compat.constants import ANSIBLE_MISSING_RC
+
+# Used to store collection list paths (with mock paths if needed)
+collection_list: List[str] = []
+
+
+@lru_cache()
+def ansible_collections_path() -> str:
+    """Return collection path variable for current version of Ansible."""
+    # respect Ansible behavior, which is to load old name if present
+    for env_var in ["ANSIBLE_COLLECTIONS_PATHS", "ANSIBLE_COLLECTIONS_PATH"]:
+        if env_var in os.environ:
+            return env_var
+
+    # https://github.com/ansible/ansible/pull/70007
+    if ansible_version() >= ansible_version("2.10.0.dev0"):
+        return "ANSIBLE_COLLECTIONS_PATH"
+    return "ANSIBLE_COLLECTIONS_PATHS"
+
+
+def parse_ansible_version(stdout: str) -> Tuple[str, Optional[str]]:
+    """Parse output of 'ansible --version'."""
+    # Ansible can produce extra output before displaying version in debug mode.
+
+    # ansible-core 2.11+: 'ansible [core 2.11.3]'
+    match = re.search(
+        r"^ansible \[(?:core|base) (?P<version>[^\]]+)\]", stdout, re.MULTILINE
+    )
+    if match:
+        return match.group("version"), None
+    # ansible-base 2.10 and Ansible 2.9: 'ansible 2.x.y'
+    match = re.search(r"^ansible (?P<version>[^\s]+)", stdout, re.MULTILINE)
+    if match:
+        return match.group("version"), None
+    return "", "FATAL: Unable to parse ansible cli version: %s" % stdout
+
+
+@lru_cache()
+def ansible_version(version: str = "") -> Version:
+    """Return current Version object for Ansible.
+
+    If no version string is given, the currently detected Ansible version is
+    returned. When a version argument is given, it is converted to a Version
+    object so it can be used in comparisons.
+    """
+    if version:
+        return Version(version)
+
+    proc = subprocess.run(
+        ["ansible", "--version"],
+        universal_newlines=True,
+        check=False,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+    )
+    if proc.returncode == 0:
+        version, error = parse_ansible_version(proc.stdout)
+        if error is not None:
+            print(error)
+            sys.exit(ANSIBLE_MISSING_RC)
+    else:
+        print(
+            "Unable to find a working copy of ansible executable.",
+            proc,
+        )
+        sys.exit(ANSIBLE_MISSING_RC)
+    return Version(version)
+
+
+if ansible_collections_path() in os.environ:
+    collection_list = os.environ[ansible_collections_path()].split(':')
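For orientation (this note is not part of the patch itself), a minimal sketch of how the helpers added in config.py above are expected to be used; the version strings below are illustrative:

    from ansible_compat.config import ansible_version, parse_ansible_version

    # Compare the detected Ansible version against an arbitrary minimum.
    if ansible_version() >= ansible_version("2.10.0"):
        print("collections are configured via ANSIBLE_COLLECTIONS_PATH")

    # Parse sample `ansible --version` output; error is None on success.
    version, error = parse_ansible_version("ansible [core 2.11.3]\n")
    assert version == "2.11.3" and error is None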
diff --git a/src/ansible_compat/constants.py b/src/ansible_compat/constants.py
new file mode 100644
index 00000000..857c4e5a
--- /dev/null
+++ b/src/ansible_compat/constants.py
@@ -0,0 +1,28 @@
+"""Constants used by ansible_compat."""
+
+
+# Minimal version of Ansible we support for runtime
+ANSIBLE_MIN_VERSION = "2.9"
+
+# Based on https://docs.ansible.com/ansible/latest/reference_appendices/config.html
+ANSIBLE_DEFAULT_ROLES_PATH = (
+    "~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles"
+)
+
+INVALID_CONFIG_RC = 2
+ANSIBLE_MISSING_RC = 4
+INVALID_PREREQUISITES_RC = 10
+
+MSG_INVALID_FQRL = """\
+Computed fully qualified role name of %s does not follow current galaxy requirements.
+Please edit meta/main.yml and ensure we can correctly determine the full role name:
+
+galaxy_info:
+  role_name: my_name  # if absent, the name of the directory hosting the role is used instead
+  namespace: my_galaxy_namespace  # if absent, author is used instead
+
+Namespace: https://galaxy.ansible.com/docs/contributing/namespaces.html#galaxy-namespace-limitations
+Role: https://galaxy.ansible.com/docs/contributing/creating_role.html#role-names
+
+As an alternative, you can add 'role-name' to either skip_list or warn_list.
+"""
diff --git a/src/ansible_compat/loaders.py b/src/ansible_compat/loaders.py
new file mode 100644
index 00000000..262b4266
--- /dev/null
+++ b/src/ansible_compat/loaders.py
@@ -0,0 +1,10 @@
+"""Utilities for loading various files."""
+from typing import Any
+
+import yaml
+
+
+def yaml_from_file(filepath: str) -> Any:
+    """Return a loaded YAML file."""
+    with open(filepath) as content:
+        return yaml.load(content, Loader=yaml.FullLoader)
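As a usage note (not part of the patch), yaml_from_file simply returns whatever structure the YAML file holds; for the v2 requirements example added earlier that is a dict:

    from ansible_compat.loaders import yaml_from_file

    # A v2 requirements file loads as a dict with 'roles' and 'collections' keys.
    reqs = yaml_from_file("examples/reqs_v2/requirements.yml")
    print([collection["name"] for collection in reqs.get("collections", [])])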
+""" diff --git a/src/ansible_compat/loaders.py b/src/ansible_compat/loaders.py new file mode 100644 index 00000000..262b4266 --- /dev/null +++ b/src/ansible_compat/loaders.py @@ -0,0 +1,10 @@ +"""Utilities for loading various files.""" +from typing import Any + +import yaml + + +def yaml_from_file(filepath: str) -> Any: + """Return a loaded YAML file.""" + with open(filepath) as content: + return yaml.load(content, Loader=yaml.FullLoader) diff --git a/src/ansible_compat/prerun.py b/src/ansible_compat/prerun.py new file mode 100644 index 00000000..c3d6974b --- /dev/null +++ b/src/ansible_compat/prerun.py @@ -0,0 +1,430 @@ +"""Utilities for configuring ansible runtime environment.""" +import hashlib +import json +import logging +import os +import pathlib +import re +import subprocess +import sys +from functools import lru_cache +from typing import Any, Dict, List, Optional, Tuple, Type, Union + +import packaging +import tenacity + +from ansible_compat.config import ( + ansible_collections_path, + collection_list, + parse_ansible_version, +) +from ansible_compat.constants import ( # INVALID_CONFIG_RC, + ANSIBLE_DEFAULT_ROLES_PATH, + ANSIBLE_MIN_VERSION, + ANSIBLE_MISSING_RC, + INVALID_PREREQUISITES_RC, + MSG_INVALID_FQRL, +) +from ansible_compat.loaders import yaml_from_file + +_logger = logging.getLogger(__name__) + + +def check_ansible_presence(exit_on_error: bool = False) -> Tuple[str, str]: + """Assures we stop execution if Ansible is missing or outdated. + + Return found version and an optional exception if something wrong + was detected. + """ + + @lru_cache() + def _get_ver_err() -> Tuple[str, str]: + + err = "" + failed = False + ver = "" + result = subprocess.run( + args=["ansible", "--version"], + stdout=subprocess.PIPE, + universal_newlines=True, + check=False, + ) + if result.returncode != 0: + return ( + ver, + "FATAL: Unable to retrieve ansible cli version: %s" % result.stdout, + ) + + ver, error = parse_ansible_version(result.stdout) + if error is not None: + return "", error + try: + # pylint: disable=import-outside-toplevel + from ansible.release import __version__ as ansible_module_version + + if packaging.version.parse( + ansible_module_version + ) < packaging.version.parse(ANSIBLE_MIN_VERSION): + failed = True + except (ImportError, ModuleNotFoundError) as exc: + failed = True + ansible_module_version = "none" + err += f"{exc}\n" + if failed: + err += ( + "FATAL: We require a version of Ansible package" + " >= %s, but %s was found. " + "Please install a compatible version using the same python interpreter. See " + "https://docs.ansible.com/ansible/latest/installation_guide" + "/intro_installation.html#installing-ansible-with-pip" + % (ANSIBLE_MIN_VERSION, ansible_module_version) + ) + + elif ver != ansible_module_version: + err = ( + f"FATAL: Ansible CLI ({ver}) and python module" + f" ({ansible_module_version}) versions do not match. This " + "indicates a broken execution environment." + ) + return ver, err + + ver, err = _get_ver_err() + if exit_on_error and err: + _logger.error(err) + sys.exit(ANSIBLE_MISSING_RC) + return ver, err + + +def install_collection(collection: str, destination: Optional[str] = None) -> None: + """Install an Ansible collection. 
+def install_collection(collection: str, destination: Optional[str] = None) -> None:
+    """Install an Ansible collection.
+
+    Can accept version constraints like 'foo.bar:>=1.2.3'
+    """
+    cmd = [
+        "ansible-galaxy",
+        "collection",
+        "install",
+        "-v",
+    ]
+    if destination:
+        cmd.extend(["-p", destination])
+    cmd.append(f"{collection}")
+
+    _logger.info("Running %s", " ".join(cmd))
+    run = subprocess.run(
+        cmd,
+        universal_newlines=True,
+        check=False,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+    )
+    if run.returncode != 0:
+        _logger.error("Command returned %s code:\n%s", run.returncode, run.stdout)
+        sys.exit(INVALID_PREREQUISITES_RC)
+
+
+@tenacity.retry(  # Retry up to 3 times as galaxy server can return errors
+    reraise=True,
+    wait=tenacity.wait_fixed(30),  # type: ignore
+    stop=tenacity.stop_after_attempt(3),  # type: ignore
+    before_sleep=tenacity.after_log(_logger, logging.WARNING),  # type: ignore
+)
+def install_requirements(requirement: str, cache_dir: str) -> None:
+    """Install dependencies from a requirements.yml."""
+    if not os.path.exists(requirement):
+        return
+
+    cmd = [
+        "ansible-galaxy",
+        "role",
+        "install",
+        "--roles-path",
+        f"{cache_dir}/roles",
+        "-vr",
+        f"{requirement}",
+    ]
+
+    _logger.info("Running %s", " ".join(cmd))
+    run = subprocess.run(
+        cmd,
+        universal_newlines=True,
+        check=False,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+    )
+    if run.returncode != 0:
+        _logger.error(run.stdout)
+        raise RuntimeError(run.returncode)
+
+    # ansible-galaxy collection install only works on v2 requirements.yml files
+    if "collections" in yaml_from_file(requirement):
+
+        cmd = [
+            "ansible-galaxy",
+            "collection",
+            "install",
+            "-p",
+            f"{cache_dir}/collections",
+            "-vr",
+            f"{requirement}",
+        ]
+
+        _logger.info("Running %s", " ".join(cmd))
+        run = subprocess.run(
+            cmd,
+            universal_newlines=True,
+            check=False,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+        )
+        if run.returncode != 0:
+            _logger.error(run.stdout)
+            raise RuntimeError(run.returncode)
+
+
+def get_cache_dir(project_dir: str) -> str:
+    """Compute cache directory to be used based on project path."""
+    # 6 chars of entropy should be enough
+    cache_key = hashlib.sha256(os.path.abspath(project_dir).encode()).hexdigest()[:6]
+    cache_dir = "%s/ansible-compat/%s" % (
+        os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache")),
+        cache_key,
+    )
+    return cache_dir
+
+
+def prepare_environment(
+    project_dir: Optional[str] = None,
+    offline: bool = False,
+    required_collections: Optional[Dict[str, str]] = None,
+) -> None:
+    """Make dependencies available if needed."""
+    if required_collections is None:
+        required_collections = {}
+
+    if not project_dir:
+        project_dir = os.getcwd()
+    cache_dir = get_cache_dir(project_dir)
+
+    if not offline:
+        install_requirements("requirements.yml", cache_dir=cache_dir)
+        for req in pathlib.Path(".").glob("molecule/*/requirements.yml"):
+            install_requirements(str(req), cache_dir=cache_dir)
+
+    for name, min_version in required_collections.items():
+        install_collection(
+            f"{name}:>={min_version}",
+            destination=f"{cache_dir}/collections" if cache_dir else None,
+        )
+
+    _install_galaxy_role(project_dir)
+    # _perform_mockings()
+    _prepare_ansible_paths(cache_dir=cache_dir)
+
+
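A sketch of the intended entry point; the collection name and minimum version below are examples only, not values mandated by the patch:

    from ansible_compat.prerun import prepare_environment

    # Install requirements.yml dependencies (unless offline) and make sure an
    # example collection is available in the per-project cache directory.
    prepare_environment(
        project_dir=".",
        offline=False,
        required_collections={"community.molecule": "0.1.0"},
    )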
+def _get_galaxy_role_ns(galaxy_infos: Dict[str, Any]) -> str:
+    """Compute role namespace from meta/main.yml, including trailing dot."""
+    role_namespace = galaxy_infos.get('namespace', "")
+    if len(role_namespace) == 0:
+        role_namespace = galaxy_infos.get('author', "")
+    if not isinstance(role_namespace, str):
+        raise RuntimeError("Role namespace must be string, not %s" % role_namespace)
+    # if there's a space in the namespace, it's likely the author name
+    # and not the galaxy login, so act as if there was no namespace
+    if re.match(r"^\w+ \w+", role_namespace):
+        role_namespace = ""
+    else:
+        role_namespace = f"{role_namespace}."
+    return role_namespace
+
+
+def _get_galaxy_role_name(galaxy_infos: Dict[str, Any]) -> str:
+    """Compute role name from meta/main.yml."""
+    return galaxy_infos.get('role_name', "")
+
+
+def _get_role_fqrn(galaxy_infos: Dict[str, Any]) -> str:
+    """Compute role fqrn."""
+    role_namespace = _get_galaxy_role_ns(galaxy_infos)
+    role_name = _get_galaxy_role_name(galaxy_infos)
+    if len(role_name) == 0:
+        role_name = pathlib.Path(".").absolute().name
+        role_name = re.sub(r'(ansible-|ansible-role-)', '', role_name)
+
+    return f"{role_namespace}{role_name}"
+
+
+def _install_galaxy_role(project_dir: str, role_name_check: int = 0) -> None:
+    """Detect a standalone galaxy role and install it.
+
+    role_name_check levels:
+    0: exit with error if name is not compliant (default)
+    1: warn if name is not compliant
+    2: bypass any name checking
+    """
+    if not os.path.exists("meta/main.yml"):
+        return
+    yaml = yaml_from_file("meta/main.yml")
+    if 'galaxy_info' not in yaml:
+        return
+
+    fqrn = _get_role_fqrn(yaml['galaxy_info'])
+
+    if role_name_check in [0, 1]:
+        if not re.match(r"[a-z0-9][a-z0-9_]+\.[a-z][a-z0-9_]+$", fqrn):
+            msg = MSG_INVALID_FQRL % fqrn
+            if role_name_check == 1:
+                _logger.warning(msg)
+            else:
+                _logger.error(msg)
+                sys.exit(INVALID_PREREQUISITES_RC)
+    else:
+        # when 'role-name' is in skip_list, we stick to plain role names
+        if 'role_name' in yaml['galaxy_info']:
+            role_namespace = _get_galaxy_role_ns(yaml['galaxy_info'])
+            role_name = _get_galaxy_role_name(yaml['galaxy_info'])
+            fqrn = f"{role_namespace}{role_name}"
+        else:
+            fqrn = pathlib.Path(".").absolute().name
+    path = pathlib.Path(f"{get_cache_dir(project_dir)}/roles")
+    path.mkdir(parents=True, exist_ok=True)
+    link_path = path / fqrn
+    # despite documentation stating that is_file() reports true for symlinks,
+    # it appears that is_dir() reports true instead, so we rely on exists().
+    target = pathlib.Path(project_dir).absolute()
+    if not link_path.exists() or os.readlink(link_path) != str(target):
+        if link_path.exists():
+            link_path.unlink()
+        link_path.symlink_to(target, target_is_directory=True)
+    _logger.info(
+        "Using %s symlink to current repository in order to enable Ansible to find the role using its expected full name.",
+        link_path,
+    )
+
+
+def _prepare_ansible_paths(cache_dir: str) -> None:
+    """Configure Ansible environment variables."""
+    library_paths: List[str] = []
+    roles_path: List[str] = []
+
+    for path_list, path in (
+        (library_paths, "plugins/modules"),
+        (library_paths, f"{cache_dir}/modules"),
+        (collection_list, f"{cache_dir}/collections"),
+        (roles_path, "roles"),
+        (roles_path, f"{cache_dir}/roles"),
+    ):
+        if path not in path_list and os.path.exists(path):
+            path_list.append(path)
+
+    _update_env('ANSIBLE_LIBRARY', library_paths)
+    _update_env(ansible_collections_path(), collection_list)
+    _update_env('ANSIBLE_ROLES_PATH', roles_path, default=ANSIBLE_DEFAULT_ROLES_PATH)
+
+
+def _update_env(varname: str, value: List[str], default: str = "") -> None:
+    """Update a colon-separated environment variable, if needed, by appending."""
+    if value:
+        orig_value = os.environ.get(varname, default=default)
+        if orig_value:
+            # Prepend the original or default variable content to the custom content.
+            value = [*orig_value.split(':'), *value]
+        value_str = ":".join(value)
+        if value_str != os.environ.get(varname, ""):
+            os.environ[varname] = value_str
+            _logger.info("Added %s=%s", varname, value_str)
+
+
+def ansible_config_get(key: str, kind: Type[Any] = str) -> Union[str, List[str], None]:
+    """Return configuration item from ansible config."""
+    env = os.environ.copy()
+    # Avoid possible ANSI garbage
+    env["ANSIBLE_FORCE_COLOR"] = "0"
+    # Avoid our own override as this prevents returning system paths.
+    colpathvar = ansible_collections_path()
+    if colpathvar in env:
+        env.pop(colpathvar)
+
+    config = subprocess.check_output(
+        ["ansible-config", "dump"], universal_newlines=True, env=env
+    )
+
+    if kind == str:
+        result = re.search(rf"^{key}.* = (.*)$", config, re.MULTILINE)
+        if result:
+            return result.groups()[0]
+    elif kind == list:
+        result = re.search(rf"^{key}.* = (\[.*\])$", config, re.MULTILINE)
+        if result:
+            val = eval(result.groups()[0])  # pylint: disable=eval-used
+            if not isinstance(val, list):
+                raise RuntimeError(f"Unexpected data read for {key}: {val}")
+            return val
+    else:
+        raise RuntimeError("Unknown data type.")
+    return None
+
+
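For illustration, ansible_config_get works by scraping `ansible-config dump` output, so reading a list-valued key looks like this (a sketch, not part of the patch):

    from ansible_compat.prerun import ansible_config_get

    # COLLECTIONS_PATHS is rendered as a Python-style list by `ansible-config dump`.
    paths = ansible_config_get("COLLECTIONS_PATHS", list)
    print(paths)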
+def require_collection(  # noqa: C901
+    name: str,
+    version: Optional[str] = None,
+    install: bool = True,
+    cache_dir: Optional[str] = None,
+) -> None:
+    """Check if a minimal collection version is present or exits.
+
+    In the future this method may attempt to install a missing or outdated
+    collection before failing.
+    """
+    try:
+        ns, coll = name.split('.', 1)
+    except ValueError:
+        sys.exit("Invalid collection name supplied: %s" % name)
+
+    paths = ansible_config_get('COLLECTIONS_PATHS', list)
+    if not paths or not isinstance(paths, list):
+        sys.exit(f"Unable to determine ansible collection paths. ({paths})")
+
+    if cache_dir:
+        # if we have a cache dir, prefer it as the destination when
+        # installing a missing collection
+        paths.insert(0, f"{cache_dir}/collections")
+
+    for path in paths:
+        collpath = os.path.join(path, 'ansible_collections', ns, coll)
+        if os.path.exists(collpath):
+            mpath = os.path.join(collpath, 'MANIFEST.json')
+            if not os.path.exists(mpath):
+                _logger.fatal(
+                    "Found collection at '%s' but missing MANIFEST.json, cannot get info.",
+                    collpath,
+                )
+                sys.exit(INVALID_PREREQUISITES_RC)
+
+            with open(mpath, 'r') as f:
+                manifest = json.loads(f.read())
+                found_version = packaging.version.parse(
+                    manifest['collection_info']['version']
+                )
+                if version and found_version < packaging.version.parse(version):
+                    if install:
+                        install_collection(f"{name}:>={version}")
+                        require_collection(name, version, install=False)
+                    else:
+                        _logger.fatal(
+                            "Found %s collection %s but %s or newer is required.",
+                            name,
+                            found_version,
+                            version,
+                        )
+                        sys.exit(INVALID_PREREQUISITES_RC)
+            break
+    else:
+        if install:
+            install_collection(f"{name}:>={version}")
+            require_collection(
+                name=name, version=version, install=False, cache_dir=cache_dir
+            )
+        else:
+            _logger.fatal("Collection '%s' not found in '%s'", name, paths)
+            sys.exit(INVALID_PREREQUISITES_RC)
diff --git a/test/test_prerun.py b/test/test_prerun.py
new file mode 100644
index 00000000..2a7db568
--- /dev/null
+++ b/test/test_prerun.py
@@ -0,0 +1,226 @@
+"""Tests related to prerun part of the linter."""
+# pylint: disable=protected-access
+import logging
+import os
+import subprocess
+from contextlib import contextmanager
+from typing import Iterator, List
+
+import pytest
+from _pytest.monkeypatch import MonkeyPatch
+from flaky import flaky
+
+from ansible_compat import prerun
+from ansible_compat.constants import INVALID_PREREQUISITES_RC
+
+
+@contextmanager
+def remember_cwd(cwd: str) -> Iterator[None]:
+    """Context manager for chdir."""
+    curdir = os.getcwd()
+    try:
+        os.chdir(cwd)
+        yield
+    finally:
+        os.chdir(curdir)
+
+
+# https://github.com/box/flaky/issues/170
+@flaky(max_runs=3)  # type: ignore
+def test_prerun_reqs_v1(caplog: pytest.LogCaptureFixture) -> None:
+    """Checks that the linter can auto-install requirements v1 when found."""
+    cwd = os.path.realpath(
+        os.path.join(
+            os.path.dirname(os.path.realpath(__file__)), "..", "examples", "reqs_v1"
+        )
+    )
+    with remember_cwd(cwd):
+        with caplog.at_level(logging.INFO):
+            prerun.prepare_environment()
+    assert any(
+        msg.startswith("Running ansible-galaxy role install") for msg in caplog.messages
+    )
+    assert all(
+        "Running ansible-galaxy collection install" not in msg
+        for msg in caplog.messages
+    )
+
+
+@flaky(max_runs=3)  # type: ignore
+def test_prerun_reqs_v2(caplog: pytest.LogCaptureFixture) -> None:
+    """Checks that the linter can auto-install requirements v2 when found."""
+    cwd = os.path.realpath(
+        os.path.join(
+            os.path.dirname(os.path.realpath(__file__)), "..", "examples", "reqs_v2"
+        )
+    )
+    with remember_cwd(cwd):
+        with caplog.at_level(logging.INFO):
+            prerun.prepare_environment()
+    assert any(
+        msg.startswith("Running ansible-galaxy role install")
+        for msg in caplog.messages
+    )
+    assert any(
+        msg.startswith("Running ansible-galaxy collection install")
+        for msg in caplog.messages
+    )
+
+
+def test__update_env_no_old_value_no_default_no_value(monkeypatch: MonkeyPatch) -> None:
+    """Make sure empty value does not touch environment."""
+    monkeypatch.delenv("DUMMY_VAR", raising=False)
+
+    prerun._update_env("DUMMY_VAR", [])
+
+    assert "DUMMY_VAR" not in os.environ
+
+
+def test__update_env_no_old_value_no_value(monkeypatch: MonkeyPatch) -> None:
+    """Make sure empty value does not touch environment."""
+    monkeypatch.delenv("DUMMY_VAR", raising=False)
+
+    prerun._update_env("DUMMY_VAR", [], "a:b")
+
+    assert "DUMMY_VAR" not in os.environ
+
+
+def test__update_env_no_default_no_value(monkeypatch: MonkeyPatch) -> None:
+    """Make sure empty value does not touch environment."""
+    monkeypatch.setenv("DUMMY_VAR", "a:b")
+
+    prerun._update_env("DUMMY_VAR", [])
+
+    assert os.environ["DUMMY_VAR"] == "a:b"
+
+
+@pytest.mark.parametrize(
+    ("value", "result"),
+    (
+        (["a"], "a"),
+        (["a", "b"], "a:b"),
+        (["a", "b", "c"], "a:b:c"),
+    ),
+)
+def test__update_env_no_old_value_no_default(
+    monkeypatch: MonkeyPatch, value: List[str], result: str
+) -> None:
+    """Values are concatenated using : as the separator."""
+    monkeypatch.delenv("DUMMY_VAR", raising=False)
+
+    prerun._update_env("DUMMY_VAR", value)
+
+    assert os.environ["DUMMY_VAR"] == result
+
+
+@pytest.mark.parametrize(
+    ("default", "value", "result"),
+    (
+        ("a:b", ["c"], "a:b:c"),
+        ("a:b", ["c:d"], "a:b:c:d"),
+    ),
+)
+def test__update_env_no_old_value(
+    monkeypatch: MonkeyPatch, default: str, value: List[str], result: str
+) -> None:
+    """Values are appended to default value."""
+    monkeypatch.delenv("DUMMY_VAR", raising=False)
+
+    prerun._update_env("DUMMY_VAR", value, default)
+
+    assert os.environ["DUMMY_VAR"] == result
+
+
+@pytest.mark.parametrize(
+    ("old_value", "value", "result"),
+    (
+        ("a:b", ["c"], "a:b:c"),
+        ("a:b", ["c:d"], "a:b:c:d"),
+    ),
+)
+def test__update_env_no_default(
+    monkeypatch: MonkeyPatch, old_value: str, value: List[str], result: str
+) -> None:
+    """Values are appended to preexisting value."""
+    monkeypatch.setenv("DUMMY_VAR", old_value)
+
+    prerun._update_env("DUMMY_VAR", value)
+
+    assert os.environ["DUMMY_VAR"] == result
+
+
+@pytest.mark.parametrize(
+    ("old_value", "default", "value", "result"),
+    (
+        ("", "", ["e"], "e"),
+        ("a", "", ["e"], "a:e"),
+        ("", "c", ["e"], "e"),
+        ("a", "c", ["e:f"], "a:e:f"),
+    ),
+)
+def test__update_env(
+    monkeypatch: MonkeyPatch,
+    old_value: str,
+    default: str,  # pylint: disable=unused-argument
+    value: List[str],
+    result: str,
+) -> None:
+    """Defaults are ignored when preexisting value is present."""
+    monkeypatch.setenv("DUMMY_VAR", old_value)
+
+    prerun._update_env("DUMMY_VAR", value)
+
+    assert os.environ["DUMMY_VAR"] == result
+
+
+def test_require_collection_wrong_version() -> None:
+    """Tests behaviour of require_collection."""
+    subprocess.check_output(
+        [
+            "ansible-galaxy",
+            "collection",
+            "install",
+            "containers.podman",
+            "-p",
+            "~/.ansible/collections",
+        ]
+    )
+    with pytest.raises(SystemExit) as pytest_wrapped_e:
+        prerun.require_collection("containers.podman", '9999.9.9')
+    assert pytest_wrapped_e.type == SystemExit
+    assert pytest_wrapped_e.value.code == INVALID_PREREQUISITES_RC
+
+
+@pytest.mark.parametrize(
+    ("name", "version"),
+    (
+        ("fake_namespace.fake_name", None),
+        ("fake_namespace.fake_name", "9999.9.9"),
+    ),
+)
+def test_require_collection_missing(name: str, version: str) -> None:
+    """Tests behaviour of require_collection, missing case."""
+    with pytest.raises(SystemExit) as pytest_wrapped_e:
+        prerun.require_collection(name, version)
+    assert pytest_wrapped_e.type == SystemExit
+    assert pytest_wrapped_e.value.code == INVALID_PREREQUISITES_RC
+
+
+def test_ansible_config_get() -> None:
+    """Check ansible_config_get."""
+    paths = prerun.ansible_config_get("COLLECTIONS_PATHS", list)
+    assert isinstance(paths, list)
+    assert len(paths) > 0
+
+
+def test_install_collection() -> None:
+    """Check that valid collection installs do not fail."""
+    prerun.install_collection("containers.podman:>=1.0")
+
+
+def test_install_collection_fail() -> None:
+    """Check that invalid collection install fails."""
+    with pytest.raises(SystemExit) as pytest_wrapped_e:
+        prerun.install_collection("containers.podman:>=9999.0")
+    assert pytest_wrapped_e.type == SystemExit
+    assert pytest_wrapped_e.value.code == INVALID_PREREQUISITES_RC
diff --git a/tox.ini b/tox.ini
index a0f8ad75..303702b8 100644
--- a/tox.ini
+++ b/tox.ini
@@ -7,7 +7,7 @@ requires =
   pip >= 19.3.0
 skip_missing_interpreters = True
 # `usedevelop = true` overrides `skip_install` instruction, it's unwanted
-usedevelop = false
+usedevelop = true
 
 [testenv]
 description =
@@ -20,9 +20,9 @@ extras =
 deps =
   ansible29: ansible>=2.9,<2.10
   ansible210: ansible-base>=2.10,<2.11
-  py: ansible-core>=2.11
   devel: ansible-core @ git+https://github.com/ansible/ansible.git  # GPLv3+
 commands =
+  ansible --version
   # We add coverage options but not making them mandatory as we do not want to force
   # pytest users to run coverage when they just want to run a single test with `pytest -k test`
   {envpython} -m pytest \
@@ -66,6 +66,7 @@ deps =
   pip-tools>=5.4.0
   setuptools>=51.1.1
 skip_install = true
+usedevelop = false
 commands =
   pip-compile --extra test --output-file=constraints.txt setup.py
   {envpython} -m pre_commit run -a
@@ -108,3 +109,9 @@ commands =
   twine check {toxinidir}/dist/*
   # Install the wheel
   sh -c "python3 -m pip install {toxinidir}/dist/*.whl"
+
+[testenv:py{39,38,37,36}]
+description = Run the tests with {basepython} ansible-core 2.11+
+deps =
+  {[testenv]deps}
+  ansible-core>=2.11
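As an overall usage note (outside the patch), the flow that ansible-lint and molecule are expected to adopt once they depend on this package looks roughly like the sketch below; the collection pin is illustrative:

    from ansible_compat.prerun import (
        check_ansible_presence,
        prepare_environment,
        require_collection,
    )

    check_ansible_presence(exit_on_error=True)        # fail fast if Ansible is unusable
    prepare_environment()                             # install requirements and set env vars
    require_collection("community.general", "3.0.0")  # example pin, not mandated by the patch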