diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..b7596f8 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,8 @@ +root = true + +[*] +insert_final_newline = true + +[*.py] +indent_size = 4 +indent_style = space diff --git a/kitipy/__init__.py b/kitipy/__init__.py new file mode 100644 index 0000000..717dd5c --- /dev/null +++ b/kitipy/__init__.py @@ -0,0 +1,7 @@ +from .dispatcher import Dispatcher +from .context import Context, pass_context, get_current_context, get_current_executor +from .executor import Executor, InteractiveWarningPolicy +from .groups import Task, Group, RootCommand, root, task, group +from .utils import append_cmd_flags, load_config_file, normalize_config, set_up_file_transfer_listeners, wait_for + +from . import filters diff --git a/kitipy/context.py b/kitipy/context.py new file mode 100644 index 0000000..37ead9c --- /dev/null +++ b/kitipy/context.py @@ -0,0 +1,195 @@ +import click +import subprocess +from typing import Any, Dict, List, Optional +from .dispatcher import Dispatcher +from .executor import Executor + + +class Context(object): + """Kitipy context is the global object carrying the kitipy Executor used to + ubiquitously run commands on local and remote targets, as well as the stack + and stage objects loaded by command groups and the dispatcher used to update + the CLI based on executor events. + + It's acting as a global Facade, such that you generally don't need to + interact with other kitipy or click objects. + + As both kitipy and click exposes their own Context object, you might wonder + what's the fundamental difference between them, here it is: + + * As said above, kitipy Context carry everything about how and where to + execute shell commands, on either local or remote targets. As such, it + has a central place in kitipy and is what you interact with within + kitipy tasks. 
+ * In the other hand, the click Context is here to carry details about CLI + commands and options, and to actually parse and navigate the command + tree made of kitipy tasks or regular click commands. As kitipy is a + super-set of click features, click.Context actually embeds the + kitipy.Context object. + + You generally don't need to instantiate it by yourself, as this is + handled by RootCommand which can be created through the kitipy.root() + decorator. + """ + def __init__(self, + config: Dict, + executor: Executor, + dispatcher: Dispatcher, + stage: Optional[Dict[Any, Any]] = None, + stack=None): + """ + Args: + config (Dict): + Normalized kitipy config (see normalize_config()). + executor (kitipy.Executor): + The command executor used to ubiquitously run commands on local + and remote targets. + dispatcher (kitipy.Dispatcher): + The event dispatcher used by the executor to signal events + about file transfers and any other event that shall produce + something on the CLI. This is used to decouple SSH matters + from the CLI. + stage (Optional[Dict[Any, Any]]): + This is the config for the stage in use. + There might be no stage available when the Context is built. In + such case, it can be set afterwards. The stage can be loaded + through kitipy.load_stage(), but this is handled + automatically by creating a stack-scoped command group through + kitipy.command() or kctx.command() decorators. + stack (Optional[kitipy.docker.BaseStack]): + This is the stack object representing the Compose/Swarm stack + in use. + There might be no stack available when the Context is built. In + such case, it can be set afterwards. The stack can be loaded + through kitipy.docker.load_stack(), but this is handled + automatically by creating a stack-scoped command group through + kitipy.command() or kctx.command() decorators. 
+ """ + self.config = config + self.stage = stage + self.stack = stack + self.executor = executor + self.dispatcher = dispatcher + + def run(self, cmd: str, **kwargs) -> subprocess.CompletedProcess: + """This method is the way to ubiquitously run a command on either local + or remote target, depending on how the executor was set. + + Args: + cmd (str): The command to run. + **kwargs: See Executor.run() options for more details. + + Raises: + paramiko.SSHException: + When the SSH client fail to run the command. Note that this + won't be raised when the command could not be found or it + exits with code > 0 though, but only when something fails at + the SSH client/server lower level. + + Returns: + subprocess.CompletedProcess + """ + return self.executor.run(cmd, **kwargs) + + def local(self, cmd: str, **kwargs) -> subprocess.CompletedProcess: + """Run a command on local host. + + This method is particularly useful when you want to run some commands + on local host whereas the Executor is running in remote mode. For + instance, you might want to check if a given git tag or some Docker + images exists on a remote repository/registry before deploying it, + or you might want to fetch the local git author name to log deployment + events somewhere. Such checks are generally better run locally. + + Args: + cmd (str): The command to run. + **kwargs: See Executor.run() options for more details. + + Raises: + paramiko.SSHException: + When the SSH client fail to run the command. Note that this + won't be raised when the command could not be found or it + exits with code > 0 though, but only when something fails at + the SSH client/server lower level. + + Returns: + subprocess.CompletedProcess + """ + return self.executor.local(cmd, **kwargs) + + def copy(self, src: str, dest: str): + """Copy a local file to a given path. 
If the underlying executor has + been configured to work in remote mode, the given source path will + be copied over network.""" + self.executor.copy(src, dest) + + def get_stage_names(self): + """Get the name of all stages in the configuration""" + return self.config['stages'].keys() + + def get_stack_names(self): + """Get the name of all stacks in the configuration""" + return self.config['stacks'].keys() + + @property + def is_local(self): + """Check if current kitipy Executor is in local mode""" + return self.executor.is_local + + @property + def is_remote(self): + """Check if current kitipy Executor is in remote mode""" + return self.executor.is_remote + + @property + def meta(self): + """Meta properties from current click.Context""" + return click.get_current_context().meta + + def invoke(self, *args, **kwargs): + """Call invoke() method on current click.Context""" + return click.get_current_context().invoke(*args, **kwargs) + + def echo(self, *args, **kwargs): + """Call echo() method on current click.Context""" + return click.echo(*args, **kwargs) + + def fail(self, message): + """Call fail() method on current click.Context""" + raise click.ClickException(message) + + +pass_context = click.make_pass_decorator(Context) + + +def get_current_context() -> Context: + """ + Find the current kitipy context or raise an error. + + Raises: + RuntimeError: When no kitipy context has been found. + + Returns: + Context: The current kitipy context. + """ + + click_ctx = click.get_current_context() + kctx = click_ctx.find_object(Context) + if kctx is None: + raise RuntimeError('No kitipy context found.') + return kctx + + +def get_current_executor() -> Executor: + """ + Get the executor from the current kitipy context or raise an error. + + Raises: + RuntimeError: When no kitipy context has been found. + + Returns: + Executor: The executor of the current kitipy context. 
+ """ + + kctx = get_current_context() + return kctx.executor diff --git a/kitipy/dispatcher.py b/kitipy/dispatcher.py new file mode 100644 index 0000000..27c2d24 --- /dev/null +++ b/kitipy/dispatcher.py @@ -0,0 +1,52 @@ +from typing import Any, Callable, Dict, List + + +class Dispatcher(object): + """This dispatcher is mostly used to decouple CLI concerns from SSH/SFTP + handling. + """ + def __init__(self, listeners: Dict[str, List[Callable[..., bool]]] = {}): + """ + Args: + listeners (Dict[str, Callable[..., bool]]): + List of callables taking undefined arguments and returning a + bool associated to event names. + """ + self.__listeners = listeners + + def on(self, event_name: str, fn: Callable[..., bool]): + """Register a listener for a given event name. + + Args: + event_name (str): + Name of the event the listeners should be attached to. + fn (Callable[[Any, ...], bool]): + The event listener that should be triggered for the given event + name. + """ + + if event_name not in self.__listeners: + self.__listeners[event_name] = [] + + self.__listeners[event_name].append(fn) + + def emit(self, event_name: str, **kwargs: Any): + """Trigger all the event listeners registered for a given event name. + + This dispatcher doesn't support listener priority, so the event + listeners are called in the order they've been registered. + Listeners can either inform the Dispatcher to continue the event + propagation, by returning True, or stop it by returning anything else + or nothing. 
+ + Args: + event_name (str): Name of the emitted event + **kwargs: Any arguments associated with the event + """ + + if event_name not in self.__listeners: + return + + for fn in self.__listeners[event_name]: + if not fn(**kwargs): + return diff --git a/kitipy/executor.py b/kitipy/executor.py new file mode 100644 index 0000000..aa07637 --- /dev/null +++ b/kitipy/executor.py @@ -0,0 +1,516 @@ +import click +import os.path +import paramiko +import random +import string +import subprocess +import sys +from typing import Any, Dict, Optional +from .dispatcher import Dispatcher + + +class Executor(object): + """Executor provides a common abstraction to execute commands and + manipulate files on both local computer and remote machines. + + It can be used either in local mode, when instantiated with no hostname, or + in remote mode, when a hostname is provided. In remote mode, it uses a + SSH/SFTP client to do its job. Remote connections are lazily opened when + the first command is run or when the first file is copied. + + The SSH/SFTP connections are automatically closed when the executor got + destroyed. + """ + def __init__(self, + basedir: str, + dispatcher: Dispatcher, + hostname: Optional[str] = None, + ssh_config_file: str = '~/.ssh/config', + paramiko_config: Dict[str, Any] = {}): + """ + Args: + basedir (str): + Base directory where commands should be executed. Most generally, + for local executors, this is the current working directory. For + remote executors, this is generaly the base directory of your + project. + dispatcher (Dispatcher): + Event dispatcher used to signal when file transfers start/end + and signal how much data have been transfered during file + uploads. + hostname (Optional[str]): + The SSH hostname (could be an alias) to connect to. Leave empty + to use the Executor to local mode. + ssh_config_file (str): + Path to the OpenSSH client config file used by paramiko. This + path could either be relative, absolute or startin with ~/. 
+ paramiko_config (Dict): + These are extra parameters passed to paramiko when opening the + SSH connection. This is useful to tweak paramiko-specific + parameters like look_for_key (which uses ~/.ssh/id_rsa if + other authentication mechanisms don't work). + """ + self._ssh = None + self._sftp = None + self._basedir = basedir + self._dispatcher = dispatcher + self._ssh_config = None # type: Optional[Dict[str, str]] + self._missing_host_key_policy = InteractiveWarningPolicy() + + if hostname is not None: + self._load_ssh_config(hostname, ssh_config_file, paramiko_config) + + def __del__(self): + """Close SSH/SFTP connections when the Executor is destroyed.""" + + if self._ssh is not None: + self._ssh.close() + if self._sftp is not None: + self._sftp.close() + + def _load_ssh_config(self, hostname: str, ssh_config_file: str, + paramiko_config: Dict[str, Any]): + """Load the SSH config from an OpenSSH ssh_config file for a given host + and prepare parameters used to open paramiko connection. + + Attrs: + hostname (str): + Hostname or host alias as defined in the ssh_config file. + ssh_config_file (str): + Path to the ssh_config file to load. This path can either be an + absolute path or a relative path, in which case its relative + root is the base directory of the ssh_config file. + paramiko_config (Dict[str, Any]): + Extra parameters to pass to paramiko when opening the + connection. This is useful to change default paramiko behavior + like disabling look_for_keys to not try ~/.ssh/id_rsa key by + default. + """ + ssh_config_path = os.path.expanduser(ssh_config_file) + ssh_config = paramiko.SSHConfig() + + with open(ssh_config_path) as f: + ssh_config.parse(f) + + host_config = ssh_config.lookup(hostname) + # @TODO: accept only a subset of all paramiko args (or it might be used to overwrite stage-specific parameters). 
+ cfg = paramiko_config + # The hostname parameter for paramiko is defined here but it might be + # rewritten by the loop below if it's just an alias to another + # hostname. For instance, if a ssh_file declares a host "foobar.prod" + # server, kitipy will automatically load the config for "Host foobar.prod" + # and rewrite the hostname below with the real Hostname of this alias, + # as defined in the ssh_config file. + cfg.update({'hostname': hostname, 'port': 22}) + + for hk, ck in (('hostname', 'hostname'), ('user', 'username'), + ('port', 'port'), ('connecttimeout', 'timeout'), + ('compression', 'compress')): + if hk in host_config: + cfg[ck] = host_config[hk] + + if 'proxycommand' in host_config: + cfg['sock'] = paramiko.ProxyCommand(host_config['proxycommand']) + + if 'identityfile' in host_config: + cfg['key_filename'] = [] + + for identity_file in host_config['identityfile']: + if os.path.exists(identity_file): + cfg['key_filename'].append(identity_file) + continue + + # Relative identity files are resolved with the base directory + # of the SSH config as their relative root. + if not os.path.isabs(identity_file): + key_basedir = os.path.dirname(ssh_config_path) + identity_file = os.path.join(key_basedir, identity_file) + + cfg['key_filename'].append(identity_file) + + self._ssh_config = cfg + + def set_missing_host_key_policy(self, + policy: paramiko.MissingHostKeyPolicy): + """Set the missing_host_key_policy used by paramiko when it stumbles + upon a server with an unknown signature. + + This method has to be called before the first command is run or the + first file is copied. + + Args: + policy (paramiko.MissingHostKeyPolicy): + The missing host key policy used by paramiko when a host key + is not known by the running system. + + Raises: + RuntimeError: When an SSH command or a SFTP file has been copied. + """ + if self._ssh is not None: + raise RuntimeError( + "This method has to be called before any SSH or SFTP session is started." 
+ ) + + self._missing_host_key_policy = policy + + # @TODO: manage private keys with passphrase + @property + def ssh(self) -> paramiko.SSHClient: + """Get previously opened SSH connection or open it. + + Raises: + RuntimeError: When the Executor is running in local mode. + paramiko.SSHException: When it fails to open the connection. + + Returns: + paramiko.SSHClient: The underlying SSH client + """ + + if self.is_local: + raise RuntimeError( + "No SSH connection available: this is a local executor.") + + if self._ssh == None: + client = paramiko.SSHClient() + client.load_system_host_keys() + client.set_missing_host_key_policy(self._missing_host_key_policy) + client.connect(**self._ssh_config) + self._ssh = client + + return self._ssh + + @property + def sftp(self) -> paramiko.SFTPClient: + """Get previously opened SFTP connection or open it. + + Raises: + RuntimeError: When the Executor is running in local mode. + paramiko.SSHException: When it fails to open the connection. + + Returns: + paramiko.SFTPClient: The underlying SFTP client + """ + + if self.is_local: + raise RuntimeError( + "No SFTP connection available: this is a local executor.") + + if self._sftp == None: + # @TODO: test what happens when both ssh/sftp connections are open and executor got destroyed (does it fail to close both?) + self._sftp = self.ssh.open_sftp() + + return self._sftp + + def local( + self, + cmd: str, + env: Optional[Dict[str, str]] = None, + cwd: Optional[str] = None, + shell: bool = True, + input: Optional[str] = None, + text: bool = True, + encoding: Optional[str] = None, + pipe: bool = False, + check: bool = True, + ) -> subprocess.CompletedProcess: + """Run a command on local host. + + This method is particularly useful when you want to run some commands + on local host whereas the Executor is running in remote mode. 
For + instance, you might want to check if a given git tag or some Docker + images exists on a remote repository/registry before deploying it, + or you might want to fetch the local git author name to log deployment + events somewhere. Such checks are generally better run locally. + + Args: + cmd (str): + Command and args to run. + env (Optional[Dict[str, str]]): + Env vars used to run the given cmd. When this is None (the + default value) the subprocess will inherit its env vars from + kitipy, so any env vars declared before running kitipy will be + made available for the command. + cwd (Optional[str]): + Working directory where the command should be run. When this is + None (the default value), the current working directory is used. + shell (bool): + Whether the command should be run in a shell (True by default). + input (Optional[str]): + Standard input of the subprocess. + text (bool): + Whether stdin/stdout/stderr streams should be converted from/into + strings using encoding parameter or kept in binary format. + encoding (Optional[str]): + Determine the encoding used to convert streams from/to binary format. + pipe (bool): + Whether the subprocess output should be piped to kitipy and + made available through the returned subprocess.CompletedProcess + (when True), or outputted to kitipy stdout/stderr (when False). + This is similar to subprocess.Popen(..., pipe=True). + check (bool): + Check if the executed command returns exit code 0 or raise an + error otherwise. + Raises: + subprocess.SubprocessError: When check mode is enable and the + command returns an exit code > 0. + + Returns: + subprocess.CompletedProcess + """ + cwd = cwd or self._basedir + + return subprocess.run(cmd, + env=env, + cwd=cwd, + shell=shell, + input=input, + text=text, + encoding=encoding, + stdout=subprocess.PIPE if pipe else None, + stderr=subprocess.PIPE if pipe else None, + check=check) + + # @TODO: emulate pipe/nopipe behavior for remote mode. 
+ def _remote( + self, + cmd: str, + env: Optional[Dict[str, str]] = None, + cwd: Optional[str] = None, + input: Optional[str] = None, + text: bool = True, + encoding: Optional[str] = None, + check: bool = False, + ) -> subprocess.CompletedProcess: + """Run a command on remote host. + + Args: + cmd (str): + Command and args to run + env (Dict[str, str]): + Env vars used to run the given cmd. + cwd (Optional[str]): + Working directory where the command should be run. When this is + None (the default value), the current working directory is used. + input (Optional[str]): + If passed, it's written to command stdin. + text (bool): + Whether stdin/stdout/stderr streams should be converted from/into + strings using encoding parameter or kept in binary format. + encoding (Optional[str]): + Determine the encoding used to convert streams from/to binary format. + check (bool): + Check if the executed command returns exit code 0 or raise an + error otherwise. + Raises: + RuntimeError: When the Executor is running in local mode. + + paramiko.SSHException: + When the SSH client fail to run the command. Note that this + won't be raised when the command could not be found or it + exits with code > 0 though, but only when something fails at + the SSH client/server lower level. + + Returns: + subprocess.CompletedProcess + """ + cwd = cwd or self._basedir + + if not self.is_remote: + raise RuntimeError( + 'This Executor is running in local mode, could not run following command: %s' + % (cmd)) + + self.ssh.exec_command('cd ' + cwd) + + streams = self.ssh.exec_command(cmd, environment=env) + + if input is not None: + streams[0].write(input) + + # Following line is blocking until the remote process has ended. We can + # then retrieve the exit code and fully read stdout/stderr. 
+ returncode = streams[0].channel.recv_exit_status() + out = streams[1].read() + err = streams[2].read() + + if text: + if encoding is None: + encoding = sys.getdefaultencoding() + out = out.decode(encoding) + err = err.decode(encoding) + + return subprocess.CompletedProcess(cmd, returncode, out, err) + + # @TODO: cmd signature have to be changed to accept list too (due to shell opts) + def run( + self, + cmd: str, + env: Optional[Dict[str, str]] = None, + cwd: Optional[str] = None, + shell: bool = True, + input: Optional[str] = None, + text: bool = True, + encoding: Optional[str] = None, + pipe: bool = False, + check: bool = True, + ) -> subprocess.CompletedProcess: + """This method is the way to ubiquitously run a command on either local + or remote target, depending on how the executor was set. More precisely, + it checks if there's a remote hostname set on the executor to know + whether the command should be locally or remotely. + + Args: + cmd (str): + Command and args to run + env (Optional[Dict[str, str]]): + Env vars used to run the given cmd. When this is None (the + default value) and the Executor is running in local mode, the + subprocess will inherit its env vars from kitipy, so any env + vars declared before running kitipy will be made available for + the command. + cwd (Optional[str]): + Working directory where the command should be run. When this is + None (the default value), in local mode, the current working + directory is used and in remote mode, the user home directory + is used. + shell (bool): + Whether the command should be run in a shell (True by default). + input (Optional[str]): + Standard input of the subprocess. + text (bool): + Whether stdin/stdout/stderr streams should be converted from/into + strings using encoding parameter or kept in binary format. + encoding (Optional[str]): + Determine the encoding used to convert streams from/to binary format. 
+ pipe (bool): + Whether the subprocess output should be piped to kitipy and + made available through the returned subprocess.CompletedProcess + (when True), or outputted to kitipy stdout/stderr (when False). + This is similar to subprocess.Popen(..., pipe=True). + check (bool): + Check if the executed command returns exit code 0 or raise an + error otherwise. + Raises: + RuntimeError: When the Executor is running in local mode. + + paramiko.SSHException: + When the SSH client fail to run the command. Note that this + won't be raised when the command could not be found or it + exits with code > 0 though, but only when something fails at + the SSH client/server lower level. + + Returns: + subprocess.CompletedProcess + """ + if self.is_remote: + return self._remote(cmd, + env=env, + input=input, + text=text, + encoding=encoding, + check=check) + + return self.local(cmd, + env=env, + cwd=cwd, + shell=shell, + input=input, + text=text, + encoding=encoding, + pipe=pipe, + check=check) + + def copy(self, local_path: str, remote_path: str): + """This method transfers files from your computer to a remote target. + It does nothing when executed in local mode. + + This method uses the dispatcher to emit events in order to let the UI + display what's going on. + + Args: + local_path (str): Path to the file to transfer. + remote_path (str): Destination path on the remote target. 
+ """ + if not self.is_remote: + return + + size = os.path.getsize(local_path) + label = "Transfer %s to %s" % (local_path, remote_path) + self._dispatcher.emit('file_transfer.start', size=size, label=label) + + fn = lambda current, total: self._dispatcher.emit( + 'file_transfer.update', current=current, total=total) + + try: + self.sftp.put(local_path, remote_path, callback=fn) + finally: + self._dispatcher.emit('file_transfer.end') + + def mkdtemp(self, + suffix: Optional[str] = None, + prefix: Optional[str] = None, + dir: Optional[str] = None) -> str: + """Creates a temporary file with a unique name. It runs tempfile.mkdtemp() + in local mode and uses `mktemp -d` in remote mode. It's the caller + responsibility to clean up this directory when it's not used anymore. + + Args: + suffix (Optional[str]): + Suffix of the temporary filename. + prefix (Optional[str]): + Prefix of the temporary filename. + dir (Optional[str]): + Directory where the temporary file should be created. This + defaults to the default temporary directory in local mode and + to `/tmp` in remote mode. + + Raises: + paramiko.SSHException: When running in remote mode and the command fails. + + Returns: + str: The path to the temporary directory. + """ + if self.is_local: + return tempfile.mkdtemp(suffix, prefix, dir) + + prefix = prefix or '' + suffix = suffix or '' + filename_tpl = os.path.join('/tmp', prefix + 'XXXXXXXX' + suffix) + res = self._remote("mktemp -d %s" % (filename_tpl)) + return res.stdout + + def path_exists(self, path: str) -> bool: + """Check if the given path exists. In local mode, it uses + `os.path.exists` and `ls` in remote mode. 
+ """ + if self.is_local: + return os.path.exists(path) + + res = self._remote("ls %s 1>/dev/null 2>&1" % (path), check=False) + return res.returncode == 0 + + @property + def is_local(self) -> bool: + return self._ssh_config is None + + @property + def is_remote(self) -> bool: + return self._ssh_config is not None + + +class InteractiveWarningPolicy(paramiko.MissingHostKeyPolicy): + """InteractiveWarningPolicy implements a paramiko MissingHostKeyPolicy + that uses click.confirmation() helper to ask for confirmation when a new + host_key is detected. This is the default paramiko MissingHostKeyPolicy + used by kitipy. + """ + def missing_host_key(self, client, hostname, key): + confirm_msg = "WARNING: Host key for %s not found (%s). Do you want to add it to your ~/.ssh/known_hosts?" % ( + hostname, key) + + if not click.confirm(confirm_msg): + raise RuntimeError("Unknown host key for %s." % (hostname)) + + client._host_keys.add(hostname, key.get_name(), key) + if client._host_keys_filename is not None: + client.save_host_keys(client._host_keys_filename) diff --git a/kitipy/filters.py b/kitipy/filters.py new file mode 100644 index 0000000..69a6297 --- /dev/null +++ b/kitipy/filters.py @@ -0,0 +1,58 @@ +"""Common kitipy commands and command groups filters. + +These filters have to be used with kitipy.Command and kitipy.Group class, or +through kitipy.command() and kitipy.group() decorators. +""" + +import click +from .context import Context +from typing import Callable, Union + + +def local_only(click_ctx: click.Context) -> bool: + """Check if the current kitipy Executor is in local mode. + + Args: + click_ctx (click.Context): Current click context. + + Returns: + bool: Either the executor is running in local mode. + + It returns False if the given Click context has no kitipy Context + attached. 
+ """ + + kctx = click_ctx.find_object(Context) + if kctx is None: + return False + + return kctx.is_local + + +def remote_only(click_ctx: click.Context): + """Check if the current kitipy Executor is in remote mode. + + Args: + click_ctx (click.Context): Current click context. + + Returns: + bool: Either the executor is running in remote mode. + + It returns False if the given Click context has no kitipy Context + attached. + """ + + kctx = click_ctx.find_object(Context) + if kctx is None: + return False + return kctx.is_remote + + +def stage_named(expected: str) -> Callable: + def only(click_ctx: click.Context) -> bool: + kctx = click_ctx.find_object(Context) # type: Union[None, Context] + if kctx is None: + return False + return kctx.stage is not None and kctx.stage['name'] == expected + + return only diff --git a/kitipy/groups.py b/kitipy/groups.py new file mode 100644 index 0000000..d8f8446 --- /dev/null +++ b/kitipy/groups.py @@ -0,0 +1,578 @@ +import click +import functools +import os +from typing import Callable, Dict, Optional +from . import filters +from .context import Context, pass_context, get_current_context +from .dispatcher import Dispatcher +from .executor import Executor +from .utils import load_config_file, normalize_config, set_up_file_transfer_listeners + + +def _fake_click_ctx() -> click.Context: + """This internal function is used to create a fake click Context. It's + used by Group.merge() to list commands from source Groups. + """ + return click.Context.__new__(click.Context) + + +class FilteredCommand(click.Command): + """FilteredCommand is like regular click.Command but it can be dynamically + disabled through a filter function. Such functions can be used to + conditionally enable a task for a specific stage or to limit it to remote + stages for instance. + + Note that only kitipy Group can filter out FilteredCommand; using + FilteredCommand with regular click Group will have no effect. 
+ + kitipy provides some filters in kitipy.filters and kitipy.docker.filters + but you can also write your own filters if you have more advanced use-cases. + """ + def __init__(self, + name: str, + filter: Optional[Callable[[click.Context], bool]] = None, + **kwargs): + """ + Args: + name (str): + Name of the command. + filter (Optional[Callable[[click.Context], bool]]): + Filter function used to filter out the command based on click + Context. When it's not provided, it defaults to a lambda always + returning True. + Click Context is passed as argument as it's the most generic + object available (eg. everything is accessible from there). + Check native filters to know how to retrieve kitipy Context + from click Context. + **kwargs: + Accept any other parameters also supported by click.Command() + constructor. + """ + super().__init__(name, **kwargs) + self.filter = filter or (lambda _: True) + + def is_enabled(self, click_ctx: click.Context) -> bool: + """Check if the that command should be filtered out based on click Context. + Most generally, you shouldn't have to worry about this method, it's + automatically called by kitipy Group. + + Args: + click_ctx (click.Context): + The click Context passed to the underlying filter. + + Returns: + bool: Either this command should be filtered in (True) or + filtered out (False). + """ + return self.filter(click_ctx) + + def invoke(self, click_ctx: click.Context): + """Given a context, this invokes the attached callback (if it exists) + in the right way. + + Raises: + click.ClickException: When this command is filtered out. + """ + if not self.is_enabled(click_ctx): + click_ctx.fail('Command "%s" is filtered out.' % self.name) + + return super().invoke(click_ctx) + + def get_help_option(self, click_ctx: click.Context): + """This is a click.Command method overriden to implement command + filtering. 
+ """ + help_options = self.get_help_option_names(click_ctx) + if not help_options or not self.add_help_option: + return + + def show_help(click_ctx, param, value): + """Returns the help option object when the command is not + filtered out, or raise an Error. + """ + if not self.is_enabled(click_ctx): + click_ctx.fail('Command "%s" not found.' % self.name) + + if value and not click_ctx.resilient_parsing: + click.echo(click_ctx.get_help(), color=click_ctx.color) + click_ctx.exit() + + return click.Option( # type: ignore + help_options, + is_flag=True, + is_eager=True, + expose_value=False, + callback=show_help, + help='Show this message and exit.') + + +class Group(click.Group, FilteredCommand): + """Group is like regular click.Group but it implements some ktipy-specific + features like: support for stage/stack-scoped command groups and command + filtering. + """ + def __init__(self, + name=None, + commands=None, + filter=None, + invoke_on_help: bool = False, + **attrs): + """ + Args: + name (str): + Name of the command Group. + commands: + List of commands to attach to this group. + filter (Callable): + A function to filter in/out this command group. + invoke_on_help (bool): + Whehter this group function should be calle before generatng + help message. + **attrs: + Any other constructor parameters accepted by click.Group. + """ + super().__init__(name, commands, **attrs) + self._stage_group = None + self._stack_group = None + self.filter = filter or (lambda _: True) + self.invoke_on_help = invoke_on_help + + def merge(self, *args: click.Group): + """This method can be used to merge click.Group(s), including kitipy + Groups and RootCommand, into another Group. In this way, you can + combine Groups coming from other projects/kitipy taskfiles. + + Args: + *args (click.Group): + One or many source click.Groups you want to merge in the + current Group. 
+ """ + click_ctx = _fake_click_ctx() + for src in args: + for cmdname in src.list_commands(click_ctx): + cmd = src.get_command(click_ctx, cmdname) + self.add_command(cmd) # type: ignore + + def get_command(self, click_ctx: click.Context, cmd_name: str): + """This is a click.Group method overriden to implement + stage/stack-scoped command groups. + + Commands aren't filtered out by this method because format_command() + method calls it to display the help message. + + You generally don't need to call it by yourself. + + Raises: + KeyError: When the command is not found. + """ + kctx = click_ctx.find_object(Context) + + if cmd_name in kctx.get_stage_names(): + kctx.meta['stage'] = cmd_name + return self._stage_group + if cmd_name in kctx.get_stack_names(): + kctx.meta['stack'] = cmd_name + return self._stack_group + + cmd = super().get_command(kctx, cmd_name) + + return cmd + + def list_commands(self, click_ctx: click.Context): + """This is a click.Group method overriden to implement + stage/stack-scoped command groups and command filtering behaviors. + + You generally don't need to call it by yourself. 
+ """ + commands = self.commands + root = click_ctx.find_root().command + + kctx = get_current_context() + stage_names = kctx.get_stage_names() + if len(stage_names) > 0 and self._stage_group is not None: + stage_vals = (self._stage_group for i in range(len(stage_names))) + stage_commands = dict(zip(stage_names, stage_vals)) + + commands = dict(commands, **stage_commands) + + stack_names = kctx.get_stack_names() + if len(stack_names) > 0 and self._stack_group is not None: + stack_vals = (self._stack_group for i in range(len(stack_names))) + stack_commands = dict(zip(stack_names, stack_vals)) + + commands = dict(commands, **stack_commands) + + filtered = {} + for cmd_name, cmd in commands.items(): + if isinstance(cmd, FilteredCommand) or isinstance(cmd, Group): + if cmd.is_enabled(click_ctx): + filtered[cmd_name] = cmd + else: + filtered[cmd_name] = cmd + + return sorted(filtered) + + def get_help(self, click_ctx: click.Context): + if self.invoke_on_help: + self.invoke_without_command = True + self.invoke(click_ctx) + return super().get_help(click_ctx) + + def command(self, *args, **kwargs): + """This decorator creates a new kitipy Command and adds it to the + current Group. See kitipy.Command() for more details about the + differences between kitipy.Command and click.Command. + + See kitipy.command() signature for more details about accepted + parameters. + + Also note that the command function that receives this decorator will + get the current kitipy.Context as + + Returns + Callable: The decorator to apply to the group function. + """ + def decorator(f): + kwargs.setdefault('cls', FilteredCommand) + cmd = command(*args, **kwargs)(_prepend_kctx_wrapper(f)) + self.add_command(cmd) + return cmd + + return decorator + + def group(self, *args, **kwargs): + """This decorator creates a new kitipy Group and adds it to the current + Group. See kitipy.Group() for more details about the differences + between kitipy.Group and click.Group. 
+ + See kitipy.group() signature for more details about accepted + parameters. + + Returns + Callable: The decorator to apply to the group function. + """ + def decorator(f): + kwargs.setdefault('cls', Group) + cmd = group(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + return decorator + + def stage_group(self, use_default_group=False, **attrs): + """This decorator creates a new kitipy.Group and registers it as a + stage-scoped group on the current Group. + + As stage-scoped command groups are regular command groups, this + decorator is the only way to create a stage-scoped group. + + Args: + **attrs: Any options accepted by click.group() decorator. + """ + def decorator(f): + attrs.setdefault('cls', Group) + cmd = click.group(**attrs)(_init_stage_group_wrapper(f)) + self._stage_group = cmd + return cmd + + return decorator + + def stack_group(self, **attrs): + """This decorator creates a new kitipy.Group and registers it as a + stack-scoped group on the current Group. + + As stack-scoped command groups are regular command groups, this + decorator is the only way to create a stack-scope group. + + Args: + **attrs: Any options accepted by click.group() decorator. + """ + def decorator(f): + attrs.setdefault('cls', Group) + cmd = click.group(**attrs)(_init_stack_group_wrapper(f)) + self._stack_group = cmd + return cmd + + return decorator + + +def command(name: Optional[str] = None, **attrs): + """This decorator creates a new kitipy Command. It automatically sets the + requested filter depending on local_only/remote_only kwargs. + + Args: + name (Optional[str]): + The name of the command. The function name is used by default. + **attrs: + Any other parameters supported by click.Command is also supported. + In addition, it also supports local_only and remote_only + parameters. Both are booleans and automatially set the appropriate + filter on the command. + + Returns + Callable: The decorator to apply to the command function. 
+ """ + if 'local_only' in attrs: + attrs['filter'] = filters.local_only + del attrs['local_only'] + + if 'remote_only' in attrs: + attrs['filter'] = filters.remote_only + del attrs['remote_only'] + + attrs.setdefault('cls', FilteredCommand) + return click.command(name, **attrs) + + +def group(name: Optional[str] = None, **attrs): + """This decorator creates a new kitipy Group. See kitipy.Group() for more + details about the differences between kitipy.Group and click.Group. + + Args: + name (Optional[str]): + The name of the group. The function name is used by default. + **attrs: + Any other parameter accepted by click.command(). + + Returns + Callable: The decorator to apply to the group function. + """ + attrs.setdefault('cls', Group) + return click.command(name, **attrs) + + +def _prepend_kctx_wrapper(f): + """This internal function creates a wrapper function automatically applied + to command body to inject the kitipy.Context as first argument. + """ + @functools.wraps(f) + def wrapper(*args, **kwargs): + # Don't add kctx if it's already in *args. This might happen when a + # command is invoked from another one. + if len(args) == 0 or not isinstance(args[0], Context): + kctx = get_current_context() + args = (kctx, ) + args + return f(*args, **kwargs) + + return wrapper + + +# @TODO: This won't work as expected if the stage-scoped group is declared +# inside of a stack-scoped group as the stack object has already a copy of the +# Executor. +def _init_stage_group_wrapper(f): + """This internal function creates a wrapper function run when a + stage-scoped group is invoked to change the command Executor set on the + kitipy Context. + + Like _prepend_kctx_wrapper(), the wrapper injects kitipy.Context as first + argument. 
def _create_executor(config: Dict, stage_name: str,
                     dispatcher: Dispatcher) -> Executor:
    """Instantiate a new executor for the given stage.

    Args:
        config (Dict):
            The whole kitipy config.
        stage_name (str):
            The name of the stage to instantiate an Executor for.
        dispatcher (Dispatcher):
            The dispatcher later used by the instantiated Executor.

    Raises:
        click.BadParameter: When the stage type is missing or invalid, or
            when a remote stage has no hostname.
    """

    stage = config['stages'][stage_name]

    if stage.get('type', None) not in ('remote', 'local'):
        raise click.BadParameter(
            'Stage "%s" has no "type" field or its value is invalid (should be either: local or remote).'
            % (stage_name))

    if stage['type'] == 'local':
        # @TODO: local executor base path should be configurable through stage params
        return Executor(os.getcwd(), dispatcher)

    if 'hostname' not in stage:
        # BUG FIX: interpolate the stage name into the error message; the
        # whole stage dict was previously formatted in.
        raise click.BadParameter(
            'Remote stage "%s" has no hostname field defined.' % (stage_name))

    # @TODO: verify and explain better all the mess around basedir/cwd
    basedir = stage.get('basedir', '~/')
    params = {
        'hostname': stage['hostname'],
    }

    if 'ssh_config' in config:
        params['ssh_config_file'] = config['ssh_config']
    if 'paramiko_config' in config:
        # @TODO: we shouldn't be that much permissive with paramiko config
        params['paramiko_config'] = config['paramiko_config']

    return Executor(basedir, dispatcher, **params)
+ """ + @functools.wraps(f) + def wrapper(*args, **kwargs): + # This has to be imported here to avoid circular dependencies + from .docker.stack import load_stack + + kctx = get_current_context() + stack_name = kctx.meta['stack'] + kctx.stack = load_stack(kctx, stack_name) + return f(kctx, *args, **kwargs) + + return wrapper + + +class RootCommand(Group): + """The RootCommand is used to mark the root of kitipy command tree. It's + mostly a kitipy command group but without filter support. It's a central + piece of kitipy as it's responsible for creating the kitipy Context and the + Executor used to run local and remote commands seamlessly. + + If there's a single stage defined, it'll be used by default. If there're + multiple stages, one have to be marked as default or an error got raised. + The config have to be provided with at least one stage or an error is + raised. The normalize_config() function takes care of adding a default + local stage if none is present. + + In the same way, if there's a single stack defined, it'll be used by + default. However, if there're multiple stacks, no default stacks will be + loaded. + """ + def __init__(self, config: Dict, basedir: str = '', **kwargs): + """ + Args: + config (Dict): + Kitipy config. The constructor takes care of normalizing the + config format (see normalize_config()). + basedir (str): + This is the base directory where kitipy commands will be executed. + You generally want to use the current working directory + (eg. os.getcwd()), but in some cases you might want to run all + of your commands in a specific subdirectory of your project (for + instance if your project is composed of multiple components/services). + **kwargs: + Accept any valid argument for click.Group(). + + Raises: + RuntimeError: + If there're multiple stages defined and there're no default stage. + """ + # Root command can't be filtered out, that'd make no sense. 
+ kwargs['filter'] = (lambda _: True) + super().__init__(**kwargs) + + config = normalize_config(config) + dispatcher = set_up_file_transfer_listeners(Dispatcher()) + + stages = config['stages'].values() + if len(stages) == 1: + stage = list(stages)[0] + if len(stages) > 1: + stage = next((stage for stage in stages if stage['default']), None) + if stage is None: + raise RuntimeError( + 'Mutiple stages are defined but none is marked as default.' + ) + if len(stages) == 0: + raise RuntimeError( + 'You have to provide a config with at least one stage.') + + executor = _create_executor(config, stage['name'], dispatcher) + self.kctx = Context(config, executor, dispatcher) + self.kctx.stage = stage + + stacks = config['stacks'].values() + if len(stacks) == 1: + from .docker.stack import load_stack + stack_cfg = list(stacks)[0] + self.kctx.stack = load_stack(self.kctx, stack_cfg['name']) + + def make_context(self, info_name, args, parent=None, **extra): + """Create a click.Context and parse remaining CLI args. + + See make_context() method from click.Group. This method does pretty + much the same job but attaches kitipy.Context to click.Context + before parsing remaning CLI args. This is needed as subcommands might + be stage/stack-dedicated command groups, in which case stages/stacks + names have to be accessed through kitipy.Context during parsing. + + You don't need to call this method by yourself. + """ + for key, value in self.context_settings.items(): + if key not in extra: + extra[key] = value + + # Attach kitipy Context to the click Context right after it's created + # to have it available when parsing remaining CLI args. 
+ click_ctx = click.Context(self, + info_name=info_name, + parent=parent, + **extra) + click_ctx.obj = self.kctx + + with click_ctx.scope(cleanup=False): + self.parse_args(click_ctx, args) + + return click_ctx + + +def root(config: Optional[Dict] = None, + config_file: Optional[str] = None, + basedir=None, + **kwargs): + """This decorator is used to create the kitipy RootCommand group. It loads + the given config_file if provided or uses the given config parameter. The + config_file parameter takes precedence over config. If no config is + provided, it defaults to an empty config dict. + + This is generally what you want to call to declare the root of your command + tree and use all of the kitipy features. + + Args: + config (Optional[Dict]): + Config used by kitipy. + config_file (Optional[str]): + File containing kitipy config. + basedir (Optional[str]): + The basedir where kitipy commands should be executed. If not provided, + the current working directory will be used. + **kwargs: + Any other argument supported by click.group() decorator. + + Returns: + Callable: It returns the decorator to apply to the command. + """ + if config_file is not None: + config = load_config_file(config_file) + if basedir is None: + basedir = os.getcwd() + + if config_file is None and config is None: + config = {} + + return click.group('root', + cls=RootCommand, + config=config, + basedir=basedir, + **kwargs) diff --git a/kitipy/utils.py b/kitipy/utils.py new file mode 100644 index 0000000..1826a77 --- /dev/null +++ b/kitipy/utils.py @@ -0,0 +1,245 @@ +import click +import kitipy +import os.path +import yaml +from typing import Dict + + +def load_config_file(path: str) -> Dict: + """Load the YAML config file at the given path. Note that this function + doesn't normalize the config, this is handled by normalize_config(). + + Args: + path (str): + The path to the config file to load. It could be either relative or + absolute. 
def set_up_file_transfer_listeners(dispatcher: 'kitipy.Dispatcher'):
    """Set up listeners for file transfer progress bars.

    This is usually called by the RootCommand when it creates the event
    dispatcher. However, you might want to call it if you are writing your own
    RootCommand.

    Args:
        dispatcher (Dispatcher): The dispatcher against which the listeners
            will be registered.

    Returns:
        Dispatcher: The given dispatcher, so the call can be chained
        (RootCommand relies on this return value).
    """
    # Single progress bar shared by the three listeners; (re)created on each
    # transfer start and dropped when the transfer ends.
    progressbar = None

    def on_start(size: int, label: str):
        nonlocal progressbar
        if progressbar is not None:
            progressbar.close()
        progressbar = click.progressbar(length=size, label=label)

    def on_update(current: int, total: int):
        nonlocal progressbar
        if progressbar is not None:
            progressbar.update(current)

    def on_end():
        nonlocal progressbar
        if progressbar is not None:
            progressbar.close()
        progressbar = None

    dispatcher.on('file_transfer.start', on_start)
    dispatcher.on('file_transfer.update', on_update)
    # BUG FIX: this listener was registered on 'filter_transfer.end' (typo),
    # so progress bars were never closed at the end of a transfer.
    dispatcher.on('file_transfer.end', on_end)

    # BUG FIX: RootCommand.__init__ does
    # `dispatcher = set_up_file_transfer_listeners(Dispatcher())`, so the
    # dispatcher must be returned (this function previously returned None).
    return dispatcher
# NOTE(review): these imports are missing from the module header (utils.py
# only imports click, kitipy, os.path, yaml and typing.Dict), which makes the
# module fail with NameError at import time. They're declared here so the
# module loads cleanly — consider moving them to the top of the file.
import subprocess
import time
from typing import Callable, Optional, TypeVar

TesterResult = TypeVar('TesterResult', subprocess.CompletedProcess, bool, None)
"""TesterResult is the return type of TesterCallable. See TesterCallable for
more details.
"""
TesterCallable = Callable[['kitipy.Context'], TesterResult]
"""TesterCallable are used by wait_for() function to regularly run some tests,
generally to ensure a stage is in a given state (e.g. all services are running,
the DB has been initialized, etc...).
The TesterCallable takes a kitipy.Context as argument, to help you to interact
with your stage/stack.
It has to return either a subprocess.CompletedProcess, a boolean or None. The
test is considered successful when it returns True or a CompletedProcess with a
returncode equal to 0.
"""


def wait_for(tester: 'TesterCallable',
             max_checks: int,
             interval: int = 1,
             label: Optional[str] = None):
    """This helper function will run a tester callback for max_checks times at
    most, with an interval between each check. If the callback didn't return
    true or a successful subprocess.CompletedProcess after max_checks retries,
    an exception is thrown.

    This helps implementing some higher-level wait functions, for instance to
    ensure containers are all running or to ensure a DB is initialized.

    Args:
        tester (TesterCallable):
            The callback to regularly run.
        max_checks (int):
            Number of times the tester functions should be called at most.
            After that, an exception is thrown if no check were successful.
        interval (int):
            Interval in seconds between two retry.
        label (Optional[str]):
            Label to display on the CLI every time the tester function is
            called. This is prefixed by "[X/max_checks]". It's also used for
            the exception message when wait_for fails. It's recommended to
            write it in the form of: "Wait for ".

    Raises:
        click.ClickException: When max_checks is reached and no checks were
            successful.
    """
    kctx = kitipy.get_current_context()
    label = label if label is not None else 'Waiting...'

    # BUG FIX: the loop previously was `range(1, max_checks, interval)`, which
    # misused the sleep interval as the range step (skipping attempts, and
    # raising ValueError for interval=0) and ran max_checks-1 times at most.
    for attempt in range(1, max_checks + 1):
        kctx.echo(message="[%d/%d] %s" % (attempt, max_checks, label))

        result = None
        succeeded = False

        try:
            result = tester(kctx)
        except subprocess.CalledProcessError:
            # A failing command is just an unsuccessful check; keep retrying.
            # (BUG FIX: this branch previously assigned a misspelled
            # `succeedded` variable.)
            succeeded = False

        if isinstance(result, bool):
            succeeded = result
        if isinstance(result, subprocess.CompletedProcess):
            succeeded = result.returncode == 0

        if succeeded:
            return

        time.sleep(interval)

    kctx.fail("Failed to %s" % (label.lower()))
# NOTE(review): these imports are missing from the tasks.py header (it only
# imports kitipy, kitipy.docker, kitipy.docker.tasks and subprocess), which
# makes the module fail with NameError at import time. They're declared here
# so the file loads cleanly — consider moving them to the top of the file.
import sys

import click


@root.command()
@click.option('--diff/--no-diff', 'show_diff', default=True)
@click.option('--force', is_flag=True, default=None)
def format(kctx: kitipy.Context, show_diff, force):
    """Run yapf to detect style divergences and fix them."""
    confirm_message = 'Do you want to reformat your code using yapf?'

    if show_diff:
        diff = local_with_venv(kctx,
                               'yapf --diff -r kitipy/ tests/ tasks*.py',
                               check=False,
                               pipe=True)
        kctx.echo(diff.stdout, nl=False)

        # yapf exits with a non-zero code when a diff is found; a non-zero
        # exit with an empty stdout means it actually failed to run.
        if diff.returncode != 0 and len(diff.stdout) == 0:
            raise click.ClickException('Failed to properly execute yapf')

        if len(diff.stdout) == 0:
            sys.exit(0)
        confirm_message = 'Do you want to apply this diff?'

    # BUG FIX: compare to None with `is`, not `==` (PEP 8). `--force` defaults
    # to None so "flag not given" can be told apart from False.
    if force is None:
        force = click.confirm(confirm_message, default=True)
    if force:
        local_with_venv(kctx, 'yapf -vvv -p -i -r kitipy/ tests/ tasks*.py')
# NOTE(review): click and typing.List are used below but never imported by
# tasks.py (NameError at import time); declared here so the file loads
# cleanly — consider moving them to the top of the file.
import click
from typing import List


@test.command(name='tasks')
@click.argument('suites', nargs=-1, type=str)
def test_tasks(kctx: kitipy.Context, suites: List[str]):
    """Run the tasks test suites — all of them, or only the given ones."""
    if len(suites) == 0:
        local_with_venv(kctx, 'pytest tests/tasks/ -vv')
        return

    for suite in suites:
        local_with_venv(kctx, 'pytest tests/tasks/test_%s.py -vv' % (suite))
+ """ + tempdir = kctx.executor.mkdtemp() + commands = ['cd ' + tempdir, 'git init'] + commits = ( + ('foo', 'v0.1'), + ('bar', 'v0.2'), + ('baz', 'v0.3'), + ('pi', 'v0.4'), + ('yolo', 'v0.5'), + ) + + for commit, tag in commits: + commands.append('touch ' + commit) + commands.append('git add ' + commit) + commands.append('git commit -m "%s"' % (commit)) + commands.append('git tag %s HEAD' % (tag)) + + basedir = os.path.dirname(os.path.abspath(__file__)) + tgz_path = os.path.join(basedir, 'tests', 'tasks', 'testdata', + 'git-repo.tgz') + commands.append('tar zcf %s .' % (tgz_path)) + + try: + kctx.run(' && '.join(commands)) + finally: + if not keep: + kctx.run('rm -rf %s' % (tempdir)) + + +root.add_command(kitipy.docker.tasks.compose) + +if __name__ == "__main__": + root() diff --git a/tests/.ssh/authorized_keys b/tests/.ssh/authorized_keys new file mode 100644 index 0000000..d3851d1 --- /dev/null +++ b/tests/.ssh/authorized_keys @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDagULwOwfvHhyq3ST6G4MmqcV8E5hNGU0VFtBhE/N9Vbaj6nLhxqJeqhQqve9bZGvVvVT/zG3acxime2BOglZnFo/wBZaKt/Mi7DZKQl+Bo7sjgSUF14VizSxyLmRsIpMFUrRAxAPu5lHE/Ksc8T+5Pl+rslSlmB1xVbAfGbYeL+Jt1jv3umwJ9gaN3W31w1ESDSGeYsVYwv4bW7Jr6E85UqdnKwazzNrDq+filcLQv+kYsIAxDFdzd1ZHyX1yjw616KK+tUnoojsNhPqfuYEqDJdWkbBrBMi9YtyrN3B1jD2/aVuq0Axw+cTTjf7oqQFMJbwyJEmM6EDm8oFCVM37v1ZY+UztHaYAlA9GbZHl7jvLoeWs8wI+FgHdMYOhmaDNF/flopkAwpyz5XQGAyFRaTHkfwt3CpwQw3mUZRx/jCJj4Upi9rL3rCZfZ4bHNEKsUWtau9pR7uByu3ikjcstc8ebi/H5Buq6H5hfJS74Lh8YqQui8C8DXI+p1t0TzMc= kitipy-tests diff --git a/tests/.ssh/config b/tests/.ssh/config new file mode 100644 index 0000000..582c8fb --- /dev/null +++ b/tests/.ssh/config @@ -0,0 +1,21 @@ +Host testhost + Hostname 127.0.0.1 + Port 2022 + User app + IdentityFile tests/.ssh/id_rsa + StrictHostKeyChecking no + +Host jumphost + Hostname 127.0.0.1 + Port 2023 + User app + IdentityFile tests/.ssh/id_rsa + StrictHostKeyChecking no + +Host testhost-via-jumphost + Hostname testhost + Port 22 + User app + 
IdentityFile tests/.ssh/id_rsa + StrictHostKeyChecking no + ProxyCommand ssh -F tests/.ssh/config -W %h:%p jumphost diff --git a/tests/.ssh/id_rsa b/tests/.ssh/id_rsa new file mode 100644 index 0000000..888f65f --- /dev/null +++ b/tests/.ssh/id_rsa @@ -0,0 +1,39 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIG4wIBAAKCAYEA2oFC8DsH7x4cqt0k+huDJqnFfBOYTRlNFRbQYRPzfVW2o+py +4caiXqoUKr3vW2Rr1b1U/8xt2nMYpntgToJWZxaP8AWWirfzIuw2SkJfgaO7I4El +BdeFYs0sci5kbCKTBVK0QMQD7uZRxPyrHPE/uT5fq7JUpZgdcVWwHxm2Hi/ibdY7 +97psCfYGjd1t9cNREg0hnmLFWML+G1uya+hPOVKnZysGs8zaw6vn4pXC0L/pGLCA +MQxXc3dWR8l9co8OteiivrVJ6KI7DYT6n7mBKgyXVpGwawTIvWLcqzdwdYw9v2lb +qtAMcPnE043+6KkBTCW8MiRJjOhA5vKBQlTN+79WWPlM7R2mAJQPRm2R5e47y6Hl +rPMCPhYB3TGDoZmgzRf35aKZAMKcs+V0BgMhUWkx5H8LdwqcEMN5lGUcf4wiY+FK +Yvay96wmX2eGxzRCrFFrWrvaUe7gcrt4pI3LLXPHm4vx+Qbquh+YXyUu+C4fGKkL +ovAvA1yPqdbdE8zHAgMBAAECggGAYt5WQYkUX/gKFK4XyyujvlzXRPtA0LIAFOjL +xQFYDYA5MhYlgU91rMvccDKdGYdo27/dtBj2JAAsSThOLXgATIkC/CIrKf1uAIFN +V3CM8oX5aWlILUqFylz4HXosalmCmMgBU8Td2r1QhoeljWUD5nf4piH89yjLObsC +BDCIHUShef2UCNXLiP5PnOVymd+U0UwryBT9ItqoTAzDwNvO7SsfIClx0O2Cl0Mz +m20UR4xuloNWndrtvwtieP0MzEc6FJkIH8/AcehAqFGkKKXf8KOQGL9UMlIVQcA3 +CJ/B8w308e+YNKieX/4+h12YipC6uY7FVa7zt8S5+ElPHMbOwYNkXMjY7cFdttwW +qEmCqSekvBGgVIaE3y5Hu+CKhXBjichfeidD2eGZdolvApml1xKOCSAtH56g+6fs +AFmjwHNg43tmlVRLpzNvJmMb1uhevl39SoaemfRKau+YXgd5fvu07KbFZbUml8Vh +lyzhQPYOYJTP2SOul4+oIiOfCVSxAoHBAPWs6tYmqWCs5d3nuflqcDj3AWKzRV3M +XIergETo/+5lJ5IzFJ4Nv3FK+CA9al2ZIskvO/RQ0R79DYMjIiQx2AN9gJOzSVAD +ELNn4vk0ozIJhcrK29VoBYv0Wa7DKGZ0SQ0uh5EUeJnWLF6tsVnC7A7T1V35/Ywz +TRYe8O7uIx1b2iuw9cbLQZ5kjhxVoWsWWG5D6GyRy2IHJ7CQEQ9QVo70ZTh3Ui/D +9EKFyFUhIvLW/iEJrQGpKLVMdwaEhBrGCwKBwQDjsAgnD3ZWH4ryXrHFzSc6Of4z +aXOVfzdTfK9axBndJ4kZQ+79+Ou8OFEzmTxL27K0rOx1qUK+aHdkIIan2WmdAZKj +wAizr1mTvVSu4FVcAgMQImZ0U4jYS9RUIyA3ekg1fiNjrHH9hUgEHNRnj9Ecu8BB +ZyXZeUqBOMfn6BlKOy66TSWFGFIwNlNxKtfSw/9r0W3uQFsktqqXBa9TWupDq0rD +qwavqz7fOvolRkHxfZpJ3dVm3w1j9CmuW0xgtbUCgcEAu0w1TUb0YsvuA5ui4VAQ 
+dp155PGftL8U3CoYBHFOjUV5QbDHyyfYCFuLh6jK6sjRCGiGYOEsTV/HaW7jnIyT +jzWjTPemsOVlWiGdnThQItm3sr+tkalYcTa9GMuYhWOqPe53VFgzCbWK5jOtvi0p +IrTFOnxYnQaPgoncTYwoIT3huuWSuxhC0qr+3QM7qRTEHmyPShZs5DNoQufS2cX5 +98rDUKVHdzsBgaXU8XHt3w2heTaTO2znqDC/naxxuqe5AoHAE7raSgA2AeqD0NLR +1tEbOBl0ZKIbqPW4JpzkWRorH5XgdHcFi655RvseVjlOF0i3aJ6fcuay6OsGhX+o +C310gG11dokad++W9I1wfCLmYq2t9ET22devQQb5JD1S+lu3TCNdD7MOwag+AFU7 +NqootbOzSjeU+Q5gnWf9MqRTJLKSLwSmXq0n2hOrEWBkCViAlqg0PXQIMds+Vw3g +DnoPWMPMG76DOs+9B/0FVhuLdVCsAQCfEGX9tA6hWgGBUdQNAoHAaSD7T/wmybEc +Mp6OHEZ7o87MkT2AMgGCvMLygkbFgImVpgBjbnzofy4G/+Frh6QdnS1u2vq/fCfb +R14F6KB1B2I07e5Npk13GspKj6rOfVMqUELUCjQ0DEjQCZoUO9TdXzkbFpKH9hWo +IRmVgR8uattSvlyHSWsZLFW+I4LmBVIlkI+kY3ajMBTZIYvVYC4/3igv2WZynRFl +Ie0aRLp2LgKt7KbMk7NLXSULYYVjQt9cF00JC1anZRaJrhVekgq6 +-----END RSA PRIVATE KEY----- diff --git a/tests/docker-compose.yml b/tests/docker-compose.yml new file mode 100644 index 0000000..8cbd940 --- /dev/null +++ b/tests/docker-compose.yml @@ -0,0 +1,20 @@ +version: '2' + +services: + testhost: + image: ajoergensen/openssh-server + environment: + - APP_PASSWORD=lorem + volumes: + - './.ssh/authorized_keys:/config/.ssh/authorized_keys' + ports: + - '127.0.0.1:2022:22' + + jumphost: + image: ajoergensen/openssh-server + environment: + - APP_PASSWORD=lorem + volumes: + - './.ssh/authorized_keys:/config/.ssh/authorized_keys' + ports: + - '127.0.0.1:2023:22' diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/test_context.py b/tests/unit/test_context.py new file mode 100644 index 0000000..965a846 --- /dev/null +++ b/tests/unit/test_context.py @@ -0,0 +1,78 @@ +import click +import kitipy +import pytest +import subprocess +from unittest import mock +from kitipy.context import * + + +def test_get_current_context(): + with mock.patch('click.get_current_context') as mock_get_current_click_ctx: + click_ctx = mock.Mock(spec=click.Context) + kctx = mock.Mock(spec=kitipy.Context) + + 
mock_get_current_click_ctx.return_value = click_ctx + click_ctx.find_object.return_value = kctx + + returned = get_current_context() + + assert returned is kctx + click_ctx.find_object.assert_called_with(Context) + + +def test_get_current_context_fails(): + with mock.patch('click.get_current_context') as mock_get_current_click_ctx: + click_ctx = mock.Mock(spec=click.Context) + kctx = mock.Mock(spec=kitipy.Context) + + mock_get_current_click_ctx.return_value = click_ctx + click_ctx.find_object.return_value = None + + with pytest.raises(RuntimeError): + get_current_context() + + +def test_get_current_executor(): + with mock.patch('click.get_current_context') as mock_get_current_click_ctx: + click_ctx = mock.Mock(spec=click.Context) + kctx = mock.Mock(spec=kitipy.Context) + kctx.executor = mock.Mock(spec=kitipy.Executor) + + mock_get_current_click_ctx.return_value = click_ctx + click_ctx.find_object.return_value = kctx + + returned = get_current_executor() + + assert returned is kctx.executor + click_ctx.find_object.assert_called_with(Context) + + +def test_get_current_executor_fails(): + with mock.patch('click.get_current_context') as mock_get_current_click_ctx: + click_ctx = mock.Mock(spec=click.Context) + + mock_get_current_click_ctx.return_value = click_ctx + click_ctx.find_object.return_value = None + + with pytest.raises(RuntimeError): + get_current_executor() + + +def test_context_run(): + dispatcher = mock.Mock(spec=kitipy.Dispatcher) + executor = mock.Mock(spec=kitipy.Executor) + kctx = kitipy.Context({}, executor, dispatcher) + + kctx.run("some cmd", env={"FOO": "bar"}, pipe=True, check=False) + + executor.run.assert_called_once_with( + 'some cmd', + env={"FOO": "bar"}, + cwd=None, + shell=True, + input=None, + text=True, + encoding=None, + pipe=True, + check=False, + ) diff --git a/tests/unit/test_dispatcher.py b/tests/unit/test_dispatcher.py new file mode 100644 index 0000000..0de4d2c --- /dev/null +++ b/tests/unit/test_dispatcher.py @@ -0,0 +1,31 @@ 
# --- tests/unit/test_dispatcher.py — unit tests for kitipy.Dispatcher ---
import pytest
import kitipy
from unittest import mock


def test_dispatcher_calls_multiple_listener():
    """Every listener registered on an event is invoked, in registration
    order, as long as the preceding listeners return True."""
    listener1 = mock.Mock(return_value=True)
    listener2 = mock.Mock(return_value=True)

    dispatcher = kitipy.Dispatcher()
    dispatcher.on('test', listener1)
    dispatcher.on('test', listener2)

    dispatcher.emit('test', some='args')

    # emit() forwards its kwargs verbatim to each listener.
    listener1.assert_called_once_with(some='args')
    listener2.assert_called_once_with(some='args')


def test_dispatcher_calls_early_stop():
    """A listener returning False stops propagation: listeners registered
    after it are never called for that emission."""
    listener1 = mock.Mock(return_value=False)
    listener2 = mock.Mock(return_value=True)

    dispatcher = kitipy.Dispatcher()
    dispatcher.on('test', listener1)
    dispatcher.on('test', listener2)

    dispatcher.emit('test', some='args')

    listener1.assert_called_once_with(some='args')
    listener2.assert_not_called()


# --- tests/unit/test_executor.py — integration-style tests for kitipy.Executor ---
import kitipy
import os.path
import paramiko
import pathlib
import pytest
import shutil
import socket
import tempfile
from kitipy.executor import InteractiveWarningPolicy
from unittest import mock


@pytest.fixture(params=["local", "remote", "remote_with_jumphost"])
def executor(request):
    """Yield an Executor for each target flavor: local (temp dir), remote,
    and remote through a jumphost.

    NOTE(review): the remote flavors expect the SSH containers defined in
    tests/docker-compose.yml (testhost/jumphost on 127.0.0.1:2022/2023) to be
    running — confirm CI brings them up before this suite.
    """
    dispatcher = kitipy.Dispatcher()

    # SSH client config lives next to the test tree, in tests/.ssh/config.
    ssh_config_file = os.path.join(os.path.dirname(__file__), '..', '.ssh',
                                   'config')
    ssh_config_file = os.path.abspath(ssh_config_file)

    if request.param == "local":
        basedir = tempfile.mkdtemp()
        yield kitipy.Executor(basedir, dispatcher)
        # Cleanup runs after the test since this is a yield fixture.
        shutil.rmtree(basedir)
        return

    hostname = "testhost"
    if request.param == "remote_with_jumphost":
        # Host alias resolved through the ssh_config_file above — presumably
        # configured with a ProxyJump through the jumphost container.
        hostname = 'testhost-via-jumphost'

    executor = kitipy.Executor(str(pathlib.Path.home()),
                               dispatcher,
                               hostname=hostname,
                               ssh_config_file=ssh_config_file,
                               paramiko_config={
                                   # Don't scan ~/.ssh for keys; rely on the
                                   # ssh_config_file only.
                                   'look_for_keys': False,
                               })
    # Passes the policy class itself (not an instance); paramiko accepts both.
    executor.set_missing_host_key_policy(paramiko.AutoAddPolicy)
    # NOTE(review): remote executors get no explicit close/teardown here —
    # confirm Executor needs none.
    yield executor


def executors_run_testdata():
    # (cmd, expected returncode, expected stdout, expected stderr)
    return (
        ("echo yolo", 0, "yolo\n", ""),
        ("/bin/false", 1, "", ""),
    )


@pytest.mark.parametrize("cmd, returncode, stdout, stderr",
                         executors_run_testdata())
def test_executors_run(executor: kitipy.Executor, cmd: str, returncode: int,
                       stdout: str, stderr: str):
    """run() executes the command on the fixture's target and, with pipe=True
    and check=False, reports status and captured output without raising."""
    returned = executor.run(cmd, pipe=True, check=False)

    assert returned.returncode == returncode
    assert returned.stdout == stdout
    assert returned.stderr == stderr


def test_executors_local(executor: kitipy.Executor):
    """local() always runs on the local machine, whatever the executor's
    target — hence comparing against this host's own hostname."""
    returned = executor.local('hostname', pipe=True, check=False)

    assert returned.returncode == 0
    assert returned.stdout == "%s\n" % (socket.gethostname())
    assert returned.stderr == ''


def test_interactive_warning_policy_confirmed():
    """When the user confirms the prompt, the policy records the new host key
    on the client and persists the known-hosts file."""
    policy = InteractiveWarningPolicy()

    with mock.patch('click.confirm') as confirm:
        confirm.return_value = True
        client = mock.Mock(spec=paramiko.SSHClient)
        # The policy reaches into paramiko.SSHClient private attributes.
        client._host_keys = mock.Mock(spec=paramiko.HostKeys)
        client._host_keys_filename = "some_file"

        key = mock.Mock(spec=paramiko.PKey)
        key.get_name.return_value = "key name"

        policy.missing_host_key(client, "[localhost]:2022", key)

        confirm.assert_called_once()
        client._host_keys.add.assert_called_once_with("[localhost]:2022",
                                                      "key name", key)
        client.save_host_keys.assert_called_once_with("some_file")


def test_interactive_warning_policy_refused():
    """When the user refuses the prompt, the policy raises and neither
    records nor persists anything."""
    policy = InteractiveWarningPolicy()

    with mock.patch('click.confirm') as confirm:
        confirm.return_value = False
        client = mock.Mock(spec=paramiko.SSHClient)
        client._host_keys = mock.Mock(spec=paramiko.HostKeys)
        key = mock.Mock(spec=paramiko.PKey)

        with pytest.raises(RuntimeError):
            policy.missing_host_key(client, "[localhost]:2022", key)

        confirm.assert_called_once()
        client._host_keys.add.assert_not_called()
        client.save_host_keys.assert_not_called()
# --- tests/unit/test_filters.py — unit tests for kitipy.filters ---
# Fixed: click was used (click.Context) without being imported; it only
# resolved through the star import of kitipy.filters.
import click
import kitipy
import pytest
from kitipy.filters import *
from unittest import mock


def test_local_only():
    """local_only() accepts the command when the kitipy Context attached to
    the click Context targets the local host."""
    click_ctx = mock.Mock(spec=click.Context)
    kctx = mock.Mock(spec=kitipy.Context)

    click_ctx.find_object.return_value = kctx
    # Fixed: dropped a stray `kctx.is_remote = mock.PropertyMock()` line that
    # set an unused instance attribute; only is_local matters here.
    type(kctx).is_local = mock.PropertyMock(return_value=True)

    assert local_only(click_ctx)


def test_local_only_when_no_kitipy_context_available():
    """local_only() rejects the command when no kitipy Context is attached
    to the click Context."""
    click_ctx = mock.Mock(spec=click.Context)
    click_ctx.find_object.return_value = None

    assert not local_only(click_ctx)


def test_remote_only():
    """remote_only() rejects the command when the Context isn't remote."""
    click_ctx = mock.Mock(spec=click.Context)
    kctx = mock.Mock(spec=kitipy.Context)
    click_ctx.find_object.return_value = kctx

    type(kctx).is_remote = mock.PropertyMock(return_value=False)

    assert not remote_only(click_ctx)


def test_remote_only_when_no_kitipy_context_available():
    """remote_only() rejects the command when no kitipy Context is attached
    to the click Context."""
    click_ctx = mock.Mock(spec=click.Context)
    click_ctx.find_object.return_value = None

    assert not remote_only(click_ctx)


# --- tests/unit/test_utils.py — unit tests for kitipy.utils ---
# Fixed: os.path was used without being imported; it only resolved through
# the star import of kitipy.utils.
import os.path

import pytest
from unittest.mock import Mock
from kitipy.utils import *
import kitipy.dispatcher


def append_cmd_flags_testdata():
    """(cmd, flags, expected) cases: single-char flags render as '-x value',
    long flags as '--name=value', True booleans are valueless, and tuple
    values repeat the flag once per value."""
    return [
        ("foo", {"d": True}, "foo -d"),
        ("foo", {"f": "config.yaml"}, "foo -f config.yaml"),
        ("foo", {"some": "flag"}, "foo --some=flag"),
        ("foo", {"filter": ("a", "b")}, "foo --filter=a --filter=b"),
        ("foo", {"some-bool": True}, "foo --some-bool"),
        ("foo", {"some-float": 3.141592}, "foo --some-float=3.141592"),
    ]


@pytest.mark.parametrize("cmd, flags, expected", append_cmd_flags_testdata())
def test_append_cmd_flags(cmd, flags, expected):
    """append_cmd_flags() serializes kwargs into CLI flags appended to cmd."""
    returned = append_cmd_flags(cmd, **flags)
    assert returned == expected


def test_set_up_file_transfer_listeners():
    """set_up_file_transfer_listeners() registers exactly three listeners on
    the given dispatcher."""
    dispatcher = Mock(spec=kitipy.dispatcher.Dispatcher)

    set_up_file_transfer_listeners(dispatcher)

    assert dispatcher.on.call_count == 3


def normalize_config_testdata():
    """(raw config, expected normalized config) pairs covering: empty config
    defaults, singular 'stack'/'stage' promotion to plural maps, and name
    back-filling from the map keys."""
    return [
        # Empty config gets a default local stage and an empty stacks map.
        (
            {},
            {
                'stages': {
                    'default': {
                        'name': 'default',
                        'type': 'local',
                    }
                },
                'stacks': {},
            },
        ),
        # A singular 'stack' entry is promoted to the 'stacks' map.
        (
            {
                'stack': {
                    'name': 'foo',
                    'file': 'docker-compose.yml'
                }
            },
            {
                'stacks': {
                    'foo': {
                        'name': 'foo',
                        'file': 'docker-compose.yml'
                    }
                },
                'stages': {
                    'default': {
                        'name': 'default',
                        'type': 'local',
                    },
                },
            },
        ),
        # A singular 'stage' entry is promoted to the 'stages' map.
        (
            {
                'stage': {
                    'name': 'dev',
                    'type': 'local',
                }
            },
            {
                'stages': {
                    'dev': {
                        'name': 'dev',
                        'type': 'local'
                    }
                },
                'stacks': {},
            },
        ),
        # Missing 'name' fields are back-filled from the map keys.
        (
            {
                'stacks': {
                    'foo': {
                        'file': 'docker-compose.yml',
                    },
                },
                'stages': {
                    'dev': {
                        'type': 'local',
                    },
                },
            },
            {
                'stacks': {
                    'foo': {
                        'name': 'foo',
                        'file': 'docker-compose.yml',
                    },
                },
                'stages': {
                    'dev': {
                        'name': 'dev',
                        'type': 'local',
                    }
                }
            },
        ),
    ]


@pytest.mark.parametrize("config, expected", normalize_config_testdata())
def test_normalize_config(config, expected):
    """normalize_config() returns the normalized form of a raw config."""
    normalized = normalize_config(config)

    # Fixed: the original asserted `config == expected`, which only holds if
    # normalize_config() mutates its argument in place. Assert on the returned
    # value instead — that's what callers actually consume.
    assert normalized == expected


def test_load_config_file():
    """load_config_file() parses the YAML file and records its path under
    the 'path' key (no normalization happens at this stage)."""
    filepath = os.path.join(os.path.dirname(__file__), "testdata/config.yml")

    config = load_config_file(filepath)
    expected = {
        'path': filepath,
        'stages': {
            'dev': {
                'type': 'local',
            }
        }
    }

    assert config == expected