Commit
Merge pull request #233 from kikkomep/fix/0.8.0-beta2-issues
Fix/0.8.0 beta2 issues
kikkomep authored Sep 2, 2022
2 parents 7dfe30a + 3c4e14a commit 224aa0c
Showing 12 changed files with 74 additions and 47 deletions.
18 changes: 16 additions & 2 deletions cli/client/issues.py
@@ -25,6 +25,7 @@

import click
from cli.client.utils import get_repository, init_output_path
from flask.cli import with_appcontext
from lifemonitor.api.models.issues import (WorkflowRepositoryIssue,
find_issue_types, load_issue)
from lifemonitor.utils import to_snake_case
@@ -127,16 +128,20 @@ def check(config, repository, output_path=None):
console.print(table)

except Exception as e:
logger.exception(e)
error_console.print(str(e))


@issues_group.command(help="Test an issue type")
@click.argument('issue_file', type=click.Path(exists=True))
@click.option('-c', '--issue-class', type=str, multiple=True, default=None)
@click.option('-w', '--write', is_flag=True, help="Write proposed changes.")
@repository_arg
@output_path_arg
@click.pass_obj
def test(config, issue_file, issue_class, repository, output_path=None):
@with_appcontext
def test(config, issue_file, issue_class, write, repository, output_path=None):
proposed_files = []
try:
logger.debug("issue classes: %r", issue_class)
init_output_path(output_path=output_path)
@@ -164,18 +169,27 @@ def test(config, issue_file, issue_class, repository, output_path=None):
if issue_passed:
issues.append(issue)
detected_issues = [_.name for _ in issues]

for issue in issues_list:
if issue.name not in detected_issues:
status = Text("Passed", style="green bold")
else:
status = Text("Failed", style="red bold")
issue_files = [_.path for _ in issue.get_changes(repository)]
table.add_row(issue.get_identifier(), issue.name, status,
", ".join([_.path for _ in issue.get_changes(repository)]),
", ".join(issue_files),
", ".join(issue.labels))
proposed_files.extend(issue_files)
console.print(table)
except Exception as e:
logger.exception(e)
error_console.print(str(e))
finally:
logger.debug("Write: %r -> %r", write, proposed_files)
if not write and proposed_files:
for f in proposed_files:
logger.debug("Deleting %s", f)
os.remove(f)


@issues_group.command(help="Generate a skeleton class for an issue type")
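Note on the test command above: files proposed by issue checks are collected in proposed_files and removed in the finally block unless the new -w/--write flag is given, so the default run behaves as a dry run, while the added @with_appcontext decorator (from flask.cli) makes the command run inside a Flask application context. A minimal standalone sketch of the dry-run cleanup (hypothetical helper; assumes os and logging are already imported at module level, since the diff calls os.remove without adding an import):

import logging
import os

logger = logging.getLogger(__name__)

def cleanup_proposed_files(proposed_files, write):
    # Dry-run semantics: keep the generated files only when --write was passed
    if not write and proposed_files:
        for f in proposed_files:
            logger.debug("Deleting %s", f)
            os.remove(f)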
2 changes: 1 addition & 1 deletion k8s/Chart.yaml
@@ -12,7 +12,7 @@ version: 0.8.0
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
appVersion: 0.8.0-beta2
appVersion: 0.8.0-beta3

# Chart dependencies
dependencies:
17 changes: 12 additions & 5 deletions lifemonitor/api/models/registries/settings.py
@@ -20,12 +20,15 @@

from __future__ import annotations

import logging
from typing import Dict, List

from lifemonitor.auth import User
from lifemonitor.auth.oauth2.client.models import OAuth2Token
from sqlalchemy.orm.attributes import flag_modified

from lifemonitor.auth.oauth2.client.models import OAuth2Token
# Configure a module-level logger
logger = logging.getLogger(__name__)


class RegistrySettings():
@@ -37,6 +40,10 @@ def __init__(self, user: User) -> None:
self._raw_settings = {}
self.user.settings['registry_settings'] = self._raw_settings

def __update_settings__(self):
logger.debug("Current user: %r (token: %r)", self.user, getattr(self.user, "settings", None))
flag_modified(self.user, 'settings')

def get_token(self, registry: str) -> OAuth2Token:
token = self._raw_settings[registry].get('token', None) if registry in self._raw_settings else None
return OAuth2Token(token) if token else None
@@ -45,7 +52,7 @@ def set_token(self, registry: str, token: Dict):
if registry not in self._raw_settings:
raise ValueError(f"Registry {registry} not found")
self._raw_settings[registry]['token'] = token
flag_modified(self.user, 'settings')
self.__update_settings__()

@property
def registries(self) -> List[str]:
@@ -54,17 +61,17 @@ def registries(self) -> List[str]:
@registries.setter
def registries(self, registries: List[str]) -> List[str]:
self._raw_settings = {r: {} for r in registries}
flag_modified(self.user, 'settings')
self.__update_settings__()

def add_registry(self, registry: str):
if registry not in self.registries:
self._raw_settings[registry] = {}
flag_modified(self.user, 'settings')
self.__update_settings__()

def remove_registry(self, registry: str):
if registry in self.registries:
del self._raw_settings[registry]
flag_modified(self.user, 'settings')
self.__update_settings__()

def is_registry_enabled(self, registry: str) -> bool:
return registry in self.registries
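Context for the __update_settings__ refactor: SQLAlchemy does not detect in-place mutations of a JSON column, so each change to user.settings must be explicitly flagged as dirty, and the new helper centralises the flag_modified call that was previously repeated in every setter. A minimal sketch of the underlying pattern (illustrative model only, not the actual LifeMonitor schema):

from sqlalchemy import JSON, Column, Integer
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm.attributes import flag_modified

Base = declarative_base()

class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    settings = Column(JSON, default=dict)

def add_registry(user, registry):
    # Mutating the dict in place is invisible to the session...
    user.settings.setdefault('registry_settings', {})[registry] = {}
    # ...so the column must be flagged as modified or commit() will skip it.
    flag_modified(user, 'settings')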
42 changes: 21 additions & 21 deletions lifemonitor/cache.py
@@ -143,21 +143,21 @@ def has(self, key: str) -> bool:
def lock(self, key: str,
timeout: int = Timeout.REQUEST,
expire=15, retry=1, auto_renewal=True):
logger.debug("Getting lock for key %r...", _log_key_value(key))
logger.debug("Getting lock for key %r...", key)
if key in self.__locks__:
yield self.__locks__[key]
else:
lock = redis_lock.Lock(self.cache.backend, key, expire=expire, auto_renewal=auto_renewal, id=self.name)
while not lock.acquire(blocking=False, timeout=timeout if timeout > 0 else None):
logger.debug("Waiting for lock key '%r'... (retry in %r secs)", lock, retry)
time.sleep(retry)
logger.debug("Lock for key '%r' acquired: %r", _log_key_value(key), lock.locked)
logger.debug("Lock for key '%r' acquired: %r", key, lock.locked)
self.__locks__[key] = lock
logger.debug("Lock for key '%r' added to transaction %r: %r", _log_key_value(key), self.name, self.has_lock(key))
logger.debug("Lock for key '%r' added to transaction %r: %r", key, self.name, self.has_lock(key))
try:
yield lock
finally:
logger.debug("Releasing transactional lock context for key '%s'", _log_key_value(key))
logger.debug("Releasing transactional lock context for key '%s'", key)

def has_lock(self, key: str) -> bool:
return key in self.__locks__
@@ -192,24 +192,24 @@ def close(self):
logger.debug("Finalizing transaction...")
pipeline = self.__cache__.backend.pipeline()
for k, data in self.__data__.items():
logger.debug(f"Setting key {k[:100]} on transaction pipeline (timeout: {data[1]}")
logger.debug(f"Setting key {k} on transaction pipeline (timeout: {data[1]}")
pipeline.set(k, pickle.dumps(data[0]), ex=data[1] if data[1] > 0 else None)
pipeline.execute()
logger.debug("Transaction finalized!")
for k in list(self.__locks__.keys()):
lk = self.__locks__.pop(k)
if lk:
if lk.locked:
logger.debug("Releasing lock for key '%r'...", _log_key_value(k))
logger.debug("Releasing lock for key '%r'...", k)
try:
lk.release()
logger.debug("Lock for key '%r' released: %r", _log_key_value(k), lk.locked)
logger.debug("Lock for key '%r' released: %r", k, lk.locked)
except redis_lock.NotAcquired as e:
logger.debug(e)
else:
logger.debug("Lock for key '%s' not acquired or expired", _log_key_value(k))
logger.debug("Lock for key '%s' not acquired or expired")
else:
logger.debug("No lock for key %r", _log_key_value(k))
logger.debug("No lock for key %r", k)
logger.debug(f"All lock of {self} released")
logger.debug(f"{self} closed")
except Exception as e:
@@ -356,30 +356,30 @@ def to_dict(self, pattern=None):
def lock(self, key: str,
timeout: int = Timeout.REQUEST,
expire=15, retry=1, auto_renewal=True):
logger.debug("Getting lock for key %r...", _log_key_value(key))
logger.debug("Getting lock for key %r...", key)
lock = redis_lock.Lock(self.backend, key, expire=expire, auto_renewal=auto_renewal)
try:
while not lock.acquire(blocking=False, timeout=timeout if timeout > 0 else None):
logger.debug("Waiting to acquire the lock for '%r'... (retry in %r secs)", lock, retry)
time.sleep(retry)
logger.debug(f"Lock for key '{_log_key_value(key)}' acquired: {lock.locked}")
logger.debug(f"Lock for key '{key}' acquired: {lock.locked}")
yield lock
finally:
try:
logger.debug("Exiting from transactional lock context for key '%s'", _log_key_value(key))
logger.debug("Exiting from transactional lock context for key '%s'", key)
if not lock.locked:
logger.debug("Lock for key '%s' not acquired", _log_key_value(key))
logger.debug("Lock for key '%s' not acquired", key)
else:
logger.debug("Auto release of lock for key '%s'", _log_key_value(key))
logger.debug("Auto release of lock for key '%s'", key)
lock.release()
logger.debug("Lock for key='%s' released: %r", _log_key_value(key), lock.locked)
logger.debug("Lock for key='%s' released: %r", key, lock.locked)
except redis_lock.NotAcquired as e:
logger.debug(e)

def set(self, key: str, value, timeout: int = Timeout.NONE, prefix: str = CACHE_PREFIX):
if key is not None and self.cache_enabled:
key = self._make_key(key, prefix=prefix)
logger.debug("Setting cache value for key %r.... (timeout: %r)", _log_key_value(key), timeout)
logger.debug("Setting cache value for key %r.... (timeout: %r)", key, timeout)
if value is None:
self.backend.delete(key)
else:
Expand Down Expand Up @@ -412,7 +412,7 @@ def delete_keys(self, pattern: str, prefix: str = CACHE_PREFIX):
logger.debug("Redis backend detected!")
logger.debug(f"Pattern: {prefix}{pattern}")
for key in self.backend.scan_iter(self._make_key(pattern, prefix=prefix)):
logger.debug("Delete key: %r", _log_key_value(key))
logger.debug("Delete key: %r", key)
self.backend.delete(key)

def clear(self):
@@ -482,7 +482,7 @@ def make_cache_key(func=None, client_scope=True, args=None, kwargs=None) -> str:
if kwargs:
kwargs_str = "-".join([f"{k}={str(v)}" for k, v in kwargs.items()])
result += f"#{kwargs_str}"
logger.debug("make_key calculated key: %r", _log_key_value(result))
logger.debug("make_key calculated key: %r", result)
return result


@@ -519,13 +519,13 @@ def _process_cache_data(cache, transaction, key, unless, timeout,
logger.debug("Cache empty: getting value from the actual function...")
result = function(*args, **kwargs)
logger.debug("Checking unless function: %r", unless)
if unless is None or unless is False or callable(unless) and not unless(*args[1:], result=result, **kwargs):
if unless is None or unless is False or callable(unless) and not unless(*args, _value_to_cache=result, **kwargs):
writer.set(key, result, timeout=timeout)
else:
logger.debug("Don't set value in cache due to unless=%r",
"None" if unless is None else "True")
else:
logger.debug(f"Reusing value from cache key '{_log_key_value(key)}'...")
logger.debug(f"Reusing value from cache key '{key}'...")
return result


@@ -570,7 +570,7 @@ def cache_function(function: Callable, timeout=Timeout.REQUEST,
logger.debug("Computed callable skip_transacion: %r", skip_transaction)
elif not transaction and isinstance(transactional_update, bool):
skip_transaction = not transactional_update
logger.debug("Skipping transaction for %r: %r", _log_key_value(key), skip_transaction)
logger.debug("Skipping transaction for %r: %r", key, skip_transaction)
if not skip_transaction: # transaction or transactional_update: # skip_transaction:
read_from_cache = transaction is None
logger.debug("Read from cache: %r", read_from_cache)
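Two behavioural notes on the cache changes: key-truncating log calls (_log_key_value) are replaced by logging the full key, and the contract of the unless predicate changes. Predicates previously received *args[1:] plus the candidate value as result=; they now receive all positional arguments (including self for methods) plus the value as _value_to_cache=. A minimal sketch of a predicate written against the new contract (hypothetical example; expensive_lookup is a placeholder, while cached and Timeout are the decorator and constants from lifemonitor.cache referenced elsewhere in this diff):

def skip_empty(*args, _value_to_cache=None, **kwargs):
    # Returning True tells _process_cache_data NOT to cache the value
    return not _value_to_cache

@cached(timeout=Timeout.REQUEST, client_scope=False, unless=skip_empty)
def get_builds(instance_id):
    return expensive_lookup(instance_id)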
8 changes: 5 additions & 3 deletions lifemonitor/integrations/github/controllers.py
@@ -148,16 +148,18 @@ def refresh_workflow_build(event: GithubEvent):

with cache.cache.transaction():
for i in instances:
logger.warning("Checking version %s in refs %r", i.test_suite.workflow_version.version, refs)
logger.warning("Checking version %s in refs %r", i.test_suite.workflow_version, refs)
workflow_version = i.test_suite.workflow_version
if not workflow_version:
raise RuntimeError(f"No workflow version associated with test instance {i!r}")
if workflow_version.version in refs or not workflow_version.has_revision():
logger.warning("Version %s in refs %r", i.test_suite.workflow_version.version, refs)
last_build_id = f"{github_workflow_run.id}_{github_workflow_run.raw_data['run_attempt']}"
i.get_test_build(last_build_id)
i.test_suite.workflow_version.status
logger.info("Version %s updated... last build: %s", i.test_suite.workflow_version.version, last_build_id)
else:
logger.warning("Version %s not in refs %r", i.test_suite.workflow_version.version, refs)
else:
logger.warning("Version %s not in refs %r", i.test_suite.workflow_version.version, refs)
return f"Test instance related with resource '{workflow_resource}' updated", 200
else:
return "No build attached to the current event", 204
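The build identifier above now embeds the GitHub Actions run attempt, so re-running the same workflow run produces a distinct build id instead of overwriting the previous one. Sketch of the scheme (field names as in the diff; run_attempt comes from the raw GitHub API payload):

def make_build_id(github_workflow_run):
    # id is stable across re-runs; run_attempt increments on every re-run
    return f"{github_workflow_run.id}_{github_workflow_run.raw_data['run_attempt']}"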
4 changes: 2 additions & 2 deletions lifemonitor/integrations/github/events.py
@@ -226,7 +226,7 @@ def from_dict(cls, data: Dict) -> GithubEvent:
return cls(data.get('headers', {}), data.get('data', {}))

def to_dict(self) -> Dict:
logger.error("Headers: %r", self._headers)
logger.debug("Headers: %r", self._headers)
return {
'headers': {k: v for k, v in self._headers.items()},
'data': self._raw_data
@@ -238,7 +238,7 @@ def from_json(cls, data: str) -> GithubEvent:
return cls(raw_data.get('headers', {}), raw_data.get('data', {}))

def to_json(self) -> str:
logger.error("Headers: %r", self._headers)
logger.debug("Headers: %r", self._headers)
return json.dumps(self.to_dict())


8 changes: 5 additions & 3 deletions lifemonitor/integrations/github/pull_requests.py
@@ -82,7 +82,8 @@ def find_pull_request_by_title(repo: Repository, title: str) -> PullRequest:


def __prepare_pr_head__(repo: InstallationGithubWorkflowRepository,
identifier: str, files: List[RepositoryFile], allow_update: bool = True):
identifier: str, files: List[RepositoryFile],
commit_message: str = None, allow_update: bool = True):
assert isinstance(repo, Repository)

try:
@@ -145,7 +146,7 @@ def __prepare_pr_head__(repo: InstallationGithubWorkflowRepository,
logger.warning("No parent tree found")
tree = repo.create_git_tree(git_elements, base_tree=base_tree)
parent = repo.get_git_commit(sha=branch.commit.sha)
commit = repo.create_git_commit("Initialise workflow repository", tree, [parent])
commit = repo.create_git_commit(commit_message or "Update workflow repository", tree, [parent])
branch_ref.edit(sha=commit.sha)
return head
except KeyError as e:
@@ -166,7 +167,8 @@ def create_pull_request_from_github_issue(repo: InstallationGithubWorkflowRepository,
pr = find_pull_request_by_title(repo, issue.id)
if pr and update_comment:
issue.create_comment(update_comment)
head = __prepare_pr_head__(repo, identifier, files, allow_update=allow_update)
head = __prepare_pr_head__(repo, identifier, files, allow_update=allow_update,
commit_message=f"Fix '{issue.title}'")
logger.debug("HEAD: %r -> %r", head, repo)
if not pr:
if create_comment:
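With the new commit_message parameter, the commit created on the PR head branch is titled after the originating issue, falling back to "Update workflow repository". For reference, the PyGithub calls used by __prepare_pr_head__ follow this shape (condensed sketch of the calls visible in the diff; tree-element construction and error handling omitted):

def commit_to_branch(repo, branch, branch_ref, git_elements, commit_message=None):
    base_tree = repo.get_git_tree(sha=branch.commit.sha)
    tree = repo.create_git_tree(git_elements, base_tree=base_tree)
    parent = repo.get_git_commit(sha=branch.commit.sha)
    commit = repo.create_git_commit(commit_message or "Update workflow repository", tree, [parent])
    branch_ref.edit(sha=commit.sha)  # move the branch head to the new commit
    return commit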
10 changes: 5 additions & 5 deletions lifemonitor/integrations/github/utils.py
@@ -25,7 +25,7 @@
from typing import (Any, Callable, Dict, List, Optional, OrderedDict, Tuple,
Type)

from lifemonitor.cache import Timeout, cache_function, cached
from lifemonitor.cache import Timeout, cache_function
from lifemonitor.integrations.github.config import (DEFAULT_BASE_URL,
DEFAULT_PER_PAGE,
DEFAULT_TIMEOUT)
@@ -196,7 +196,7 @@ def __init__(
)


def __cache_request_value__(verb: str, url: str, *args,
def __cache_request_value__(requester, verb: str, url: str, *args,
parameters: Optional[Dict[str, Any]] = None,
headers: Optional[Dict[str, str]] = None,
input: Optional[Any] = None, **kwargs):
@@ -213,21 +213,21 @@ class CachedGithubRequester(Requester):
Extend the default Github Requester to enable caching.
"""

@cached(timeout=Timeout.NONE, client_scope=False, transactional_update=True, unless=__cache_request_value__)
# @cached(timeout=Timeout.NONE, client_scope=False, transactional_update=True, unless=__cache_request_value__)
def requestJsonAndCheck(self, verb: str, url: str,
parameters: Optional[Dict[str, Any]] = None,
headers: Optional[Dict[str, str]] = None,
input: Optional[Any] = None) -> Tuple[Dict[str, Any], Optional[Dict[str, Any]]]:
return super().requestJsonAndCheck(verb, url, parameters, headers, input)

@cached(timeout=Timeout.NONE, client_scope=False, transactional_update=True, unless=__cache_request_value__)
# @cached(timeout=Timeout.NONE, client_scope=False, transactional_update=True, unless=__cache_request_value__)
def requestMultipartAndCheck(self, verb: str, url: str,
parameters: Optional[Dict[str, Any]] = None,
headers: Optional[Dict[str, Any]] = None,
input: Optional[OrderedDict] = None) -> Tuple[Dict[str, Any], Optional[Dict[str, Any]]]:
return super().requestMultipartAndCheck(verb, url, parameters, headers, input)

@cached(timeout=Timeout.NONE, client_scope=False, transactional_update=True, unless=__cache_request_value__)
# @cached(timeout=Timeout.NONE, client_scope=False, transactional_update=True, unless=__cache_request_value__)
def requestBlobAndCheck(self, verb: str, url: str,
parameters: Optional[Dict[str, Any]] = None,
headers: Optional[Dict[str, Any]] = None,
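Note that all three @cached wrappers in CachedGithubRequester are commented out, so these overrides now delegate straight to the underlying PyGithub Requester without caching; the requester parameter added to __cache_request_value__ keeps the predicate's signature aligned with the new unless calling convention in lifemonitor/cache.py, which passes every positional argument through, including self. If the decorators were re-enabled, a predicate matching the new contract could look like this (hypothetical body; the real implementation is collapsed out of the diff):

def __cache_request_value__(requester, verb, url, *args,
                            parameters=None, headers=None,
                            input=None, **kwargs):
    # Example policy: skip caching for anything but GET requests
    # (returning True means "do not cache")
    return verb != "GET"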
2 changes: 1 addition & 1 deletion lifemonitor/static/src/package.json
@@ -1,7 +1,7 @@
{
"name": "lifemonitor",
"description": "Workflow Testing Service",
"version": "0.8.0-beta2",
"version": "0.8.0-beta3",
"license": "MIT",
"author": "CRS4",
"main": "../dist/js/lifemonitor.min.js",