chore: update charm libraries
amandahla committed Feb 15, 2025
1 parent f87168d commit 1c30686
Showing 3 changed files with 68 additions and 19 deletions.
21 changes: 16 additions & 5 deletions lib/charms/grafana_k8s/v0/grafana_dashboard.py
@@ -219,7 +219,7 @@ def __init__(self, *args):
# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version

LIBPATCH = 39
LIBPATCH = 42

PYDEPS = ["cosl >= 0.0.50"]

@@ -417,8 +417,7 @@ def __init__(
self.expected_relation_interface = expected_relation_interface
self.actual_relation_interface = actual_relation_interface
self.message = (
"The '{}' relation has '{}' as "
"interface rather than the expected '{}'".format(
"The '{}' relation has '{}' as " "interface rather than the expected '{}'".format(
relation_name, actual_relation_interface, expected_relation_interface
)
)
@@ -634,7 +633,10 @@ def _replace_template_fields( # noqa: C901
deletions = []
for tmpl in dict_content["templating"]["list"]:
if tmpl["name"] and tmpl["name"] in used_replacements:
deletions.append(tmpl)
# An existing template variable may have the same name as one we insert (e.g. prometheusds or lokids);
# in that case, drop only the pre-existing one.
if tmpl not in DATASOURCE_TEMPLATE_DROPDOWNS:
deletions.append(tmpl)

for d in deletions:
dict_content["templating"]["list"].remove(d)
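As a toy reconstruction of the check added above (the real DATASOURCE_TEMPLATE_DROPDOWNS entries are richer dicts), the rule keeps the freshly inserted datasource dropdowns and deletes only pre-existing variables whose names were replaced:

```python
# Illustrative data only: the real dropdown definitions carry more fields.
DATASOURCE_TEMPLATE_DROPDOWNS = [{"name": "prometheusds"}, {"name": "lokids"}]
used_replacements = {"prometheusds", "lokids", "juju_model"}

templating_list = [
    {"name": "prometheusds"},  # the dropdown we just inserted -> kept
    {"name": "juju_model"},    # pre-existing variable that was replaced -> dropped
    {"name": "instance"},      # untouched variable -> kept
]

deletions = [
    tmpl
    for tmpl in templating_list
    if tmpl["name"] in used_replacements and tmpl not in DATASOURCE_TEMPLATE_DROPDOWNS
]
remaining = [t for t in templating_list if t not in deletions]
print([t["name"] for t in remaining])  # ['prometheusds', 'instance']
```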
@@ -962,6 +964,13 @@ def _replace_uid(
"Processed dashboard '%s': kept original uid '%s'", dashboard_path, original_uid
)

@classmethod
def _add_tags(cls, dashboard_dict: dict, charm_name: str):
tags: List[str] = dashboard_dict.get("tags", [])
if not any(tag.startswith("charm: ") for tag in tags):
tags.append(f"charm: {charm_name}")
dashboard_dict["tags"] = tags

@classmethod
def load_dashboards_from_dir(
cls,
@@ -1004,6 +1013,8 @@ def _is_dashboard(p: Path) -> bool:
charm_name=charm_name,
)

cls._add_tags(dashboard_dict=dashboard_dict, charm_name=charm_name)

id = "file:{}".format(path.stem)
dashboard_templates[id] = cls._content_to_dashboard_object(
charm_name=charm_name,
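Outside the diff, the tagging rule added by `_add_tags` boils down to appending a `charm: <name>` tag at most once; a minimal standalone sketch with a made-up dashboard dict and charm name:

```python
from typing import List


def add_charm_tag(dashboard_dict: dict, charm_name: str) -> dict:
    """Mirror of the _add_tags helper above: add a 'charm: <name>' tag at most once."""
    tags: List[str] = dashboard_dict.get("tags", [])
    if not any(tag.startswith("charm: ") for tag in tags):
        tags.append(f"charm: {charm_name}")
    dashboard_dict["tags"] = tags
    return dashboard_dict


dash = {"title": "Synapse Overview", "tags": ["matrix"]}
print(add_charm_tag(dash, "synapse")["tags"])  # ['matrix', 'charm: synapse']
print(add_charm_tag(dash, "synapse")["tags"])  # unchanged on a second call
```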
@@ -1601,7 +1612,7 @@ def _render_dashboards_and_signal_changed(self, relation: Relation) -> bool: #

if not coerced_data == stored_data:
stored_dashboards = self.get_peer_data("dashboards")
stored_dashboards[relation.id] = stored_data
stored_dashboards[str(relation.id)] = stored_data
self.set_peer_data("dashboards", stored_dashboards)
return True
return None # type: ignore
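The `str(relation.id)` cast above matches how JSON serialises mapping keys, so peer data keyed by relation id survives a round trip; a quick illustration with toy data:

```python
import json

peer_data = {1: {"uid": "abc"}}            # integer relation id as the key
restored = json.loads(json.dumps(peer_data))
print(1 in restored, "1" in restored)      # False True: JSON object keys come back as strings
```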
40 changes: 28 additions & 12 deletions lib/charms/prometheus_k8s/v0/prometheus_scrape.py
@@ -341,7 +341,7 @@ def _on_scrape_targets_changed(self, event):
import yaml
from cosl import JujuTopology
from cosl.rules import AlertRules, generic_alert_groups
from ops.charm import CharmBase, RelationRole
from ops.charm import CharmBase, RelationJoinedEvent, RelationRole
from ops.framework import (
BoundEvent,
EventBase,
@@ -362,7 +362,7 @@ def _on_scrape_targets_changed(self, event):

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 49
LIBPATCH = 50

PYDEPS = ["cosl"]

@@ -1309,6 +1309,8 @@ def __init__(
refresh_event: Optional[Union[BoundEvent, List[BoundEvent]]] = None,
external_url: str = "",
lookaside_jobs_callable: Optional[Callable] = None,
*,
forward_alert_rules: bool = True,
):
"""Construct a metrics provider for a Prometheus charm.
@@ -1411,6 +1413,7 @@ def __init__(
files. Defaults to "./prometheus_alert_rules",
resolved relative to the directory hosting the charm entry file.
The alert rules are automatically updated on charm upgrade.
forward_alert_rules: a boolean flag to toggle forwarding of charmed alert rules.
refresh_event: an optional bound event or list of bound events which
will be observed to re-set scrape job data (IP address and others)
external_url: an optional argument that represents an external url that
@@ -1449,6 +1452,7 @@ def __init__(

self._charm = charm
self._alert_rules_path = alert_rules_path
self._forward_alert_rules = forward_alert_rules
self._relation_name = relation_name
# sanitize job configurations to the supported subset of parameters
jobs = [] if jobs is None else jobs
@@ -1530,10 +1534,11 @@ def set_scrape_job_spec(self, _=None):
return

alert_rules = AlertRules(query_type="promql", topology=self.topology)
alert_rules.add_path(self._alert_rules_path, recursive=True)
alert_rules.add(
generic_alert_groups.application_rules, group_name_prefix=self.topology.identifier
)
if self._forward_alert_rules:
alert_rules.add_path(self._alert_rules_path, recursive=True)
alert_rules.add(
generic_alert_groups.application_rules, group_name_prefix=self.topology.identifier
)
alert_rules_as_dict = alert_rules.as_dict()

for relation in self._charm.model.relations[self._relation_name]:
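As a hedged usage sketch (not part of this diff), a charm could now opt out of forwarding its bundled and generic alert rules while still publishing scrape jobs. The relation name, scrape target and charm name below are assumptions for illustration:

```python
from ops.charm import CharmBase
from ops.main import main

from charms.prometheus_k8s.v0.prometheus_scrape import MetricsEndpointProvider


class MyCharm(CharmBase):
    def __init__(self, *args):
        super().__init__(*args)
        # forward_alert_rules is keyword-only and defaults to True, as shown above.
        self.metrics_endpoint = MetricsEndpointProvider(
            self,
            relation_name="metrics-endpoint",  # assumed to be declared in metadata.yaml
            jobs=[{"static_configs": [{"targets": ["*:8080"]}]}],
            forward_alert_rules=False,  # publish scrape jobs, skip alert rules
        )


if __name__ == "__main__":
    main(MyCharm)
```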
@@ -1780,6 +1785,8 @@ def __init__(
relabel_instance=True,
resolve_addresses=False,
path_to_own_alert_rules: Optional[str] = None,
*,
forward_alert_rules: bool = True,
):
"""Construct a `MetricsEndpointAggregator`.
Expand All @@ -1800,6 +1807,7 @@ def __init__(
should attempt to perform DNS lookups of targets and append
a `dns_name` label
path_to_own_alert_rules: Optionally supply a path for alert rule files
forward_alert_rules: a boolean flag to toggle forwarding of charmed alert rules
"""
self._charm = charm

@@ -1819,6 +1827,8 @@ def __init__(
self._relabel_instance = relabel_instance
self._resolve_addresses = resolve_addresses

self._forward_alert_rules = forward_alert_rules

# manage Prometheus charm relation events
prometheus_events = self._charm.on[self._prometheus_relation]
self.framework.observe(prometheus_events.relation_joined, self._set_prometheus_data)
@@ -1837,7 +1847,7 @@ def __init__(
self.framework.observe(alert_rule_events.relation_changed, self._on_alert_rules_changed)
self.framework.observe(alert_rule_events.relation_departed, self._on_alert_rules_departed)

def _set_prometheus_data(self, event):
def _set_prometheus_data(self, event: Optional[RelationJoinedEvent] = None):
"""Ensure every new Prometheus instances is updated.
Any time a new Prometheus unit joins the relation with
@@ -1878,8 +1888,12 @@ def _set_prometheus_data(self, event):
groups.extend(alert_rules.as_dict()["groups"])

# Set scrape jobs and alert rules in relation data
event.relation.data[self._charm.app]["scrape_jobs"] = json.dumps(jobs)
event.relation.data[self._charm.app]["alert_rules"] = json.dumps({"groups": groups})
relations = [event.relation] if event else self.model.relations[self._prometheus_relation]
for rel in relations:
rel.data[self._charm.app]["scrape_jobs"] = json.dumps(jobs) # type: ignore
rel.data[self._charm.app]["alert_rules"] = json.dumps( # type: ignore
{"groups": groups if self._forward_alert_rules else []}
)

def _on_prometheus_targets_changed(self, event):
"""Update scrape jobs in response to scrape target changes.
@@ -2150,7 +2164,9 @@ def set_alert_rule_data(self, name: str, unit_rules: dict, label_rules: bool = T

if updated_group["name"] not in [g["name"] for g in groups]:
groups.append(updated_group)
relation.data[self._charm.app]["alert_rules"] = json.dumps({"groups": groups})
relation.data[self._charm.app]["alert_rules"] = json.dumps(
{"groups": groups if self._forward_alert_rules else []}
)

if not _type_convert_stored(self._stored.alert_rules) == groups: # pyright: ignore
self._stored.alert_rules = groups
@@ -2198,8 +2214,8 @@ def remove_alert_rules(self, group_name: str, unit_name: str) -> None:
changed_group["rules"] = rules_kept # type: ignore
groups.append(changed_group)

relation.data[self._charm.app]["alert_rules"] = (
json.dumps({"groups": groups}) if groups else "{}"
relation.data[self._charm.app]["alert_rules"] = json.dumps(
{"groups": groups if self._forward_alert_rules else []}
)

if not _type_convert_stored(self._stored.alert_rules) == groups: # pyright: ignore
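A small toy illustration of the `alert_rules` payload after these changes: with forwarding disabled the databag still carries valid JSON with an empty `groups` list (where `remove_alert_rules` previously wrote a bare `"{}"`), so consumers can always parse it the same way:

```python
import json

groups = [{"name": "my_app_alerts", "rules": [{"alert": "HostDown", "expr": "up == 0"}]}]

for forward_alert_rules in (True, False):
    payload = json.dumps({"groups": groups if forward_alert_rules else []})
    print(forward_alert_rules, payload)
# True  -> {"groups": [{"name": "my_app_alerts", ...}]}
# False -> {"groups": []}
```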
26 changes: 24 additions & 2 deletions lib/charms/smtp_integrator/v0/smtp.py
@@ -68,7 +68,7 @@ def _on_config_changed(self, _) -> None:

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 14
LIBPATCH = 15

PYDEPS = ["pydantic>=2"]

@@ -89,6 +89,14 @@ def _on_config_changed(self, _) -> None:
LEGACY_RELATION_NAME = "smtp-legacy"


class SmtpError(Exception):
"""Common ancestor for Smtp related exceptions."""


class SecretError(SmtpError):
"""Common ancestor for Secrets related exceptions."""


class TransportSecurity(str, Enum):
"""Represent the transport security values.
@@ -295,11 +303,25 @@ def _get_relation_data_from_relation(
relation_data = relation.data[relation.app]
if not relation_data:
return None

password = relation_data.get("password")
if password is None and relation_data.get("password_id"):
try:
password = (
self.model.get_secret(id=relation_data.get("password_id"))
.get_content()
.get("password")
)
except ops.model.ModelError as exc:
raise SecretError(
f"Could not consume secret {relation_data.get('password_id')}"
) from exc

return SmtpRelationData(
host=typing.cast(str, relation_data.get("host")),
port=typing.cast(int, relation_data.get("port")),
user=relation_data.get("user"),
password=relation_data.get("password"),
password=password,
password_id=relation_data.get("password_id"),
auth_type=AuthType(relation_data.get("auth_type")),
transport_security=TransportSecurity(relation_data.get("transport_security")),
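A toy reconstruction of the password-resolution order introduced above (the secret store is faked with a dict; the exception names mirror the diff, but this is not the library's code): an inline `password` wins, otherwise the Juju secret referenced by `password_id` is read, and failures surface as `SecretError`.

```python
class SmtpError(Exception):
    """Common ancestor for SMTP related exceptions."""


class SecretError(SmtpError):
    """Common ancestor for secret related exceptions."""


def resolve_password(relation_data: dict, secret_store: dict):
    """Return the plaintext password, reading the referenced secret if needed."""
    password = relation_data.get("password")
    if password is None and relation_data.get("password_id"):
        try:
            return secret_store[relation_data["password_id"]]["password"]
        except KeyError as exc:
            raise SecretError(
                f"Could not consume secret {relation_data.get('password_id')}"
            ) from exc
    return password


secret_store = {"secret:abc123": {"password": "s3cr3t"}}
print(resolve_password({"password_id": "secret:abc123"}, secret_store))                 # s3cr3t
print(resolve_password({"password": "inline", "password_id": "unused"}, secret_store))  # inline
```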
