Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: DPE-5656 log rotation new options #597

Merged
merged 13 commits into from
Feb 6, 2025
13 changes: 13 additions & 0 deletions config.yaml
Original file line number Diff line number Diff line change
@@ -44,6 +44,19 @@ options:
description: Number of days for binary logs retention
type: int
default: 7
logs_audit_policy:
description: |
Audit log policy. Allowed values are: "all", "logins" (default), "queries".
Ref. at https://docs.percona.com/percona-server/8.0/audit-log-plugin.html#audit_log_policy
type: string
default: logins
logs_retention_period:
description: |
Specifies the retention period for rotated logs, in days. Accepts an integer value of 3 or
greater, or the special value "auto". When set to "auto" (default), the retention period is
3 days, except when the charm is integrated with COS, where it is 1 day.
type: string
default: auto
# Experimental features
experimental-max-connections:
type: int
26 changes: 13 additions & 13 deletions lib/charms/mysql/v0/mysql.py
Original file line number Diff line number Diff line change
@@ -133,7 +133,7 @@ def wait_until_mysql_connection(self) -> None:
# Increment this major API version when introducing breaking changes
LIBAPI = 0

LIBPATCH = 81
LIBPATCH = 82

UNIT_TEARDOWN_LOCKNAME = "unit-teardown"
UNIT_ADD_LOCKNAME = "unit-add"
@@ -930,6 +930,7 @@ def render_mysqld_configuration( # noqa: C901
profile: str,
audit_log_enabled: bool,
audit_log_strategy: str,
audit_log_policy: str,
memory_limit: Optional[int] = None,
experimental_max_connections: Optional[int] = None,
binlog_retention_days: int,
@@ -1000,8 +1001,7 @@ def render_mysqld_configuration( # noqa: C901
"general_log_file": f"{snap_common}/var/log/mysql/general.log",
"slow_query_log_file": f"{snap_common}/var/log/mysql/slow.log",
"binlog_expire_logs_seconds": f"{binlog_retention_seconds}",
"loose-audit_log_filter": "OFF",
"loose-audit_log_policy": "LOGINS",
"loose-audit_log_policy": audit_log_policy.upper(),
"loose-audit_log_file": f"{snap_common}/var/log/mysql/audit.log",
}

@@ -2950,8 +2950,8 @@ def retrieve_backup_with_xbcloud(
temp_restore_directory: str,
xbcloud_location: str,
xbstream_location: str,
user=None,
group=None,
user: Optional[str] = None,
group: Optional[str] = None,
) -> Tuple[str, str, str]:
"""Retrieve the specified backup from S3."""
nproc_command = ["nproc"]
@@ -3017,8 +3017,8 @@ def prepare_backup_for_restore(
backup_location: str,
xtrabackup_location: str,
xtrabackup_plugin_dir: str,
user=None,
group=None,
user: Optional[str] = None,
group: Optional[str] = None,
) -> Tuple[str, str]:
"""Prepare the backup in the provided dir for restore."""
try:
@@ -3058,8 +3058,8 @@ def prepare_backup_for_restore(
def empty_data_files(
self,
mysql_data_directory: str,
user=None,
group=None,
user: Optional[str] = None,
group: Optional[str] = None,
) -> None:
"""Empty the mysql data directory in preparation of backup restore."""
empty_data_files_command = [
@@ -3095,8 +3095,8 @@ def restore_backup(
defaults_config_file: str,
mysql_data_directory: str,
xtrabackup_plugin_directory: str,
user=None,
group=None,
user: Optional[str] = None,
group: Optional[str] = None,
) -> Tuple[str, str]:
"""Restore the provided prepared backup."""
restore_backup_command = [
@@ -3129,8 +3129,8 @@ def restore_backup(
def delete_temp_restore_directory(
self,
temp_restore_directory: str,
user=None,
group=None,
user: Optional[str] = None,
group: Optional[str] = None,
) -> None:
"""Delete the temp restore directory from the mysql data directory."""
logger.info(f"Deleting temp restore directory in {temp_restore_directory}")
38 changes: 14 additions & 24 deletions src/charm.py
Original file line number Diff line number Diff line change
@@ -27,7 +27,6 @@
)
from charms.mysql.v0.backups import S3_INTEGRATOR_RELATION_NAME, MySQLBackups
from charms.mysql.v0.mysql import (
BYTES_1MB,
Error,
MySQLAddInstanceToClusterError,
MySQLCharmBase,
@@ -75,7 +74,6 @@
from constants import (
BACKUPS_PASSWORD_KEY,
BACKUPS_USERNAME,
CHARMED_MYSQL_COMMON_DIRECTORY,
CHARMED_MYSQL_SNAP_NAME,
CHARMED_MYSQLD_SERVICE,
CLUSTER_ADMIN_PASSWORD_KEY,
@@ -97,6 +95,7 @@
from flush_mysql_logs import FlushMySQLLogsCharmEvents, MySQLLogs
from hostname_resolution import MySQLMachineHostnameResolution
from ip_address_observer import IPAddressChangeCharmEvents
from log_rotation_setup import LogRotationSetup
from mysql_vm_helpers import (
MySQL,
MySQLCreateCustomMySQLDConfigError,
@@ -189,6 +188,8 @@ def __init__(self, *args):
self.framework.observe(
self.on[COS_AGENT_RELATION_NAME].relation_broken, self._on_cos_agent_relation_broken
)

self.log_rotation_setup = LogRotationSetup(self)
self.s3_integrator = S3Requirer(self, S3_INTEGRATOR_RELATION_NAME)
self.backups = MySQLBackups(self, self.s3_integrator)
self.hostname_resolution = MySQLMachineHostnameResolution(self)
@@ -284,25 +285,12 @@ def _on_config_changed(self, _) -> None:
return

# render the new config
memory_limit_bytes = (self.config.profile_limit_memory or 0) * BYTES_1MB
new_config_content, new_config_dict = self._mysql.render_mysqld_configuration(
profile=self.config.profile,
audit_log_enabled=self.config.plugin_audit_enabled,
audit_log_strategy=self.config.plugin_audit_strategy,
snap_common=CHARMED_MYSQL_COMMON_DIRECTORY,
memory_limit=memory_limit_bytes,
experimental_max_connections=self.config.experimental_max_connections,
binlog_retention_days=self.config.binlog_retention_days,
)
new_config_dict = self._mysql.write_mysqld_config()

changed_config = compare_dictionaries(previous_config, new_config_dict)

logger.info("Persisting configuration changes to file")
# always persist config to file
self._mysql.write_content_to_file(
path=MYSQLD_CUSTOM_CONFIG_FILE, content=new_config_content
)
self._mysql.setup_logrotate_and_cron(self.text_logs)
# Override log rotation
self.log_rotation_setup.setup()

if (
self.mysql_config.keys_requires_restart(changed_config)
@@ -312,9 +300,9 @@ def _on_config_changed(self, _) -> None:
if "loose-audit_log_format" in changed_config:
# plugins are manipulated on running daemon
if self.config.plugin_audit_enabled:
self._mysql.install_plugins(["audit_log", "audit_log_filter"])
self._mysql.install_plugins(["audit_log"])
else:
self._mysql.uninstall_plugins(["audit_log", "audit_log_filter"])
self._mysql.uninstall_plugins(["audit_log"])

self.on[f"{self.restart.name}"].acquire_lock.emit()

@@ -325,7 +313,9 @@ def _on_config_changed(self, _) -> None:
if config not in new_config_dict:
# skip removed configs
continue
self._mysql.set_dynamic_variable(config, new_config_dict[config])
self._mysql.set_dynamic_variable(
config.removeprefix("loose-"), new_config_dict[config]
)

def _on_start(self, event: StartEvent) -> None:
"""Handle the start event.
@@ -650,7 +640,7 @@ def get_unit_hostname(self, unit_name: Optional[str] = None) -> str:
"""Get the hostname of the unit."""
if unit_name:
unit = self.model.get_unit(unit_name)
return self.peers.data[unit]["instance-hostname"].split(":")[0]
return self.peers.data[unit]["instance-hostname"].split(":")[0] # type: ignore
return self.unit_peer_data["instance-hostname"].split(":")[0]

@property
@@ -703,12 +693,12 @@ def workload_initialise(self) -> None:
self.hostname_resolution.update_etc_hosts(None)

self._mysql.write_mysqld_config()
self._mysql.setup_logrotate_and_cron(self.text_logs)
self.log_rotation_setup.setup()
self._mysql.reset_root_password_and_start_mysqld()
self._mysql.configure_mysql_users()

if self.config.plugin_audit_enabled:
self._mysql.install_plugins(["audit_log", "audit_log_filter"])
self._mysql.install_plugins(["audit_log"])

current_mysqld_pid = self._mysql.get_pid_of_port_3306()
self._mysql.configure_instance()
21 changes: 21 additions & 0 deletions src/config.py
Original file line number Diff line number Diff line change
@@ -70,6 +70,8 @@ class CharmConfig(BaseConfigModel):
binlog_retention_days: int
plugin_audit_enabled: bool
plugin_audit_strategy: str
logs_audit_policy: str
logs_retention_period: str

@validator("profile")
@classmethod
@@ -142,3 +144,22 @@ def plugin_audit_strategy_validator(cls, value: str) -> Optional[str]:
raise ValueError("Value not one of 'async' or 'semi-async'")

return value

@validator("logs_audit_policy")
@classmethod
def logs_audit_policy_validator(cls, value: str) -> Optional[str]:
    """Ensure the audit log policy is one of the supported values."""
    allowed = ("all", "logins", "queries")
    if value in allowed:
        return value
    raise ValueError(f"logs_audit_policy not one of {', '.join(allowed)}")

@validator("logs_retention_period")
@classmethod
def logs_retention_period_validator(cls, value: str) -> str:
    """Check logs retention period.

    Accepts the literal "auto" or a string holding an integer >= 3.

    Raises:
        ValueError: if the value is neither "auto" nor a valid integer >= 3.
    """
    if value == "auto":
        return value
    # The original check let mixed strings (e.g. "3x", "-1", "1.5", "")
    # through, which only failed later when int() was applied downstream.
    # Require a pure digit string here so bad config is rejected up front.
    if not value.isdigit() or int(value) < 3:
        raise ValueError("logs_retention_period must be >= 3 or `auto`")
    return value
106 changes: 106 additions & 0 deletions src/log_rotation_setup.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
# Copyright 2025 Canonical Ltd.
# See LICENSE file for licensing details.

"""Handler for log rotation setup in relation to COS."""

import logging
import typing
from pathlib import Path

import yaml
from ops.framework import Object

from constants import COS_AGENT_RELATION_NAME

if typing.TYPE_CHECKING:
from charm import MySQLOperatorCharm

logger = logging.getLogger(__name__)

_POSITIONS_FILE = "/var/snap/grafana-agent/current/grafana-agent-positions/log_file_scraper.yml"
_LOGS_SYNCED = "logs_synced"


class LogRotationSetup(Object):
    """Configure logrotate settings in relation to COS integration.

    Retention and compression of rotated MySQL logs depend on whether a COS
    stack is consuming them: once the grafana-agent positions file shows a
    non-zero read offset for a mysql log, uploads are considered started and
    log rotation is reconfigured (shorter "auto" retention, compression on).
    """

    def __init__(self, charm: "MySQLOperatorCharm"):
        super().__init__(charm, "log-rotation-setup")

        self.charm = charm

        # Re-check on update-status so we notice when the agent starts
        # shipping logs (the positions file appears and advances).
        self.framework.observe(self.charm.on.update_status, self._update_logs_rotation)
        self.framework.observe(
            self.charm.on[COS_AGENT_RELATION_NAME].relation_created, self._cos_relation_created
        )
        self.framework.observe(
            self.charm.on[COS_AGENT_RELATION_NAME].relation_broken, self._cos_relation_broken
        )

    @property
    def _logs_are_syncing(self) -> bool:
        """Whether log upload to COS has been confirmed for this unit."""
        return self.charm.unit_peer_data.get(_LOGS_SYNCED) == "true"

    def setup(self):
        """Setup log rotation."""
        # retention setting: "auto" means 1 day once logs are uploaded to COS
        # (they are retained there), otherwise 3 days
        if self.charm.config.logs_retention_period == "auto":
            retention_period = 1 if self._logs_are_syncing else 3
        else:
            retention_period = int(self.charm.config.logs_retention_period)

        # compression setting: while a COS relation exists but syncing has not
        # been confirmed, keep logs uncompressed so the agent can read them
        compress = self._logs_are_syncing or not self.charm.has_cos_relation

        self.charm._mysql.setup_logrotate_and_cron(
            retention_period, self.charm.text_logs, compress
        )

    def _update_logs_rotation(self, _):
        """Check for log rotation auto configuration handler.

        Reconfigure log rotation if promtail/gagent start sync.
        """
        if not self.model.get_relation(COS_AGENT_RELATION_NAME):
            return

        if self._logs_are_syncing:
            # reconfiguration done
            return

        positions_file = Path(_POSITIONS_FILE)

        not_started_msg = "Log syncing not yet started."
        if not positions_file.exists():
            logger.debug(not_started_msg)
            return

        with open(positions_file, "r") as pos_fd:
            positions = yaml.safe_load(pos_fd.read())

        # safe_load returns None for an empty file — treat it as "no positions
        # yet" instead of crashing on `.get` of None
        sync_files = positions.get("positions") if positions else None
        if not sync_files:
            logger.debug(not_started_msg)
            return

        # syncing counts as started only when at least one mysql log file has
        # a non-zero recorded read offset
        if not any(
            "mysql" in log_file and int(offset) > 0 for log_file, offset in sync_files.items()
        ):
            logger.debug(not_started_msg)
            return

        logger.info("Reconfigure log rotation after logs upload started")
        self.charm.unit_peer_data[_LOGS_SYNCED] = "true"
        self.setup()

    def _cos_relation_created(self, _):
        """Handle relation created."""
        logger.info("Reconfigure log rotation on cos relation created")
        self.setup()

    def _cos_relation_broken(self, _):
        """Unset auto value for log retention."""
        logger.info("Reconfigure log rotation after logs upload stops")

        # The flag may never have been set if syncing did not start before the
        # relation broke; `del` would raise KeyError there, so pop defensively.
        self.charm.unit_peer_data.pop(_LOGS_SYNCED, None)
        self.setup()
Loading