Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 35 additions & 10 deletions lib/charms/grafana_agent/v0/cos_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,9 @@ def __init__(self, *args):
```
"""

import copy
import enum
import hashlib
import json
import logging
import socket
Expand Down Expand Up @@ -254,7 +256,7 @@ class _MetricsEndpointDict(TypedDict):

LIBID = "dc15fa84cef84ce58155fb84f6c6213a"
LIBAPI = 0
LIBPATCH = 24
LIBPATCH = 25

PYDEPS = ["cosl >= 0.0.50", "pydantic"]

Expand Down Expand Up @@ -308,6 +310,13 @@ def _dedupe_list(items: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
return unique_items


def _dict_hash_except_key(scrape_config: Dict[str, Any], key: Optional[str]):
"""Get a hash of the scrape_config dict, except for the specified key."""
cfg_for_hash = {k: v for k, v in scrape_config.items() if k != key}
serialized = json.dumps(cfg_for_hash, sort_keys=True)
return hashlib.blake2b(serialized.encode(), digest_size=4).hexdigest()


class TracingError(Exception):
    """Base class for custom errors raised by tracing.

    Tracing-specific failures should subclass this so callers can handle
    them with a single ``except TracingError`` clause.
    """

Expand Down Expand Up @@ -697,6 +706,27 @@ def _on_refresh(self, event):
) as e:
logger.error("Invalid relation data provided: %s", e)

def _deterministic_scrape_configs(
    self, scrape_configs: List[Dict[str, Any]]
) -> List[Dict[str, Any]]:
    """Return a sorted copy of ``scrape_configs`` with stable job names.

    Each config's job name is rewritten to
    ``<app>_<original job_name or 'default'>_<8hex-hash>``. The hash covers
    the whole config except ``job_name`` itself, so the suffix is sensitive
    to every stable field and only changes when the config does; sorted-key
    JSON serialization keeps it deterministic across serializations. The
    returned list is sorted by job name so its order is reproducible too.
    """
    renamed: List[Dict[str, Any]] = []
    for cfg in scrape_configs:
        # Deep-copy so the caller's dicts are never mutated.
        cfg = copy.deepcopy(cfg)
        base_name = cfg.get("job_name", "default")
        suffix = _dict_hash_except_key(cfg, "job_name")
        cfg["job_name"] = f"{self._charm.app.name}_{base_name}_{suffix}"
        renamed.append(cfg)
    renamed.sort(key=lambda c: c.get("job_name", ""))
    return renamed

@property
def _scrape_jobs(self) -> List[Dict]:
"""Return a list of scrape_configs.
Expand All @@ -711,22 +741,17 @@ def _scrape_jobs(self) -> List[Dict]:
scrape_configs = self._scrape_configs.copy()

# Convert "metrics_endpoints" to standard scrape_configs, and add them in
unit_name = self._charm.unit.name.replace("/", "_")
for endpoint in self._metrics_endpoints:
port = endpoint["port"]
path = endpoint["path"]
sanitized_path = path.strip("/").replace("/", "_")
scrape_configs.append(
{
"job_name": f"{unit_name}_localhost_{port}_{sanitized_path}",
"metrics_path": path,
"static_configs": [{"targets": [f"localhost:{port}"]}],
"metrics_path": endpoint["path"],
"static_configs": [{"targets": [f"localhost:{endpoint['port']}"]}],
}
)

scrape_configs = scrape_configs or []

return scrape_configs
return self._deterministic_scrape_configs(scrape_configs)

@property
def _metrics_alert_rules(self) -> Dict:
Expand All @@ -742,7 +767,7 @@ def _metrics_alert_rules(self) -> Dict:
)
alert_rules.add_path(self._metrics_rules, recursive=self._recursive)
alert_rules.add(
generic_alert_groups.application_rules,
copy.deepcopy(generic_alert_groups.application_rules),
group_name_prefix=JujuTopology.from_charm(self._charm).identifier,
)

Expand Down