Commit 9486127

fix lint and update according to review

natalian98 authored and DnPlas committed Aug 3, 2022
1 parent db13a9e
Showing 5 changed files with 92 additions and 60 deletions.
64 changes: 44 additions & 20 deletions lib/charms/grafana_k8s/v0/grafana_dashboard.py
@@ -213,7 +213,7 @@ def __init__(self, *args):

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 11
LIBPATCH = 13

logger = logging.getLogger(__name__)

@@ -541,17 +541,26 @@ def _convert_dashboard_fields(content: str) -> str:
datasources = {}
existing_templates = False

# If the dashboard has __inputs, get the names to replace them. These are stripped
# from reactive dashboards in GrafanaDashboardAggregator, but charm authors in
# newer charms may import them directly from the marketplace
if "__inputs" in dict_content:
for field in dict_content["__inputs"]:
if "type" in field and field["type"] == "datasource":
datasources[field["name"]] = field["pluginName"].lower()
del dict_content["__inputs"]

# If no existing template variables exist, just insert our own
if "templating" not in dict_content:
dict_content["templating"] = {"list": [d for d in TEMPLATE_DROPDOWNS]}
else:
# Otherwise, set a flag so we can go back later
existing_templates = True
for maybe in dict_content["templating"]["list"]:
for template_value in dict_content["templating"]["list"]:
# Build a mapping of `datasource_name` -> `datasource_type`
# The "query" field is actually "prometheus", "loki", "influxdb", etc
if "type" in maybe and maybe["type"] == "datasource":
datasources[maybe["name"]] = maybe["query"]
if "type" in template_value and template_value["type"] == "datasource":
datasources[template_value["name"]] = template_value["query"].lower()

# Put our own variables in the template
for d in TEMPLATE_DROPDOWNS:
@@ -563,7 +572,7 @@ def _convert_dashboard_fields(content: str) -> str:
return json.dumps(dict_content)


def _replace_template_fields(
def _replace_template_fields( # noqa: C901
dict_content: dict, datasources: dict, existing_templates: bool
) -> dict:
"""Make templated fields get cleaned up afterwards.
@@ -586,11 +595,17 @@ def _replace_template_fields(
#
# COS only knows about Prometheus and Loki.
for panel in panels:
if "datasource" not in panel:
if "datasource" not in panel or not panel.get("datasource", ""):
continue
if not existing_templates:
panel["datasource"] = "${prometheusds}"
else:
if panel["datasource"].lower() in replacements.values():
# Already a known template variable
continue
if not panel["datasource"]:
# Don't worry about null values
continue
# Strip out variable characters and maybe braces
ds = re.sub(r"(\$|\{|\})", "", panel["datasource"])
replacement = replacements.get(datasources[ds], "")
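
To make the datasource handling above concrete, here is a minimal standalone sketch of the same idea — collect datasource names from __inputs, then rewrite templated panel datasources via a type-to-variable mapping. The dashboard snippet and the replacements dict are illustrative, not the library's actual constants.

    import json
    import re

    # Illustrative trimmed dashboard, similar to what _convert_dashboard_fields receives.
    dashboard = {
        "__inputs": [{"name": "DS_PROM", "type": "datasource", "pluginName": "Prometheus"}],
        "panels": [{"title": "CPU", "datasource": "${DS_PROM}"}],
    }

    # Assumed mapping of datasource type -> COS template variable.
    replacements = {"prometheus": "${prometheusds}", "loki": "${lokids}"}

    # Collect `name -> type` mappings from __inputs, then drop the block.
    datasources = {}
    for field in dashboard.pop("__inputs", []):
        if field.get("type") == "datasource":
            datasources[field["name"]] = field["pluginName"].lower()

    # Rewrite each panel's templated datasource to the matching COS variable.
    for panel in dashboard.get("panels", []):
        ds = panel.get("datasource")
        if not ds:
            continue
        name = re.sub(r"(\$|\{|\})", "", ds)
        panel["datasource"] = replacements.get(datasources.get(name, ""), ds)

    print(json.dumps(dashboard))  # the panel now points at "${prometheusds}"
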
@@ -654,19 +669,25 @@ class GrafanaDashboardEvent(EventBase):
Enables us to set a clear status on the provider.
"""

def __init__(self, handle, error_message: str = "", valid: bool = False):
def __init__(self, handle, errors: List[Dict[str, str]] = [], valid: bool = False):
super().__init__(handle)
self.error_message = error_message
self.errors = errors
self.error_message = "; ".join([error["error"] for error in errors if "error" in error])
self.valid = valid

def snapshot(self) -> Dict:
"""Save grafana source information."""
return {"error_message": self.error_message, "valid": self.valid}
return {
"error_message": self.error_message,
"valid": self.valid,
"errors": json.dumps(self.errors),
}

def restore(self, snapshot):
"""Restore grafana source information."""
self.error_message = snapshot["error_message"]
self.valid = snapshot["valid"]
self.errors = json.loads(snapshot["errors"])


class GrafanaProviderEvents(ObjectEvents):
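
Since ops only persists JSON-serialisable snapshots, the structured errors list is encoded in snapshot() and decoded in restore(). A minimal sketch of that round trip, using a hypothetical errors list:

    import json

    errors = [{"dashboard": "file:my_dashboard", "error": "invalid template"}]  # hypothetical

    snapshot = {
        "error_message": "; ".join(e["error"] for e in errors if "error" in e),
        "valid": False,
        "errors": json.dumps(errors),  # stored as a JSON string
    }

    restored = json.loads(snapshot["errors"])  # identical list after restore()
    assert restored == errors
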
@@ -830,11 +851,12 @@ def _update_all_dashboards_from_dir(self, _: Optional[HookEvent] = None) -> None
del stored_dashboard_templates[dashboard_id]

# Path.glob uses fnmatch on the backend, which is pretty limited, so use a
# lambda for the filter
for path in filter(
lambda p: p.is_file and p.name.endswith((".json", ".json.tmpl", ".tmpl")),
Path(self._dashboards_path).glob("*"),
):
# custom function for the filter
def _is_dashboard(p: Path) -> bool:
return p.is_file() and p.name.endswith((".json", ".json.tmpl", ".tmpl"))

for path in filter(_is_dashboard, Path(self._dashboards_path).glob("*")):
id = "file:{}".format(path.stem)
stored_dashboard_templates[id] = self._content_to_dashboard_object(
_encode_dashboard_content(path.read_bytes())
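
For reference, the glob-plus-predicate pattern above in isolation — Path.glob's fnmatch syntax cannot express "ends with .json, .json.tmpl or .tmpl" in a single pattern, hence the explicit filter. The directory name here is hypothetical:

    from pathlib import Path

    def _is_dashboard(p: Path) -> bool:
        return p.is_file() and p.name.endswith((".json", ".json.tmpl", ".tmpl"))

    for path in filter(_is_dashboard, Path("src/grafana_dashboards").glob("*")):
        print("file:{}".format(path.stem))
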
@@ -1059,7 +1081,7 @@ def update_dashboards(self, relation: Optional[Relation] = None) -> None:
)

for relation in relations:
self._render_dashboards_and_signal_changed(relation) # type: ignore
self._render_dashboards_and_signal_changed(relation)

if changes:
self.on.dashboards_changed.emit()
@@ -1090,7 +1112,7 @@ def _render_dashboards_and_signal_changed(self, relation: Relation) -> bool: #
"""
other_app = relation.app

raw_data = relation.data[other_app].get("dashboards", {})
raw_data = relation.data[other_app].get("dashboards", {}) # type: ignore

if not raw_data:
logger.warning(
@@ -1498,10 +1520,12 @@ def _maybe_get_builtin_dashboards(self, event: RelationEvent) -> Dict:
)

if dashboards_path:
for path in filter(
lambda p: p.is_file and p.name.endswith((".json", ".json.tmpl", ".tmpl")),
Path(dashboards_path).glob("*"),
):

def _is_dashboard(p: Path) -> bool:
return p.is_file() and p.name.endswith((".json", ".json.tmpl", ".tmpl"))

for path in filter(_is_dashboard, Path(dashboards_path).glob("*")):
if event.app.name in path.name:
id = "file:{}".format(path.stem)
builtins[id] = self._content_to_dashboard_object(
80 changes: 44 additions & 36 deletions lib/charms/prometheus_k8s/v0/prometheus_scrape.py
@@ -311,32 +311,30 @@ def _on_scrape_targets_changed(self, event):
""" # noqa: W505

import contextlib
import ipaddress
import json
import logging
import os
import platform
import socket
import subprocess
from collections import OrderedDict
from pathlib import Path
from typing import Dict, List, Optional, Union

import yaml
from ops.charm import CharmBase, RelationRole
from ops.framework import EventBase, EventSource, Object, ObjectEvents
from ops.framework import BoundEvent, EventBase, EventSource, Object, ObjectEvents

# The unique Charmhub library identifier, never change it
from ops.model import ModelError

LIBID = "bc84295fef5f4049878f07b131968ee2"

# Increment this major API version when introducing breaking changes
LIBAPI = 0

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 19
LIBPATCH = 20

logger = logging.getLogger(__name__)

@@ -923,7 +921,7 @@ def add_path(self, path: str, *, recursive: bool = False) -> None:
elif path.is_file():
self.alert_groups.extend(self._from_file(path.parent, path))
else:
logger.warning("path does not exist: %s", path)
logger.debug("Alert rules path does not exist: %s", path)

def as_dict(self) -> dict:
"""Return standard alert rules file in dict representation.
@@ -1090,7 +1088,7 @@ def alerts(self) -> dict:
"""
alerts = {} # type: Dict[str, dict] # mapping b/w juju identifiers and alert rule files
for relation in self._charm.model.relations[self._relation_name]:
if not relation.units:
if not relation.units or not relation.app:
continue

alert_rules = json.loads(relation.data[relation.app].get("alert_rules", "{}"))
@@ -1130,7 +1128,7 @@ def _get_identifier_by_alert_rules(self, rules: dict) -> Union[str, None]:
rules: a dict of alert rules
"""
if "groups" not in rules:
logger.warning("No alert groups were found in relation data")
logger.debug("No alert groups were found in relation data")
return None

# Construct an ID based on what's in the alert rules if they have labels
@@ -1432,6 +1430,7 @@ def __init__(
relation_name: str = DEFAULT_RELATION_NAME,
jobs=None,
alert_rules_path: str = DEFAULT_ALERT_RULES_RELATIVE_PATH,
refresh_event: Optional[Union[BoundEvent, List[BoundEvent]]] = None,
):
"""Construct a metrics provider for a Prometheus charm.
@@ -1525,6 +1524,8 @@ def __init__(
files. Defaults to "./prometheus_alert_rules",
resolved relative to the directory hosting the charm entry file.
The alert rules are automatically updated on charm upgrade.
refresh_event: an optional bound event or list of bound events which
will be observed to re-set scrape job data (IP address and others)
Raises:
RelationNotFoundError: If there is no relation in the charm's metadata.yaml
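
A sketch of how a requirer charm might pass refresh_event when constructing the provider; the charm class, scrape job definition and container name are illustrative only, and omitting refresh_event falls back to the default selection implemented further below.

    from charms.prometheus_k8s.v0.prometheus_scrape import MetricsEndpointProvider
    from ops.charm import CharmBase

    class MyCharm(CharmBase):
        def __init__(self, *args):
            super().__init__(*args)
            self.metrics_endpoint = MetricsEndpointProvider(
                self,
                jobs=[{"static_configs": [{"targets": ["*:9090"]}]}],
                # Re-publish scrape job data (unit address, etc.) whenever the
                # workload container restarts.
                refresh_event=self.on.my_workload_pebble_ready,
            )
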
@@ -1543,7 +1544,7 @@ def __init__(
try:
alert_rules_path = _resolve_dir_against_charm_path(charm, alert_rules_path)
except InvalidAlertRulePathError as e:
logger.warning(
logger.debug(
"Invalid Prometheus alert rules folder at %s: %s",
e.alert_rules_absolute_path,
e.message,
@@ -1563,20 +1564,36 @@ def __init__(
self.framework.observe(events.relation_joined, self._set_scrape_job_spec)
self.framework.observe(events.relation_changed, self._set_scrape_job_spec)

# dirty fix: set the ip address when the containers start, as a workaround
# for not being able to lookup the pod ip
for container_name in charm.unit.containers:
self.framework.observe(
charm.on[container_name].pebble_ready,
self._set_unit_ip,
)
if not refresh_event:
if len(self._charm.meta.containers) == 1:
if "kubernetes" in self._charm.meta.series:
# This is a podspec charm
refresh_event = [self._charm.on.update_status]
else:
# This is a sidecar/pebble charm
container = list(self._charm.meta.containers.values())[0]
refresh_event = [self._charm.on[container.name.replace("-", "_")].pebble_ready]
else:
logger.warning(
"%d containers are present in metadata.yaml and "
"refresh_event was not specified. Defaulting to update_status. "
"Metrics IP may not be set in a timely fashion.",
len(self._charm.meta.containers),
)
refresh_event = [self._charm.on.update_status]

else:
if not isinstance(refresh_event, list):
refresh_event = [refresh_event]

# podspec charms do not have a pebble-ready event, so unit IPs need to be set on these events
self.framework.observe(self._charm.on.update_status, self._set_unit_ip)
self.framework.observe(self._charm.on.config_changed, self._set_unit_ip)
for ev in refresh_event:
self.framework.observe(ev, self._set_unit_ip)

self.framework.observe(self._charm.on.upgrade_charm, self._set_scrape_job_spec)

# If there is no leader during relation_joined we will still need to set alert rules.
self.framework.observe(self._charm.on.leader_elected, self._set_scrape_job_spec)

def _set_scrape_job_spec(self, event):
"""Ensure scrape target information is made available to prometheus.
@@ -1617,16 +1634,7 @@ def _set_unit_ip(self, _):
event is actually needed.
"""
for relation in self._charm.model.relations[self._relation_name]:
unit_ip = str(self._charm.model.get_binding(relation).network.bind_address)

if not self._is_valid_unit_address(unit_ip):
# relation data will be updated later when a valid address becomes available
with contextlib.suppress(KeyError):
del relation.data[self._charm.unit]["prometheus_scrape_unit_address"]
del relation.data[self._charm.unit]["prometheus_scrape_unit_name"]
continue

relation.data[self._charm.unit]["prometheus_scrape_unit_address"] = unit_ip
relation.data[self._charm.unit]["prometheus_scrape_unit_address"] = socket.getfqdn()
relation.data[self._charm.unit]["prometheus_scrape_unit_name"] = str(
self._charm.model.unit.name
)
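
The unit address published on the relation is now the unit's FQDN rather than a validated bind address, which sidesteps the earlier delete-and-retry handling for not-yet-available IPs. Roughly, the value written into relation data is just:

    import socket

    # On a Kubernetes sidecar charm this typically resolves to the pod's stable
    # DNS name rather than a bare IP, so it stays valid across pod restarts.
    print(socket.getfqdn())
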
@@ -1702,7 +1710,7 @@ def __init__(
try:
dir_path = _resolve_dir_against_charm_path(charm, dir_path)
except InvalidAlertRulePathError as e:
logger.warning(
logger.debug(
"Invalid Prometheus alert rules folder at %s: %s",
e.alert_rules_absolute_path,
e.message,
@@ -1872,13 +1880,13 @@ def _set_prometheus_data(self, event):
jobs = [] # list of scrape jobs, one per relation
for relation in self.model.relations[self._target_relation]:
targets = self._get_targets(relation)
if targets:
if targets and relation.app:
jobs.append(self._static_scrape_job(targets, relation.app.name))

groups = [] # list of alert rule groups, one group per relation
for relation in self.model.relations[self._alert_rules_relation]:
unit_rules = self._get_alert_rules(relation)
if unit_rules:
if unit_rules and relation.app:
appname = relation.app.name
rules = self._label_alert_rules(unit_rules, appname)
group = {"name": self._group_name(appname), "rules": rules}
@@ -2281,13 +2289,13 @@ def _get_transformer_path(self) -> Optional[Path]:
arch = "amd64" if arch == "x86_64" else arch
res = "promql-transform-{}".format(arch)
try:
path = self._charm.model.resources.fetch(res)
os.chmod(path, 0o777)
path = Path(res).resolve()
path.chmod(0o777)
return path
except NotImplementedError:
logger.debug("System lacks support for chmod")
except (NameError, ModelError):
logger.debug('No resource available for the platform "{}"'.format(arch))
except FileNotFoundError:
logger.debug('Could not locate promql transform at: "{}"'.format(res))
return None

def _exec(self, cmd):
1 change: 0 additions & 1 deletion requirements.txt
@@ -4,4 +4,3 @@
ops==1.4.0
oci-image
serialized-data-interface<0.4
pytest-mock
4 changes: 2 additions & 2 deletions src/charm.py
@@ -5,7 +5,7 @@
import logging
from random import choices
from string import ascii_uppercase, digits
from base64 import b64encode, b64decode
from base64 import b64encode
from hashlib import sha256

from charms.prometheus_k8s.v0.prometheus_scrape import MetricsEndpointProvider
@@ -146,7 +146,7 @@ def main(self, event):
spec["kubernetesResources"]["secrets"].append(self._get_ssl_secret())
else:
self.log.info(
"SSL: No secret specified in charm config. Proceeding without SSL."
"SSL: No secret specified in charm config. Proceeding without SSL."
)

self.model.pod.set_spec(spec)
3 changes: 2 additions & 1 deletion test-requirements.txt
@@ -3,8 +3,9 @@

flake8
black
flake8-copyright<0.3
git+https://github.com/savoirfairelinux/flake8-copyright.git@b84cae576875d5d73436a372c1535fbfb0f89a95
pytest
pytest-mock
pyyaml
tenacity<8.1
requests
