Skip to content

Commit

Permalink
add status check after restart
Browse files Browse the repository at this point in the history
  • Loading branch information
kelkawi-a committed Sep 18, 2023
1 parent bbf4e41 commit 1e8439c
Show file tree
Hide file tree
Showing 4 changed files with 123 additions and 57 deletions.
33 changes: 32 additions & 1 deletion src/charm.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
from ops import main
from ops.charm import CharmBase
from ops.model import ActiveStatus, BlockedStatus, MaintenanceStatus, WaitingStatus
from ops.pebble import CheckStatus

from log import log_event_handler
from state import State
Expand Down Expand Up @@ -74,6 +75,7 @@ def __init__(self, *args):
self.framework.observe(self.on.ui_relation_broken, self._on_ui_relation_broken)

self.framework.observe(self.on.restart_action, self._on_restart)
self.framework.observe(self.on.update_status, self._on_update_status)

# Handle Ingress.
self._require_nginx_route()
Expand Down Expand Up @@ -143,6 +145,27 @@ def _on_restart(self, event):

event.set_results({"result": "worker successfully restarted"})

@log_event_handler(logger)
def _on_update_status(self, event):
    """Handle `update-status` events.

    Re-derives the unit status from the Pebble "up" health check so the
    charm reflects workload health between other hook executions.

    Args:
        event: The `update-status` event triggered at intervals.
    """
    # Bail out quietly if the charm is not yet fully configured/related;
    # _validate signals that condition by raising ValueError.
    try:
        self._validate()
    except ValueError:
        return

    workload = self.unit.get_container(self.name)

    # Mirror the Pebble check result onto the unit status.
    if workload.get_check("up").status == CheckStatus.UP:
        self.unit.status = ActiveStatus()
    else:
        self.unit.status = MaintenanceStatus("Status check: DOWN")

@log_event_handler(logger)
def _on_ui_relation_joined(self, event):
"""Handle joining a ui:temporal relation.
Expand Down Expand Up @@ -271,14 +294,22 @@ def _update(self, event):
# Including config values here so that a change in the
# config forces replanning to restart the service.
"environment": context,
"on-check-failure": {"up": "ignore"},
}
},
"checks": {
"up": {
"override": "replace",
"period": "10s",
"http": {"url": f"http://localhost:{self.config['port']}/"},
}
},
}

container.add_layer(self.name, pebble_layer, combine=True)
container.replan()

self.unit.status = ActiveStatus()
self.unit.status = MaintenanceStatus("replanning application")


if __name__ == "__main__": # pragma: nocover
Expand Down
23 changes: 12 additions & 11 deletions tests/integration/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,14 +50,15 @@ async def scale(ops_test: OpsTest, app, units):
await ops_test.model.applications[app].scale(scale=units)

# Wait for model to settle
await ops_test.model.wait_for_idle(
apps=[app],
status="active",
idle_period=30,
raise_on_error=False,
raise_on_blocked=True,
timeout=300,
wait_for_exact_units=units,
)

assert len(ops_test.model.applications[app].units) == units
async with ops_test.fast_forward():
await ops_test.model.wait_for_idle(
apps=[app],
status="active",
idle_period=30,
raise_on_error=False,
raise_on_blocked=True,
timeout=600,
wait_for_exact_units=units,
)

assert len(ops_test.model.applications[app].units) == units
60 changes: 35 additions & 25 deletions tests/integration/test_charm.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

"""Temporal UI charm integration tests."""

import asyncio
import logging
import socket
import unittest.mock
Expand All @@ -29,10 +30,12 @@
async def deploy(ops_test: OpsTest):
"""The app is up and running."""
# Deploy temporal server, temporal admin and postgresql charms.
await ops_test.model.deploy(APP_NAME_SERVER, channel="stable")
await ops_test.model.deploy(APP_NAME_ADMIN, channel="stable")
await ops_test.model.deploy("postgresql-k8s", channel="edge", trust=True)
await ops_test.model.deploy("nginx-ingress-integrator", trust=True)
asyncio.gather(
ops_test.model.deploy(APP_NAME_SERVER, channel="stable"),
ops_test.model.deploy(APP_NAME_ADMIN, channel="stable"),
ops_test.model.deploy("postgresql-k8s", channel="14", trust=True),
ops_test.model.deploy("nginx-ingress-integrator", channel="edge", revision=71, trust=True),
)

charm = await ops_test.build_charm(".")
resources = {"temporal-ui-image": METADATA["containers"]["temporal-ui"]["upstream-source"]}
Expand Down Expand Up @@ -106,28 +109,35 @@ async def test_ingress(self, ops_test: OpsTest):
new_hostname = "temporal-web"
application = ops_test.model.applications[APP_NAME]
await application.set_config({"external-hostname": new_hostname})
await ops_test.model.wait_for_idle(
apps=[APP_NAME, "nginx-ingress-integrator"], status="active", raise_on_blocked=False, timeout=600
)
with unittest.mock.patch.multiple(socket, getaddrinfo=gen_patch_getaddrinfo(new_hostname, "127.0.0.1")):
response = requests.get(f"https://{new_hostname}", timeout=5, verify=False) # nosec
assert response.status_code == 200 and 'id="svelte"' in response.text.lower()

async with ops_test.fast_forward():
await ops_test.model.wait_for_idle(
apps=[APP_NAME, "nginx-ingress-integrator"],
status="active",
raise_on_blocked=False,
idle_period=30,
timeout=1200,
)

with unittest.mock.patch.multiple(socket, getaddrinfo=gen_patch_getaddrinfo(new_hostname, "127.0.0.1")):
response = requests.get(f"https://{new_hostname}", timeout=5, verify=False) # nosec
assert response.status_code == 200 and 'id="svelte"' in response.text.lower()

async def test_restart_action(self, ops_test: OpsTest):
    """Test charm restart action.

    Runs the `restart` action on the first unit, then waits for the model
    to settle and verifies the workload comes back up active.
    """
    # Trigger the charm's restart action and block until it completes.
    action = await ops_test.model.applications[APP_NAME].units[0].run_action("restart")
    await action.wait()

    # fast_forward shortens the update-status interval so the post-restart
    # health check (and any resulting status change) happens promptly.
    async with ops_test.fast_forward():
        await ops_test.model.wait_for_idle(
            apps=[APP_NAME],
            status="active",
            raise_on_blocked=False,
            timeout=600,
        )

    assert ops_test.model.applications[APP_NAME].units[0].workload_status == "active"

async def test_scaling_up(self, ops_test: OpsTest):
"""Scale Temporal worker charm up to 2 units."""
await scale(ops_test, app=APP_NAME, units=2)

status = await ops_test.model.get_status() # noqa: F821

for i in range(2):
address = status["applications"][APP_NAME]["units"][f"{APP_NAME}/{i}"]["address"]
url = f"http://{address}:8080"
logger.info("curling app address: %s", url)

response = requests.get(url, timeout=300)
assert response.status_code == 200

hostname = "temporal-web"
with unittest.mock.patch.multiple(socket, getaddrinfo=gen_patch_getaddrinfo(hostname, "127.0.0.1")):
response = requests.get(f"https://{hostname}", timeout=5, verify=False) # nosec
assert response.status_code == 200 and 'id="svelte"' in response.text.lower()
64 changes: 44 additions & 20 deletions tests/unit/test_charm.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,10 @@
# pylint:disable=protected-access

import json
from unittest import TestCase
from unittest import TestCase, mock

from ops.model import BlockedStatus
from ops.model import ActiveStatus, BlockedStatus, MaintenanceStatus
from ops.pebble import CheckStatus
from ops.testing import Harness

from charm import TemporalUiK8SOperatorCharm
Expand Down Expand Up @@ -80,12 +81,6 @@ def test_ingress(self):
"""The charm relates correctly to the nginx ingress charm and can be configured."""
harness = self.harness

# Simulate peer relation readiness.
harness.add_relation("peer", "temporal")

# Add the temporal relation.
harness.add_relation("ui", "temporal")

simulate_lifecycle(harness)

nginx_route_relation_id = harness.add_relation("nginx-route", "ingress")
Expand Down Expand Up @@ -130,12 +125,6 @@ def test_ready(self):
"""The pebble plan is correctly generated when the charm is ready."""
harness = self.harness

# Simulate peer relation readiness.
harness.add_relation("peer", "temporal")

# Add the temporal relation.
harness.add_relation("ui", "temporal")

simulate_lifecycle(harness)

# The plan is generated after pebble is ready.
Expand All @@ -152,6 +141,7 @@ def test_ready(self):
"TEMPORAL_DEFAULT_NAMESPACE": "default",
"TEMPORAL_AUTH_ENABLED": False,
},
"on-check-failure": {"up": "ignore"},
}
},
}
Expand All @@ -167,12 +157,6 @@ def test_auth(self):
"""The pebble plan is correctly generated when the charm is ready."""
harness = self.harness

# Simulate peer relation readiness.
harness.add_relation("peer", "temporal")

# Add the temporal relation.
harness.add_relation("ui", "temporal")

simulate_lifecycle(harness)
harness.add_relation("nginx-route", "ingress")

Expand Down Expand Up @@ -204,6 +188,14 @@ def test_auth(self):
"TEMPORAL_AUTH_SCOPES": "[openid,profile,email]",
"TEMPORAL_AUTH_CALLBACK_URL": f"https://{harness.model.config['external-hostname']}/auth/sso/callback",
},
# "checks": {
# "up": {
# "override": "replace",
# "period": "10s",
# "http": {"url": "http://localhost:8080/"},
# }
# },
"on-check-failure": {"up": "ignore"},
}
},
}
Expand All @@ -215,6 +207,32 @@ def test_auth(self):
service = harness.model.unit.get_container(APP_NAME).get_service(APP_NAME)
self.assertTrue(service.is_running())

def test_update_status_up(self):
    """The charm updates the unit status to active based on UP status.

    Stubs the container's `get_check` so the returned check reports
    CheckStatus.UP, then emits `update-status` and verifies the unit
    goes active.
    """
    harness = self.harness

    simulate_lifecycle(harness)

    container = harness.model.unit.get_container(APP_NAME)
    # Replace get_check with a mock whose returned check object reports UP.
    # (Previously the mock was created as Mock(status="up"), which only set
    # a `status` attribute on the callable itself — a no-op; the charm reads
    # the status of the *returned* check, configured below.)
    container.get_check = mock.Mock()
    container.get_check.return_value.status = CheckStatus.UP
    harness.charm.on.update_status.emit()

    self.assertEqual(harness.model.unit.status, ActiveStatus())

def test_update_status_down(self):
    """The charm updates the unit status to maintenance based on DOWN status.

    Stubs the container's `get_check` so the returned check reports
    CheckStatus.DOWN, then emits `update-status` and verifies the unit
    enters maintenance with the expected message.
    """
    harness = self.harness

    simulate_lifecycle(harness)

    container = harness.model.unit.get_container(APP_NAME)
    # Replace get_check with a mock whose returned check object reports DOWN.
    # (Previously the mock was created as Mock(status="up"), which only set
    # a `status` attribute on the callable itself — a no-op; the charm reads
    # the status of the *returned* check, configured below.)
    container.get_check = mock.Mock()
    container.get_check.return_value.status = CheckStatus.DOWN
    harness.charm.on.update_status.emit()

    self.assertEqual(harness.model.unit.status, MaintenanceStatus("Status check: DOWN"))


def simulate_lifecycle(harness):
"""Simulate a healthy charm life-cycle.
Expand All @@ -226,6 +244,12 @@ def simulate_lifecycle(harness):
container = harness.model.unit.get_container(APP_NAME)
harness.charm.on.temporal_ui_pebble_ready.emit(container)

# Simulate peer relation readiness.
harness.add_relation("peer", "temporal")

# Add the temporal relation.
harness.add_relation("ui", "temporal")

# Simulate server readiness.
app = type("App", (), {"name": "temporal-ui-k8s"})()
relation = type(
Expand Down

0 comments on commit 1e8439c

Please sign in to comment.