add scaling and integration test
kelkawi-a committed Sep 4, 2023
1 parent 029cb2d commit bbf4e41
Showing 10 changed files with 148 additions and 25 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/promote_charm.yaml
@@ -19,7 +19,7 @@ on:

jobs:
promote-charm:
uses: canonical/operator-workflows/.github/workflows/promote_charm.yaml@main
uses: canonical/operator-workflows/.github/workflows/promote_charm.yaml@8892eb826818585b397295e40276ddd0c5d3d459
with:
origin-channel: ${{ github.event.inputs.origin-channel }}
destination-channel: ${{ github.event.inputs.destination-channel }}
2 changes: 1 addition & 1 deletion .github/workflows/test.yaml
@@ -5,5 +5,5 @@ on:

jobs:
unit-tests:
uses: canonical/operator-workflows/.github/workflows/test.yaml@main
uses: canonical/operator-workflows/.github/workflows/test.yaml@8892eb826818585b397295e40276ddd0c5d3d459
secrets: inherit
2 changes: 1 addition & 1 deletion .github/workflows/test_and_publish_charm.yaml
@@ -15,7 +15,7 @@ on:

jobs:
publish-to-edge:
uses: canonical/operator-workflows/.github/workflows/test_and_publish_charm.yaml@main
uses: canonical/operator-workflows/.github/workflows/test_and_publish_charm.yaml@8892eb826818585b397295e40276ddd0c5d3d459
secrets: inherit
with:
integration-test-provider: microk8s
5 changes: 5 additions & 0 deletions actions.yaml
@@ -0,0 +1,5 @@
# Copyright 2023 Canonical Ltd.
# See LICENSE file for licensing details.

restart:
description: Restart the Temporal Web UI.
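
Below is a minimal integration-test sketch (not part of this commit) of how the new `restart` action could be exercised with pytest-operator; `APP_NAME` and the fixtures are assumed to be the ones defined in `tests/integration/test_charm.py`, and the handler it drives is the `_on_restart` method added to `src/charm.py` further down.

```python
# Sketch only: assumes the pytest-operator OpsTest fixture and the APP_NAME
# constant from tests/integration/test_charm.py.
from pytest_operator.plugin import OpsTest


async def test_restart_action(ops_test: OpsTest):
    """Run the restart action on the first unit and check that it completes."""
    unit = ops_test.model.applications[APP_NAME].units[0]  # noqa: F821
    action = await unit.run_action("restart")
    result = await action.wait()
    assert result.status == "completed"
```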
38 changes: 32 additions & 6 deletions lib/charms/nginx_ingress_integrator/v0/nginx_route.py
@@ -1,5 +1,5 @@
# Copyright 2023 Canonical Ltd.
# Licensed under the Apache2.0, see LICENCE file in charm source for details.
# Licensed under the Apache2.0. See LICENSE file in charm source for details.
"""Library for the nginx-route relation.
This library contains the require and provide functions for handling
@@ -34,12 +34,12 @@
```python
from charms.nginx_ingress_integrator.v0.nginx_route import NginxRouteRequirer
# In your charm's `__init__` method.
# In your charm's `__init__` method (assuming your app is listening on port 8080).
require_nginx_route(
charm=self,
service_hostname=self.config["external_hostname"],
service_hostname=self.app.name,
service_name=self.app.name,
service_port=80
service_port=8080
)
```
@@ -52,6 +52,23 @@
You _must_ require nginx route as part of the `__init__` method
rather than, for instance, a config-changed event handler, for the relation
changed event to be properly handled.
In the example above we set `service_hostname` (which translates to the
external hostname for the application when related to nginx-ingress-integrator)
to `self.app.name`. This ensures that, by default, the charm is available at
the name of the deployed Juju application, which can be overridden in a
production deployment by setting `service-hostname` on the
nginx-ingress-integrator charm. For example:
```bash
juju deploy nginx-ingress-integrator
juju deploy my-charm
juju relate nginx-ingress-integrator my-charm:nginx-route
# The service is now reachable on the ingress IP(s) of your k8s cluster at
# 'http://my-charm'.
juju config nginx-ingress-integrator service-hostname='my-charm.example.com'
# The service is now reachable on the ingress IP(s) of your k8s cluster at
# 'http://my-charm.example.com'.
```
"""
import logging
import typing
@@ -69,7 +86,7 @@

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 2
LIBPATCH = 3

__all__ = ["require_nginx_route", "provide_nginx_route"]

@@ -153,7 +170,9 @@ def _config_reconciliation(self, _event: typing.Any = None) -> None:
relation_app_data.update({k: str(v) for k, v in self._config.items()})


def require_nginx_route( # pylint: disable=too-many-locals,too-many-branches
# C901 is ignored since the method has too many ifs, but it would not
# necessarily be better to split it into smaller methods.
def require_nginx_route( # pylint: disable=too-many-locals,too-many-branches # noqa: C901
*,
charm: ops.charm.CharmBase,
service_hostname: str,
@@ -303,6 +322,9 @@ def _on_relation_changed(self, event: ops.charm.RelationChangedEvent) -> None:
Args:
event: Event triggering the relation-changed hook for the relation.
Raises:
RuntimeError: If _on_relation_changed is triggered by a broken relation.
"""
# `self.unit` isn't available here, so use `self.model.unit`.
if not self._charm.model.unit.is_leader():
@@ -376,6 +398,10 @@ def provide_nginx_route(
on_nginx_route_broken: Callback function for the nginx-route-broken event.
nginx_route_relation_name: Specifies the relation name of the relation handled by this
provider class. The relation must have the nginx-route interface.
Raises:
RuntimeError: If provide_nginx_route was invoked twice with
the same nginx-route relation name.
"""
if __provider_references.get(charm, {}).get(nginx_route_relation_name) is not None:
raise RuntimeError(
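
For context only (not part of this diff), here is a hedged sketch of the provider side that the new `Raises` documentation refers to; the charm class and callback names are illustrative, the `on_nginx_route_available` parameter is assumed from the library's documented interface, and a second `provide_nginx_route` call with the same `nginx-route` relation name would raise the documented RuntimeError.

```python
# Illustrative provider-side usage; the class and callback names are assumptions.
import ops

from charms.nginx_ingress_integrator.v0.nginx_route import provide_nginx_route


class NginxIngressCharm(ops.charm.CharmBase):
    def __init__(self, *args):
        super().__init__(*args)
        # Register the provider once; calling provide_nginx_route again with
        # the same relation name raises the RuntimeError documented above.
        provide_nginx_route(
            charm=self,
            on_nginx_route_available=self._on_nginx_route_available,
            on_nginx_route_broken=self._on_nginx_route_broken,
            nginx_route_relation_name="nginx-route",
        )

    def _on_nginx_route_available(self, event: ops.charm.RelationEvent) -> None:
        """Handle new or updated nginx-route relation data."""

    def _on_nginx_route_broken(self, event: ops.charm.RelationBrokenEvent) -> None:
        """Handle removal of the nginx-route relation."""
```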
55 changes: 51 additions & 4 deletions src/charm.py
@@ -63,6 +63,7 @@ def __init__(self, *args):
self._state = State(self.app, lambda: self.model.get_relation("peer"))

# Handle basic charm lifecycle.
self.framework.observe(self.on.peer_relation_changed, self._on_peer_relation_changed)
self.framework.observe(self.on.install, self._on_install)
self.framework.observe(self.on.temporal_ui_pebble_ready, self._on_temporal_ui_pebble_ready)
self.framework.observe(self.on.config_changed, self._on_config_changed)
@@ -72,6 +73,8 @@ def __init__(self, *args):
self.framework.observe(self.on.ui_relation_changed, self._on_ui_relation_changed)
self.framework.observe(self.on.ui_relation_broken, self._on_ui_relation_broken)

self.framework.observe(self.on.restart_action, self._on_restart)

# Handle Ingress.
self._require_nginx_route()

Expand All @@ -83,7 +86,7 @@ def _require_nginx_route(self):
service_name=self.app.name,
service_port=self.config["port"],
tls_secret_name=self.config["tls-secret-name"],
backend_protocol="HTTPS",
backend_protocol="HTTP",
)

@log_event_handler(logger)
Expand All @@ -104,6 +107,15 @@ def _on_temporal_ui_pebble_ready(self, event):
"""
self._update(event)

@log_event_handler(logger)
def _on_peer_relation_changed(self, event):
"""Handle peer relation changed event.
Args:
event: The event triggered when the relation changed.
"""
self._update(event)

@log_event_handler(logger)
def _on_config_changed(self, event):
"""Handle configuration changes.
@@ -114,15 +126,38 @@ def _on_config_changed(self, event):
self.unit.status = WaitingStatus("configuring temporal")
self._update(event)

@log_event_handler(logger)
def _on_restart(self, event):
"""Restart Temporal ui action handler.
Args:
event:The event triggered by the restart action
"""
container = self.unit.get_container(self.name)
if not container.can_connect():
event.defer()
return

self.unit.status = MaintenanceStatus("restarting ui")
container.restart(self.name)

event.set_results({"result": "ui successfully restarted"})

@log_event_handler(logger)
def _on_ui_relation_joined(self, event):
"""Handle joining a ui:temporal relation.
Args:
event: The event triggered when the relation changed.
"""
if not self._state.is_ready():
event.defer()
return

self.unit.status = WaitingStatus(f"handling {event.relation.name} change")
self._state.server_status = event.relation.data[event.app].get("server_status")
if self.unit.is_leader():
self._state.server_status = event.relation.data[event.app].get("server_status")

self._update(event)

@log_event_handler(logger)
@@ -132,7 +167,13 @@ def _on_ui_relation_changed(self, event):
Args:
event: The event triggered when the relation changed.
"""
self._state.server_status = event.relation.data[event.app].get("server_status")
if not self._state.is_ready():
event.defer()
return

if self.unit.is_leader():
self._state.server_status = event.relation.data[event.app].get("server_status")

logger.debug(f"ui:temporal: server is {self._state.server_status}")
self._update(event)

@@ -143,8 +184,14 @@ def _on_ui_relation_broken(self, event):
Args:
event: The event triggered when the relation changed.
"""
if not self._state.is_ready():
event.defer()
return

self.unit.status = WaitingStatus(f"handling {event.relation.name} removal")
self._state.server_status = "blocked"
if self.unit.is_leader():
self._state.server_status = "blocked"

self._update(event)

def _validate(self):
26 changes: 26 additions & 0 deletions tests/integration/helpers.py
@@ -7,6 +7,8 @@

import socket

from pytest_operator.plugin import OpsTest


def gen_patch_getaddrinfo(host: str, resolve_to: str): # noqa
"""Generate patched getaddrinfo function.
@@ -35,3 +37,27 @@ def patched_getaddrinfo(*args):
return original_getaddrinfo(*args)

return patched_getaddrinfo


async def scale(ops_test: OpsTest, app, units):
"""Scale the application to the provided number and wait for idle.
Args:
ops_test: PyTest object.
app: Application to be scaled.
units: Number of units required.
"""
await ops_test.model.applications[app].scale(scale=units)

# Wait for model to settle
await ops_test.model.wait_for_idle(
apps=[app],
status="active",
idle_period=30,
raise_on_error=False,
raise_on_blocked=True,
timeout=300,
wait_for_exact_units=units,
)

assert len(ops_test.model.applications[app].units) == units
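
A short usage sketch (illustrative, not part of this commit) of the `scale` helper above; `test_charm.py` below calls it the same way to scale the application up to two units.

```python
# Illustrative usage of the scale() helper; APP_NAME is assumed to be the
# constant defined in tests/integration/test_charm.py.
from helpers import scale
from pytest_operator.plugin import OpsTest


async def test_scaling_down(ops_test: OpsTest):
    """Scale the application back down to a single unit."""
    await scale(ops_test, app=APP_NAME, units=1)
    assert len(ops_test.model.applications[APP_NAME].units) == 1
```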
34 changes: 27 additions & 7 deletions tests/integration/test_charm.py
@@ -13,7 +13,7 @@
import pytest_asyncio
import requests
import yaml
from helpers import gen_patch_getaddrinfo
from helpers import gen_patch_getaddrinfo, scale
from pytest_operator.plugin import OpsTest

logger = logging.getLogger(__name__)
@@ -28,16 +28,17 @@
@pytest_asyncio.fixture(name="deploy", scope="module")
async def deploy(ops_test: OpsTest):
"""The app is up and running."""
charm = await ops_test.build_charm(".")
resources = {"temporal-ui-image": METADATA["containers"]["temporal-ui"]["upstream-source"]}

# Deploy temporal server, temporal admin and postgresql charms.
await ops_test.model.deploy(charm, resources=resources, application_name=APP_NAME)
await ops_test.model.deploy(APP_NAME_SERVER, channel="stable")
await ops_test.model.deploy(APP_NAME_ADMIN, channel="stable")
await ops_test.model.deploy("postgresql-k8s", channel="edge", trust=True)
await ops_test.model.deploy("nginx-ingress-integrator", trust=True)

charm = await ops_test.build_charm(".")
resources = {"temporal-ui-image": METADATA["containers"]["temporal-ui"]["upstream-source"]}

await ops_test.model.deploy(charm, resources=resources, application_name=APP_NAME)

async with ops_test.fast_forward():
await ops_test.model.wait_for_idle(
apps=[APP_NAME, APP_NAME_SERVER, APP_NAME_ADMIN],
@@ -53,8 +54,8 @@ async def deploy(ops_test: OpsTest):
)

assert ops_test.model.applications[APP_NAME].units[0].workload_status == "blocked"
await ops_test.model.integrate(f"{APP_NAME_SERVER}:db", "postgresql-k8s:db")
await ops_test.model.integrate(f"{APP_NAME_SERVER}:visibility", "postgresql-k8s:db")
await ops_test.model.integrate(f"{APP_NAME_SERVER}:db", "postgresql-k8s:database")
await ops_test.model.integrate(f"{APP_NAME_SERVER}:visibility", "postgresql-k8s:database")
await ops_test.model.integrate(f"{APP_NAME_SERVER}:admin", f"{APP_NAME_ADMIN}:admin")

await ops_test.model.wait_for_idle(
@@ -111,3 +112,22 @@ async def test_ingress(self, ops_test: OpsTest):
with unittest.mock.patch.multiple(socket, getaddrinfo=gen_patch_getaddrinfo(new_hostname, "127.0.0.1")):
response = requests.get(f"https://{new_hostname}", timeout=5, verify=False) # nosec
assert response.status_code == 200 and 'id="svelte"' in response.text.lower()

async def test_scaling_up(self, ops_test: OpsTest):
"""Scale Temporal worker charm up to 2 units."""
await scale(ops_test, app=APP_NAME, units=2)

status = await ops_test.model.get_status() # noqa: F821

for i in range(2):
address = status["applications"][APP_NAME]["units"][f"{APP_NAME}/{i}"]["address"]
url = f"http://{address}:8080"
logger.info("curling app address: %s", url)

response = requests.get(url, timeout=300)
assert response.status_code == 200

hostname = "temporal-web"
with unittest.mock.patch.multiple(socket, getaddrinfo=gen_patch_getaddrinfo(hostname, "127.0.0.1")):
response = requests.get(f"https://{hostname}", timeout=5, verify=False) # nosec
assert response.status_code == 200 and 'id="svelte"' in response.text.lower()
6 changes: 3 additions & 3 deletions tests/unit/test_charm.py
@@ -97,7 +97,7 @@ def test_ingress(self):
"service-name": harness.charm.app.name,
"service-port": UI_PORT,
"tls-secret-name": "temporal-tls",
"backend-protocol": "HTTPS",
"backend-protocol": "HTTP",
}

new_hostname = "new-temporal-ui-k8s"
@@ -110,7 +110,7 @@
"service-name": harness.charm.app.name,
"service-port": UI_PORT,
"tls-secret-name": "temporal-tls",
"backend-protocol": "HTTPS",
"backend-protocol": "HTTP",
}

new_tls = "new-tls"
@@ -123,7 +123,7 @@
"service-name": harness.charm.app.name,
"service-port": UI_PORT,
"tls-secret-name": new_tls,
"backend-protocol": "HTTPS",
"backend-protocol": "HTTP",
}

def test_ready(self):
3 changes: 1 addition & 2 deletions tox.ini
@@ -94,10 +94,9 @@ commands =
description = Run integration tests
deps =
ipdb==0.13.9
juju==3.1.0.1
juju==3.2.0.1
pytest==7.1.3
pytest-operator==0.22.0
temporalio==1.1.0
-r{toxinidir}/requirements.txt
commands =
pytest -v --tb native --ignore={[vars]tst_path}unit --log-cli-level=INFO -s {posargs}
