Skip to content

Self-contained coordinator: consumer-group fixes; datasink helper set names are now written to RedisTimeSeries #29

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Aug 20, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "redis-benchmarks-specification"
version = "0.1.6"
version = "0.1.7"
description = "The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute."
authors = ["filipecosta90 <filipecosta.90@gmail.com>"]
readme = "Readme.md"
Expand All @@ -16,7 +16,7 @@ argparse = "^1.4.0"
Flask-HTTPAuth = "^4.4.0"
PyYAML = "^5.4.1"
docker = "^4.4.4"
redisbench-admin = "^0.4.14"
redisbench-admin = "^0.4.15"
psutil = "^5.8.0"
tox-docker = "^3.0.0"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
get_final_benchmark_config,
)
from redisbench_admin.utils.local import get_local_run_full_filename
from redisbench_admin.utils.remote import get_overall_dashboard_keynames
from redisbench_admin.utils.results import post_process_benchmark_results

from redis_benchmarks_specification.__common__.env import (
Expand Down Expand Up @@ -146,7 +145,8 @@ def main():
exit(1)

logging.info("checking build spec requirements")
build_runners_consumer_group_create(conn)
running_platform = args.platform_name
build_runners_consumer_group_create(conn, running_platform)
stream_id = None
docker_client = docker.from_env()
home = str(Path.home())
Expand All @@ -166,31 +166,38 @@ def main():
rts,
testsuite_spec_files,
topologies_map,
args.platform_name,
running_platform,
)


def build_runners_consumer_group_create(conn, running_platform, id="$"):
    """Ensure the per-platform build-runners consumer group exists.

    Derives the group name from the running platform, then issues
    XGROUP CREATE on the new-build-events stream (``mkstream=True`` also
    creates the stream if absent), starting consumption at ``id``.
    A ResponseError from Redis means the group already exists and is
    logged as an informational message rather than treated as a failure.
    """
    consumer_group_name = get_runners_consumer_group_name(running_platform)
    logging.info("Will use consumer group named {}.".format(consumer_group_name))
    try:
        conn.xgroup_create(
            STREAM_KEYNAME_NEW_BUILD_EVENTS,
            consumer_group_name,
            mkstream=True,
            id=id,
        )
        logging.info(
            "Created consumer group named {} to distribute work.".format(
                consumer_group_name
            )
        )
    except redis.exceptions.ResponseError:
        logging.info(
            "Consumer group named {} already existed.".format(consumer_group_name)
        )


def get_runners_consumer_group_name(running_platform):
    """Return the consumer-group name for build runners on *running_platform*.

    The name is the shared ``STREAM_GH_NEW_BUILD_RUNNERS_CG`` prefix and the
    platform identifier joined by a hyphen, so each platform consumes the
    build-events stream through its own group.
    """
    return f"{STREAM_GH_NEW_BUILD_RUNNERS_CG}-{running_platform}"


def self_contained_coordinator_blocking_read(
conn,
datasink_push_results_redistimeseries,
Expand All @@ -205,10 +212,10 @@ def self_contained_coordinator_blocking_read(
num_process_streams = 0
overall_result = False
consumer_name = "{}-self-contained-proc#{}".format(
STREAM_GH_NEW_BUILD_RUNNERS_CG, "1"
get_runners_consumer_group_name(platform_name), "1"
)
newTestInfo = conn.xreadgroup(
STREAM_GH_NEW_BUILD_RUNNERS_CG,
get_runners_consumer_group_name(platform_name),
consumer_name,
{STREAM_KEYNAME_NEW_BUILD_EVENTS: stream_id},
count=1,
Expand Down Expand Up @@ -307,20 +314,6 @@ def process_self_contained_coordinator_stream(
tf_github_org = "redis"
tf_github_repo = "redis"
tf_triggering_env = "ci"
(
prefix,
testcases_setname,
tsname_project_total_failures,
tsname_project_total_success,
running_platforms_setname,
testcases_build_variant_setname,
) = get_overall_dashboard_keynames(
tf_github_org,
tf_github_repo,
tf_triggering_env,
build_variant_name,
running_platform,
)

benchmark_tool = "redis-benchmark"
for build_artifact in build_artifacts:
Expand Down Expand Up @@ -483,12 +476,10 @@ def process_self_contained_coordinator_stream(
rts,
start_time_ms,
test_name,
testcases_setname,
git_branch,
tf_github_org,
tf_github_repo,
tf_triggering_env,
tsname_project_total_success,
metadata,
build_variant_name,
running_platform,
Expand Down
Binary file modified utils/tests/test_data/dump.rdb
Binary file not shown.
43 changes: 41 additions & 2 deletions utils/tests/test_self_contained_coordinator.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import yaml
from pathlib import Path

from redisbench_admin.utils.remote import get_overall_dashboard_keynames
from redisbench_admin.utils.utils import get_ts_metric_name

from redis_benchmarks_specification.__api__.schema import commit_schema_to_stream
Expand Down Expand Up @@ -82,13 +83,13 @@ def test_self_contained_coordinator_blocking_read():

assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
running_platform = "fco-ThinkPad-T490"

build_runners_consumer_group_create(conn, "0")
build_runners_consumer_group_create(conn, running_platform, "0")
rts = redistimeseries.client.Client(port=16379)
docker_client = docker.from_env()
home = str(Path.home())
stream_id = ">"
running_platform = "fco-ThinkPad-T490"
topologies_map = get_topologies(
"./redis_benchmarks_specification/setups/topologies/topologies.yml"
)
Expand Down Expand Up @@ -140,5 +141,43 @@ def test_self_contained_coordinator_blocking_read():

assert ts_key_name.encode() in conn.keys()

(
prefix,
testcases_setname,
tsname_project_total_failures,
tsname_project_total_success,
running_platforms_setname,
build_variant_setname,
testcases_metric_context_path_setname,
testcases_and_metric_context_path_setname,
) = get_overall_dashboard_keynames(
tf_github_org, tf_github_repo, tf_triggering_env, test_name
)
assert rts.redis.exists(testcases_setname)
assert rts.redis.exists(running_platforms_setname)
assert rts.redis.exists(build_variant_setname)
assert build_variant_name.encode() in rts.redis.smembers(
build_variant_setname
)
assert test_name.encode() in rts.redis.smembers(testcases_setname)
assert running_platform.encode() in rts.redis.smembers(
running_platforms_setname
)
testcases_and_metric_context_path_members = [
x.decode()
for x in rts.redis.smembers(testcases_and_metric_context_path_setname)
]
metric_context_path_members = [
x.decode()
for x in rts.redis.smembers(testcases_metric_context_path_setname)
]
assert len(testcases_and_metric_context_path_members) == len(
metric_context_path_members
)

assert [x.decode() for x in rts.redis.smembers(testcases_setname)] == [
test_name
]

except redis.exceptions.ConnectionError:
pass