
Enabled datasink (redistimeseries) on self-contained-coordinator tool #16
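This change wires the self-contained coordinator to an optional RedisTimeSeries datasink: the results connection becomes a `redistimeseries.client.Client`, the blocking stream read is extracted into a testable `self_contained_coordinator_blocking_read()` helper, per-test success is tracked and exported, the exporter metric paths switch from `$.Tests.Overall.<metric>` to the `$.Tests.*.<metric>` wildcard, and the tox integration setup publishes the datasink container on host port 16379. A minimal sketch of the new datasink connection (host/port values here are illustrative; in the diff they come from `args.datasink_redistimeseries_host`/`_port`):

```python
import redistimeseries.client

# Sketch of the datasink client the coordinator now builds instead of a
# plain redis.StrictRedis.
rts = redistimeseries.client.Client(
    host="localhost",
    port=6379,
    decode_responses=True,
)
rts.add("bench:rps", "*", 1000.0)  # TS.ADD with a server-assigned timestamp
```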


Merged
merged 2 commits on Aug 18, 2021
2 changes: 1 addition & 1 deletion Readme.md
@@ -1,6 +1,6 @@

 [![codecov](https://codecov.io/gh/filipecosta90/redis-benchmarks-specification/branch/main/graph/badge.svg?token=GS64MV1H4W)](https://codecov.io/gh/filipecosta90/redis-benchmarks-specification)
-[![CI tests](https://github.com/filipecosta90/redis-benchmarks-specification/actions/workflows/poetry-ci-test-lint.yml/badge.svg)](https://github.com/filipecosta90/redis-benchmarks-specification/actions/workflows/poetry-ci-test-lint.yml)
+[![CI tests](https://github.com/filipecosta90/redis-benchmarks-specification/actions/workflows/tox.yml/badge.svg)](https://github.com/filipecosta90/redis-benchmarks-specification/actions/workflows/tox.yml)
 [![PyPI version](https://badge.fury.io/py/redis-benchmarks-specification.svg)](https://badge.fury.io/py/redis-benchmarks-specification)
 
 ## Benchmark specifications goal
54 changes: 42 additions & 12 deletions poetry.lock

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions pyproject.toml
@@ -1,8 +1,9 @@
 [tool.poetry]
 name = "redis-benchmarks-specification"
-version = "0.1.2"
+version = "0.1.3"
 description = "The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute."
 authors = ["filipecosta90 <filipecosta.90@gmail.com>"]
+readme = "Readme.md"
 
 [tool.poetry.dependencies]
 python = "^3.6.1"
@@ -15,7 +16,7 @@ argparse = "^1.4.0"
 Flask-HTTPAuth = "^4.4.0"
 PyYAML = "^5.4.1"
 docker = "^4.4.4"
-redisbench-admin = "^0.4.8"
+redisbench-admin = "^0.4.11"
 psutil = "^5.8.0"
 tox-docker = "^3.0.0"
@@ -11,6 +11,8 @@
 import redis
 import os
 from pathlib import Path
+import redistimeseries
+from docker.models.containers import Container
 
 from redisbench_admin.run.common import (
     get_start_time_vars,
@@ -79,6 +81,13 @@ def main():
     topologies_map = get_topologies(topologies_files[0])
     testsuites_folder = os.path.abspath(args.test_suites_folder)
     logging.info("Using test-suites folder dir {}".format(testsuites_folder))
+    testsuite_spec_files = get_benchmark_specs(testsuites_folder)
+    logging.info(
+        "There are a total of {} test-suites in folder {}".format(
+            len(testsuite_spec_files), testsuites_folder
+        )
+    )
+
     logging.info(
         "Using redis available at: {}:{} to read the event streams".format(
             GH_REDIS_SERVER_HOST, GH_REDIS_SERVER_PORT
@@ -108,7 +117,7 @@ def main():
             )
         )
         try:
-            rts = redis.StrictRedis(
+            rts = redistimeseries.client.Client(
                 host=args.datasink_redistimeseries_host,
                 port=args.datasink_redistimeseries_port,
                 decode_responses=True,
@@ -127,11 +136,36 @@ def main():
             exit(1)
 
     logging.info("checking build spec requirements")
+    build_runners_consumer_group_create(conn)
+    stream_id = None
+    docker_client = docker.from_env()
+    home = str(Path.home())
+    # TODO: confirm we do have enough cores to run the spec
+    # availabe_cpus = args.cpu_count
+    datasink_push_results_redistimeseries = args.datasink_push_results_redistimeseries
+    logging.info("Entering blocking read waiting for work.")
+    if stream_id is None:
+        stream_id = args.consumer_start_id
+    while True:
+        _, stream_id, _ = self_contained_coordinator_blocking_read(
+            conn,
+            datasink_push_results_redistimeseries,
+            docker_client,
+            home,
+            stream_id,
+            rts,
+            testsuite_spec_files,
+            topologies_map,
+        )
+
+
+def build_runners_consumer_group_create(conn, id="$"):
     try:
         conn.xgroup_create(
             STREAM_KEYNAME_NEW_BUILD_EVENTS,
             STREAM_GH_NEW_BUILD_RUNNERS_CG,
             mkstream=True,
             id=id,
         )
         logging.info(
             "Created consumer group named {} to distribute work.".format(
@@ -144,36 +178,44 @@
                 STREAM_GH_NEW_BUILD_RUNNERS_CG
             )
         )
-    previous_id = None
-    docker_client = docker.from_env()
-    home = str(Path.home())
-    # TODO: confirm we do have enough cores to run the spec
-    # availabe_cpus = args.cpu_count
-    datasink_push_results_redistimeseries = args.datasink_push_results_redistimeseries
-
-    while True:
-        logging.info("Entering blocking read waiting for work.")
-        if previous_id is None:
-            previous_id = args.consumer_start_id
-        newTestInfo = conn.xreadgroup(
-            STREAM_GH_NEW_BUILD_RUNNERS_CG,
-            "{}-self-contained-proc#{}".format(STREAM_GH_NEW_BUILD_RUNNERS_CG, "1"),
-            {STREAM_KEYNAME_NEW_BUILD_EVENTS: previous_id},
-            count=1,
-            block=0,
-        )
-        if len(newTestInfo[0]) < 2 or len(newTestInfo[0][1]) < 1:
-            previous_id = ">"
-            continue
-        previous_id = process_self_contained_coordinator_stream(
+
+
+def self_contained_coordinator_blocking_read(
+    conn,
+    datasink_push_results_redistimeseries,
+    docker_client,
+    home,
+    stream_id,
+    rts,
+    testsuite_spec_files,
+    topologies_map,
+):
+    num_process_streams = 0
+    overall_result = False
+    consumer_name = "{}-self-contained-proc#{}".format(
+        STREAM_GH_NEW_BUILD_RUNNERS_CG, "1"
+    )
+    newTestInfo = conn.xreadgroup(
+        STREAM_GH_NEW_BUILD_RUNNERS_CG,
+        consumer_name,
+        {STREAM_KEYNAME_NEW_BUILD_EVENTS: stream_id},
+        count=1,
+        block=0,
+    )
+    if len(newTestInfo[0]) < 2 or len(newTestInfo[0][1]) < 1:
+        stream_id = ">"
+    else:
+        stream_id, overall_result = process_self_contained_coordinator_stream(
             datasink_push_results_redistimeseries,
             docker_client,
             home,
             newTestInfo,
             rts,
-            testsuites_folder,
+            testsuite_spec_files,
             topologies_map,
         )
+    num_process_streams = num_process_streams + 1
+    return overall_result, stream_id, num_process_streams


@@ -182,12 +224,14 @@ def process_self_contained_coordinator_stream(
     home,
     newTestInfo,
     rts,
-    testsuites_folder,
+    testsuite_spec_files,
     topologies_map,
 ):
     stream_id, testDetails = newTestInfo[0][1][0]
     stream_id = stream_id.decode()
     logging.info("Received work . Stream id {}.".format(stream_id))
+    overall_result = False
+
     if b"git_hash" in testDetails:
         (
             build_artifacts,
@@ -197,9 +241,8 @@ def process_self_contained_coordinator_stream(
             run_image,
         ) = extract_build_info_from_streamdata(testDetails)
 
-        files = get_benchmark_specs(testsuites_folder)
-
-        for test_file in files:
+        overall_result = True
+        for test_file in testsuite_spec_files:
             redis_containers = []
             client_containers = []
 
@@ -213,6 +256,7 @@ def process_self_contained_coordinator_stream(
                 _,
             ) = extract_redis_dbconfig_parameters(benchmark_config, "dbconfig")
             for topology_spec_name in benchmark_config["redis-topologies"]:
+                test_result = False
                 try:
                     current_cpu_pos = 0
                     ceil_db_cpu_limit = extract_db_cpu_limit(
@@ -383,13 +427,13 @@ def process_self_contained_coordinator_stream(
                     with open(local_benchmark_output_filename, "r") as json_file:
                         results_dict = json.load(json_file)
                     logging.info("Final JSON result {}".format(results_dict))
-
+                    dataset_load_duration_seconds = 0
                     timeseries_test_sucess_flow(
                         datasink_push_results_redistimeseries,
                         git_version,
                         benchmark_config,
                         benchmark_duration_seconds,
-                        None,
+                        dataset_load_duration_seconds,
                         None,
                         topology_spec_name,
@@ -404,6 +448,7 @@ def process_self_contained_coordinator_stream(
                         tf_triggering_env,
                         tsname_project_total_success,
                     )
+                    test_result = True
 
                 except:
                     logging.critical(
@@ -414,18 +459,38 @@ def process_self_contained_coordinator_stream(
                     print("-" * 60)
                     traceback.print_exc(file=sys.stdout)
                     print("-" * 60)
+                    test_result = False
                 # tear-down
                 logging.info("Tearing down setup")
                 for container in redis_containers:
-                    container.stop()
+                    try:
+                        container.stop()
+                    except docker.errors.NotFound:
+                        logging.info(
+                            "When trying to stop DB container with id {} and image {} it was already stopped".format(
+                                container.id, container.image
+                            )
+                        )
+                        pass
+
                 for container in client_containers:
-                    if type(container) != bytes:
-                        container.stop()
+                    if type(container) == Container:
+                        try:
+                            container.stop()
+                        except docker.errors.NotFound:
+                            logging.info(
+                                "When trying to stop Client container with id {} and image {} it was already stopped".format(
+                                    container.id, container.image
+                                )
+                            )
+                            pass
                 shutil.rmtree(temporary_dir, ignore_errors=True)
 
+                overall_result &= test_result
+
     else:
         logging.error("Missing commit information within received message.")
-    return stream_id
+    return stream_id, overall_result


 def get_benchmark_specs(testsuites_folder):
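Pulling the blocking read out of `main()` means one iteration of the coordinator loop is now an ordinary function call returning `(overall_result, stream_id, num_process_streams)`, so the flow can be exercised without the infinite loop. A hypothetical single-iteration invocation (all argument values are illustrative; the names mirror the locals set up in `main()` above):

```python
# Hypothetical one-shot call; in main() this runs inside `while True`,
# which keeps only the returned stream_id.
overall_result, stream_id, num_streams = self_contained_coordinator_blocking_read(
    conn,                  # redis.StrictRedis reading the build-events stream
    True,                  # datasink_push_results_redistimeseries flag
    docker_client,         # docker.from_env()
    home,                  # str(Path.home())
    ">",                   # consume only never-delivered stream entries
    rts,                   # redistimeseries.client.Client datasink
    testsuite_spec_files,  # get_benchmark_specs(testsuites_folder)
    topologies_map,        # get_topologies(...)
)
```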
@@ -44,10 +44,10 @@ exporter:
       - commit
     timemetric: "$.StartTime"
     metrics:
-      - "$.Tests.Overall.rps"
-      - "$.Tests.Overall.avg_latency_ms"
-      - "$.Tests.Overall.p50_latency_ms"
-      - "$.Tests.Overall.p95_latency_ms"
-      - "$.Tests.Overall.p99_latency_ms"
-      - "$.Tests.Overall.max_latency_ms"
-      - "$.Tests.Overall.min_latency_ms"
+      - "$.Tests.*.rps"
+      - "$.Tests.*.avg_latency_ms"
+      - "$.Tests.*.p50_latency_ms"
+      - "$.Tests.*.p95_latency_ms"
+      - "$.Tests.*.p99_latency_ms"
+      - "$.Tests.*.max_latency_ms"
+      - "$.Tests.*.min_latency_ms"
13 changes: 3 additions & 10 deletions tox.ini
@@ -1,13 +1,6 @@
 [testenv:integration-tests]
-passenv = *
-
-deps =
-    pytest
-    pytest-cov
-    codecov
-    black
-    flake8
+deps = -r{toxinidir}/utils/test-requirements.txt
+passenv = TST_BUILDER_X TST_RUNNER_X
 
 commands =
     black --check redis_benchmarks_specification
@@ -22,6 +15,6 @@ docker =
 [docker:rts_datasink]
 image = redislabs/redistimeseries:1.4.7
 ports =
-    6379:6379/tcp
+    16379:6379/tcp
 volumes =
     bind:rw:{toxinidir}/utils/tests/test_data/:/data
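Publishing the datasink container on host port 16379 keeps it from colliding with any Redis already listening on 6379 on the CI host; the integration tests then connect to the remapped port, as in this minimal sketch:

```python
import redis

# The [docker:rts_datasink] section above maps container port 6379 to host
# port 16379, so tests point there instead of the default.
conn = redis.StrictRedis(port=16379)
conn.ping()
```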
5 changes: 5 additions & 0 deletions utils/test-requirements.txt
@@ -0,0 +1,5 @@
+pytest
+pytest-cov
+codecov
+black
+flake8
13 changes: 8 additions & 5 deletions utils/tests/test_builder.py
@@ -15,22 +15,25 @@
 from redis_benchmarks_specification.__common__.env import (
     STREAM_KEYNAME_GH_EVENTS_COMMIT,
     STREAM_KEYNAME_NEW_BUILD_EVENTS,
+    STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
 )
 
 
 def test_commit_schema_to_stream_then_build():
     try:
-        skip_builder = False
-        if skip_builder is not True:
-            conn = redis.StrictRedis()
+        run_builder = True
+        TST_BUILDER_X = os.getenv("TST_BUILDER_X", "1")
+        if TST_BUILDER_X == "0":
+            run_builder = False
+        if run_builder:
+            conn = redis.StrictRedis(port=16379)
             conn.ping()
             conn.flushall()
             builder_consumer_group_create(conn)
             assert conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT) == 0
 
             result, reply_fields, error_msg = commit_schema_to_stream(
-                '{"git_hash":"0cf2df84d4b27af4bffd2bf3543838f09e10f874"}', conn
+                '{"git_hash":"0cf2df84d4b27af4bffd2bf3543838f09e10f874", "git_branch":"unstable"}',
+                conn,
             )
             assert result == True
             assert error_msg == None
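The `TST_BUILDER_X` switch (passed through by tox via `passenv`) lets environments without Docker skip the builder flow: it defaults to `"1"` (run), and exporting `TST_BUILDER_X=0` turns the test into a no-op. A minimal sketch of the same gating pattern:

```python
import os

# Defaults to "1" (run the builder flow); CI can export TST_BUILDER_X=0 to skip.
run_builder = os.getenv("TST_BUILDER_X", "1") != "0"
if run_builder:
    pass  # connect to the datasink on port 16379 and drive the build flow
```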
30 changes: 30 additions & 0 deletions utils/tests/test_data/api_builder_common.py
@@ -0,0 +1,30 @@
+from redis_benchmarks_specification.__api__.api import commit_schema_to_stream
+from redis_benchmarks_specification.__builder__.builder import (
+    builder_consumer_group_create,
+    builder_process_stream,
+)
+from redis_benchmarks_specification.__common__.env import (
+    STREAM_KEYNAME_GH_EVENTS_COMMIT,
+    STREAM_KEYNAME_NEW_BUILD_EVENTS,
+)
+
+
+def flow_1_and_2_api_builder_checks(conn):
+    builder_consumer_group_create(conn)
+    assert conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT) == 0
+    result, reply_fields, error_msg = commit_schema_to_stream(
+        '{"git_hash":"0cf2df84d4b27af4bffd2bf3543838f09e10f874", "git_branch":"unstable"}',
+        conn,
+    )
+    assert result == True
+    assert error_msg == None
+    assert STREAM_KEYNAME_GH_EVENTS_COMMIT.encode() in conn.keys()
+    assert conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT) == 1
+    assert "id" in reply_fields
+    builders_folder = "./redis_benchmarks_specification/setups/builders"
+    different_build_specs = ["gcc:8.5.0-amd64-debian-buster-default.yml"]
+    previous_id = ">"
+    previous_id, new_builds_count = builder_process_stream(
+        builders_folder, conn, different_build_specs, previous_id
+    )
+    assert new_builds_count == 1
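Factoring these assertions into `flow_1_and_2_api_builder_checks` lets multiple tests reuse the API-to-builder verification; a hypothetical caller (the import path is an assumption based on the file's location):

```python
import redis

from utils.tests.test_data.api_builder_common import flow_1_and_2_api_builder_checks

# Hypothetical usage; assumes the tox-managed datasink on host port 16379.
conn = redis.StrictRedis(port=16379)
conn.ping()
conn.flushall()
flow_1_and_2_api_builder_checks(conn)
```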
Binary file modified utils/tests/test_data/dump.rdb
@@ -37,3 +37,17 @@ clientconfig:
   requests:
     cpus: "2"
     memory: "2g"
+exporter:
+  redistimeseries:
+    break_by:
+      - version
+      - commit
+    timemetric: "$.StartTime"
+    metrics:
+      - "$.Tests.*.rps"
+      - "$.Tests.*.avg_latency_ms"
+      - "$.Tests.*.p50_latency_ms"
+      - "$.Tests.*.p95_latency_ms"
+      - "$.Tests.*.p99_latency_ms"
+      - "$.Tests.*.max_latency_ms"
+      - "$.Tests.*.min_latency_ms"