Skip to content

Self-contained-coordinator datasink related issues (build variant, runner info, metadata) fixes #21

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Aug 19, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "redis-benchmarks-specification"
version = "0.1.3"
version = "0.1.4"
description = "The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute."
authors = ["filipecosta90 <filipecosta.90@gmail.com>"]
readme = "Readme.md"
Expand All @@ -16,7 +16,7 @@ argparse = "^1.4.0"
Flask-HTTPAuth = "^4.4.0"
PyYAML = "^5.4.1"
docker = "^4.4.4"
redisbench-admin = "^0.4.11"
redisbench-admin = "^0.4.14"
psutil = "^5.8.0"
tox-docker = "^3.0.0"

Expand Down
50 changes: 38 additions & 12 deletions redis_benchmarks_specification/__builder__/builder.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,18 @@
import argparse
import io
import json
import logging
import tempfile
import shutil

import docker
import redis
import os
from zipfile import ZipFile, ZipInfo

from redis_benchmarks_specification.__builder__.schema import get_build_config
from redis_benchmarks_specification.__builder__.schema import (
get_build_config,
get_build_config_metadata,
)
from redis_benchmarks_specification.__common__.env import (
STREAM_KEYNAME_GH_EVENTS_COMMIT,
GH_REDIS_SERVER_HOST,
Expand Down Expand Up @@ -170,6 +173,8 @@ def builder_process_stream(builders_folder, conn, different_build_specs, previou

for build_spec in different_build_specs:
build_config, id = get_build_config(builders_folder + "/" + build_spec)
build_config_metadata = get_build_config_metadata(build_config)

build_image = build_config["build_image"]
run_image = build_image
if "run_image" in build_config:
Expand Down Expand Up @@ -242,6 +247,7 @@ def builder_process_stream(builders_folder, conn, different_build_specs, previou
"arch": build_arch,
"build_vars": build_vars_str,
"build_command": build_command,
"metadata": json.dumps(build_config_metadata),
"build_artifacts": ",".join(build_artifacts),
}
if git_branch is not None:
Expand All @@ -266,11 +272,25 @@ def builder_process_stream(builders_folder, conn, different_build_specs, previou
)
shutil.rmtree(temporary_dir, ignore_errors=True)
new_builds_count = new_builds_count + 1
conn.xack(
STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
ack_reply = conn.xack(
STREAM_KEYNAME_GH_EVENTS_COMMIT,
STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
streamId,
)
if type(ack_reply) == bytes:
ack_reply = ack_reply.decode()
if ack_reply == "1":
logging.info(
"Sucessfully acknowledge build variation stream with id {}.".format(
streamId
)
)
else:
logging.error(
"Unable to acknowledge build variation stream with id {}. XACK reply {}".format(
streamId, ack_reply
)
)
else:
logging.error("Missing commit information within received message.")
return previous_id, new_builds_count
Expand All @@ -279,6 +299,7 @@ def builder_process_stream(builders_folder, conn, different_build_specs, previou
def build_spec_image_prefetch(builders_folder, different_build_specs):
logging.info("checking build spec requirements")
already_checked_images = []
client = docker.from_env()
for build_spec in different_build_specs:
build_config, id = get_build_config(builders_folder + "/" + build_spec)
if build_config["kind"] == "docker":
Expand All @@ -289,19 +310,24 @@ def build_spec_image_prefetch(builders_folder, different_build_specs):
id, build_image
)
)
import docker

client = docker.from_env()
image = client.images.pull(build_image)
logging.info(
"Build {} requirement: build image {} is available with id: {}.".format(
id, build_image, image.id
if build_image not in client.images.list():
logging.info(
"Build {} requirement: build image {} is not available locally. Fetching it from hub".format(
id, build_image
)
)
client.images.pull(build_image)
else:
logging.info(
"Build {} requirement: build image {} is available locally.".format(
id, build_image
)
)
)
already_checked_images.append(build_image)
else:
logging.info(
"Build {} requirement: build image {} availability was already checked.".format(
id, build_image
)
)
return already_checked_images
7 changes: 7 additions & 0 deletions redis_benchmarks_specification/__builder__/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,3 +14,10 @@ def get_build_config(usecase_filename):
# print(build_config)
id = build_config["id"]
return build_config, id


def get_build_config_metadata(build_config):
    """Return the optional ``metadata`` mapping of a build config.

    Args:
        build_config: Parsed build-spec dict (as returned by
            ``get_build_config``).

    Returns:
        The value of the ``"metadata"`` key when present, otherwise an
        empty dict. Never returns ``None``, so callers can serialize the
        result (e.g. ``json.dumps``) unconditionally.
    """
    # dict.get with a default replaces the manual `if key in dict` check.
    return build_config.get("metadata", {})
1 change: 1 addition & 0 deletions redis_benchmarks_specification/__common__/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,3 +62,4 @@
LOG_LEVEL = logging.WARN

MACHINE_CPU_COUNT = psutil.cpu_count()
MACHINE_NAME = os.uname()[1]
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
DATASINK_RTS_AUTH,
DATASINK_RTS_USER,
DATASINK_RTS_PUSH,
MACHINE_NAME,
)


Expand All @@ -23,6 +24,12 @@ def create_self_contained_coordinator_args():
default=MACHINE_CPU_COUNT,
help="Specify how much of the available CPU resources the coordinator can use.",
)
parser.add_argument(
"--platform-name",
type=str,
default=MACHINE_NAME,
help="Specify the running platform name. By default it will use the machine name.",
)
parser.add_argument(
"--logname", type=str, default=None, help="logname to write the logs to"
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,7 @@ def main():
)
logging.error("Error message {}".format(e.__str__()))
exit(1)
rts = None
if args.datasink_push_results_redistimeseries:
logging.info(
"Checking redistimeseries datasink connection is available at: {}:{} to push the timeseries data".format(
Expand All @@ -124,7 +125,7 @@ def main():
password=args.datasink_redistimeseries_pass,
username=args.datasink_redistimeseries_user,
)
rts.ping()
rts.redis.ping()
except redis.exceptions.ConnectionError as e:
logging.error(
"Unable to connect to redis available at: {}:{}".format(
Expand Down Expand Up @@ -156,6 +157,7 @@ def main():
rts,
testsuite_spec_files,
topologies_map,
args.platform_name,
)


Expand Down Expand Up @@ -189,6 +191,7 @@ def self_contained_coordinator_blocking_read(
rts,
testsuite_spec_files,
topologies_map,
platform_name,
):
num_process_streams = 0
overall_result = False
Expand All @@ -213,8 +216,29 @@ def self_contained_coordinator_blocking_read(
rts,
testsuite_spec_files,
topologies_map,
platform_name,
)
num_process_streams = num_process_streams + 1
if overall_result is True:
ack_reply = conn.xack(
STREAM_KEYNAME_NEW_BUILD_EVENTS,
STREAM_GH_NEW_BUILD_RUNNERS_CG,
stream_id,
)
if type(ack_reply) == bytes:
ack_reply = ack_reply.decode()
if ack_reply == "1":
logging.info(
"Sucessfully acknowledge build variation stream with id {}.".format(
stream_id
)
)
else:
logging.error(
"Unable to acknowledge build variation stream with id {}. XACK reply {}".format(
stream_id, ack_reply
)
)
return overall_result, stream_id, num_process_streams


Expand All @@ -226,6 +250,7 @@ def process_self_contained_coordinator_stream(
rts,
testsuite_spec_files,
topologies_map,
running_platform,
):
stream_id, testDetails = newTestInfo[0][1][0]
stream_id = stream_id.decode()
Expand All @@ -234,6 +259,8 @@ def process_self_contained_coordinator_stream(

if b"git_hash" in testDetails:
(
build_variant_name,
metadata,
build_artifacts,
git_hash,
git_branch,
Expand Down Expand Up @@ -276,8 +303,14 @@ def process_self_contained_coordinator_stream(
testcases_setname,
tsname_project_total_failures,
tsname_project_total_success,
running_platforms_setname,
testcases_build_variant_setname,
) = get_overall_dashboard_keynames(
tf_github_org, tf_github_repo, tf_triggering_env
tf_github_org,
tf_github_repo,
tf_triggering_env,
build_variant_name,
running_platform,
)

benchmark_tool = "redis-benchmark"
Expand Down Expand Up @@ -447,6 +480,9 @@ def process_self_contained_coordinator_stream(
tf_github_repo,
tf_triggering_env,
tsname_project_total_success,
metadata,
build_variant_name,
running_platform,
)
test_result = True

Expand Down Expand Up @@ -505,7 +541,13 @@ def get_benchmark_specs(testsuites_folder):
def extract_build_info_from_streamdata(testDetails):
git_version = None
git_branch = None
metadata = None
build_variant_name = None
git_hash = testDetails[b"git_hash"]
if b"id" in testDetails:
build_variant_name = testDetails[b"id"]
if type(build_variant_name) == bytes:
build_variant_name = build_variant_name.decode()
if b"git_branch" in testDetails:
git_branch = testDetails[b"git_branch"]
if type(git_branch) == bytes:
Expand All @@ -525,7 +567,17 @@ def extract_build_info_from_streamdata(testDetails):
if b"build_artifacts" in testDetails:
build_artifacts_str = testDetails[b"build_artifacts"].decode()
build_artifacts = build_artifacts_str.split(",")
return build_artifacts, git_hash, git_branch, git_version, run_image
if b"metadata" in testDetails:
metadata = json.loads(testDetails[b"metadata"].decode())
return (
build_variant_name,
metadata,
build_artifacts,
git_hash,
git_branch,
git_version,
run_image,
)


def generate_cpuset_cpus(ceil_db_cpu_limit, current_cpu_pos):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ version: 0.4
name: "redis-benchmark-full-suite-1Mkeys-100B"
description: "Runs the default redis-benchmark test suite, for a keyspace length of 1M keys
with a data size of 100 Bytes for each key. On total 50 concurrent connections
will be used, sending 5M requests."
will be used, sending 1M requests."
dbconfig:
- configuration-parameters:
- save: '""'
Expand All @@ -29,13 +29,13 @@ clientconfig:
min-tool-version: "6.2.0"
parameters:
- clients: 50
- requests: 100000
- threads: 2
- requests: 1000000
- threads: 3
- pipeline: 1
- r: 1000000
resources:
requests:
cpus: "2"
cpus: "3"
memory: "2g"
exporter:
redistimeseries:
Expand Down
2 changes: 1 addition & 1 deletion tox.ini
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[testenv:integration-tests]
deps = -r{toxinidir}/utils/test-requirements.txt
passenv = TST_BUILDER_X TST_RUNNER_X
passenv = TST_BUILDER_X TST_RUNNER_X TST_RUNNER_USE_RDB

commands =
black --check redis_benchmarks_specification
Expand Down
10 changes: 10 additions & 0 deletions utils/tests/test_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,23 @@
from redis_benchmarks_specification.__builder__.builder import (
builder_consumer_group_create,
builder_process_stream,
build_spec_image_prefetch,
)
from redis_benchmarks_specification.__common__.env import (
STREAM_KEYNAME_GH_EVENTS_COMMIT,
STREAM_KEYNAME_NEW_BUILD_EVENTS,
)


def test_build_spec_image_prefetch():
    # Prefetching the gcc debian-buster build spec should pull (or find
    # locally) its build image and report it in the checked-images list.
    folder = "./redis_benchmarks_specification/setups/builders"
    specs = ["gcc:8.5.0-amd64-debian-buster-default.yml"]
    checked_images = build_spec_image_prefetch(folder, specs)
    assert "gcc:8.5.0-buster" in checked_images


def test_commit_schema_to_stream_then_build():
try:
run_builder = True
Expand Down
Loading