
Fixed self contained coordinator circular dependency import #121

Merged
merged 5 commits on Oct 3, 2022
3 changes: 1 addition & 2 deletions pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "redis-benchmarks-specification"
version = "0.1.41"
version = "0.1.50"
description = "The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute."
authors = ["filipecosta90 <filipecosta.90@gmail.com>","Redis Performance Group <performance@redis.com>"]
readme = "Readme.md"
@@ -19,7 +19,6 @@ docker = "^5.0.0"
redisbench-admin = "^0.9.3"
#redisbench-admin = {path = "../redisbench-admin", develop = true}
psutil = "^5.8.0"
tox-docker = "^3.1.0"
PyGithub = "^1.55"
GitPython = "^3.1.20"
semver = "^2.13.0"
73 changes: 43 additions & 30 deletions redis_benchmarks_specification/__builder__/builder.py
@@ -381,35 +381,48 @@ def build_spec_image_prefetch(builders_folder, different_build_specs):
build_config, id = get_build_config(builders_folder + "/" + build_spec)
if build_config["kind"] == "docker":
build_image = build_config["build_image"]
if build_image not in already_checked_images:
logging.info(
"Build {} requirement: checking build image {} is available.".format(
id, build_image
)
)
local_images = [
x.tags[0]
for x in client.images.list(filters={"reference": build_image})
]
if build_image not in local_images:
logging.info(
"Build {} requirement: build image {} is not available locally. Fetching it from hub".format(
id, build_image
)
)
client.images.pull(build_image)
hub_pulled_images = hub_pulled_images + 1
else:
logging.info(
"Build {} requirement: build image {} is available locally.".format(
id, build_image
)
)
already_checked_images.append(build_image)
else:
logging.info(
"Build {} requirement: build image {} availability was already checked.".format(
id, build_image
)
hub_pulled_images = check_docker_image_available(
already_checked_images, build_image, client, hub_pulled_images, id
)
if "run_image" in build_config:
run_image = build_config["run_image"]
hub_pulled_images = check_docker_image_available(
already_checked_images, run_image, client, hub_pulled_images, id
)
return already_checked_images, hub_pulled_images


def check_docker_image_available(
already_checked_images, build_image, client, hub_pulled_images, id
):
if build_image not in already_checked_images:
logging.info(
"Build {} requirement: checking docker image {} is available.".format(
id, build_image
)
)
local_images = [
x.tags[0] for x in client.images.list(filters={"reference": build_image})
]
if build_image not in local_images:
logging.info(
"Build {} requirement: docker image {} is not available locally. Fetching it from hub".format(
id, build_image
)
)
client.images.pull(build_image)
hub_pulled_images = hub_pulled_images + 1
else:
logging.info(
"Build {} requirement: docker image {} is available locally.".format(
id, build_image
)
)
already_checked_images.append(build_image)
else:
logging.info(
"Build {} requirement: docker image {} availability was already checked.".format(
id, build_image
)
)
return hub_pulled_images
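
The availability check that used to be inlined for the build image is now shared with the run image through the extracted helper. A minimal usage sketch (not part of the diff; the image names and build id are illustrative):

import docker

client = docker.from_env()
already_checked_images = []
hub_pulled_images = 0
for image in ("gcc:10.5.0-bullseye", "redis:7.0.5"):
    # Pulls the image from the hub only if it is neither local nor already checked.
    hub_pulled_images = check_docker_image_available(
        already_checked_images, image, client, hub_pulled_images, "build-spec-1"
    )
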
6 changes: 6 additions & 0 deletions redis_benchmarks_specification/__cli__/args.py
@@ -63,4 +63,10 @@ def spec_cli_args(parser):
action="store_true",
help="Only check how many benchmarks we would trigger. Don't request benchmark runs at the end.",
)
parser.add_argument(
"--last_n",
type=int,
default=-1,
help="Use the last N samples. by default will use all available values",
)
return parser
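
The new --last_n flag caps how many of the most recent matching commits the CLI enqueues; the default of -1 keeps the previous behaviour of using every commit in range. A minimal parsing sketch (assuming the remaining spec CLI flags are optional):

import argparse

parser = argparse.ArgumentParser()
parser = spec_cli_args(parser)
args, _ = parser.parse_known_args(["--last_n", "10"])
# args.last_n == 10: only the 10 most recent matching commits are considered.
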
16 changes: 13 additions & 3 deletions redis_benchmarks_specification/__cli__/cli.py
@@ -89,6 +89,7 @@ def cli_command_logic(args, project_name, project_version):
)
repo = git.Repo(redisDirPath)
commits = []
total_commits = 0
if args.use_branch:
for commit in repo.iter_commits():
if (
@@ -98,8 +99,17 @@
)
<= args.to_date
):
print(commit.summary)
commits.append({"git_hash": commit.hexsha, "git_branch": args.branch})
if (
args.last_n > 0 and total_commits < args.last_n
) or args.last_n == -1:
total_commits = total_commits + 1
print(commit.summary)
commits.append(
{
"git_hash": commit.hexsha,
"git_branch": repo.active_branch.name,
}
)
if args.use_tags:
tags_regexp = args.tags_regexp
if tags_regexp == ".*":
@@ -150,7 +160,7 @@ def cli_command_logic(args, project_name, project_version):
pass
by_description = "n/a"
if args.use_branch:
by_description = "from branch {}".format(args.branch)
by_description = "from branch {}".format(repo.active_branch.name)
if args.use_tags:
by_description = "by tags"
logging.info(
@@ -116,4 +116,10 @@ def create_self_contained_coordinator_args(project_name):
action="store_true",
help="Read the docker images from redis keys.",
)
parser.add_argument(
"--verbose",
default=False,
action="store_true",
help="Run in verbose mode.",
)
return parser
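
A small sketch of the new flag (the project name string is a placeholder, and the helper is assumed to build its own ArgumentParser, as its name suggests):

parser = create_self_contained_coordinator_args("redis-benchmarks-spec")
args, _ = parser.parse_known_args(["--verbose"])
# args.verbose is True; without the flag it defaults to False.
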
@@ -0,0 +1,57 @@
import json
import logging


def extract_build_info_from_streamdata(testDetails):
use_git_timestamp = False
git_timestamp_ms = None
git_version = None
git_branch = None
metadata = None
build_variant_name = None
fields = [fieldname.decode() for fieldname in testDetails.keys()]
logging.info("Fields on stream {}".format(fields))
git_hash = testDetails[b"git_hash"]
if b"use_git_timestamp" in testDetails:
use_git_timestamp = bool(testDetails[b"use_git_timestamp"].decode())
if b"git_timestamp_ms" in testDetails:
git_timestamp_ms = int(testDetails[b"git_timestamp_ms"].decode())
if b"id" in testDetails:
build_variant_name = testDetails[b"id"]
if type(build_variant_name) == bytes:
build_variant_name = build_variant_name.decode()
if b"git_branch" in testDetails:
git_branch = testDetails[b"git_branch"]
if type(git_branch) == bytes:
git_branch = git_branch.decode()
if b"git_version" in testDetails:
git_version = testDetails[b"git_version"]
if type(git_version) == bytes:
git_version = git_version.decode()
if type(git_hash) == bytes:
git_hash = git_hash.decode()
logging.info("Received commit hash specifier {}.".format(git_hash))
build_artifacts_str = "redis-server"
build_image = testDetails[b"build_image"].decode()
run_image = build_image
if b"run_image" in testDetails:
run_image = testDetails[b"run_image"].decode()
logging.info("detected run image info {}.".format(run_image))
else:
logging.info("using build image info {}.".format(build_image))
if b"build_artifacts" in testDetails:
build_artifacts_str = testDetails[b"build_artifacts"].decode()
build_artifacts = build_artifacts_str.split(",")
if b"metadata" in testDetails:
metadata = json.loads(testDetails[b"metadata"].decode())
return (
build_variant_name,
metadata,
build_artifacts,
git_hash,
git_branch,
git_version,
run_image,
use_git_timestamp,
git_timestamp_ms,
)
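
A usage sketch of the new stream parser (all field values are illustrative); keys and values arrive as bytes because they are read from a Redis stream entry:

test_details = {
    b"git_hash": b"1a2b3c4d",
    b"git_branch": b"unstable",
    b"build_image": b"gcc:10.5.0-bullseye",
    b"build_artifacts": b"redis-server,redis-cli",
}
(
    build_variant_name,
    metadata,
    build_artifacts,
    git_hash,
    git_branch,
    git_version,
    run_image,
    use_git_timestamp,
    git_timestamp_ms,
) = extract_build_info_from_streamdata(test_details)
# build_artifacts == ["redis-server", "redis-cli"]; run_image falls back to the
# build image because no b"run_image" field was present.
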
@@ -0,0 +1,24 @@
def prepare_memtier_benchmark_parameters(
clientconfig,
full_benchmark_path,
port,
server,
local_benchmark_output_filename,
oss_cluster_api_enabled,
):
benchmark_command = [
full_benchmark_path,
"--port",
"{}".format(port),
"--server",
"{}".format(server),
"--json-out-file",
local_benchmark_output_filename,
]
if oss_cluster_api_enabled is True:
benchmark_command.append("--cluster-mode")
benchmark_command_str = " ".join(benchmark_command)
if "arguments" in clientconfig:
benchmark_command_str = benchmark_command_str + " " + clientconfig["arguments"]

return None, benchmark_command_str
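
An illustrative call (the client config dict follows the layout the function expects, and the path and argument values are placeholders) showing the memtier command line the helper assembles:

clientconfig = {"arguments": "--clients 50 --threads 4 --test-time 120"}
_, benchmark_command_str = prepare_memtier_benchmark_parameters(
    clientconfig,
    "/usr/local/bin/memtier_benchmark",
    6379,
    "localhost",
    "benchmark-output.json",
    False,
)
# benchmark_command_str == "/usr/local/bin/memtier_benchmark --port 6379 --server localhost"
#                          " --json-out-file benchmark-output.json --clients 50 --threads 4 --test-time 120"
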
@@ -0,0 +1,17 @@
import math


def generate_cpuset_cpus(ceil_db_cpu_limit, current_cpu_pos):
previous_cpu_pos = current_cpu_pos
current_cpu_pos = current_cpu_pos + int(ceil_db_cpu_limit)
db_cpuset_cpus = ",".join(
[str(x) for x in range(previous_cpu_pos, current_cpu_pos)]
)
return db_cpuset_cpus, current_cpu_pos


def extract_db_cpu_limit(topologies_map, topology_spec_name):
topology_spec = topologies_map[topology_spec_name]
db_cpu_limit = topology_spec["resources"]["requests"]["cpus"]
ceil_db_cpu_limit = math.ceil(float(db_cpu_limit))
return ceil_db_cpu_limit
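
A quick illustration of the two helpers (the topology name and its resource request are placeholders): a topology requesting 2 CPUs is pinned to cpus "0,1" on the first placement and "2,3" on the next one.

topologies_map = {"oss-standalone": {"resources": {"requests": {"cpus": "2"}}}}
ceil_db_cpu_limit = extract_db_cpu_limit(topologies_map, "oss-standalone")  # 2
db_cpuset_cpus, current_cpu_pos = generate_cpuset_cpus(ceil_db_cpu_limit, 0)
# db_cpuset_cpus == "0,1", current_cpu_pos == 2
db_cpuset_cpus, current_cpu_pos = generate_cpuset_cpus(ceil_db_cpu_limit, current_cpu_pos)
# db_cpuset_cpus == "2,3", current_cpu_pos == 4
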
@@ -0,0 +1,90 @@
import logging

import docker

from redis_benchmarks_specification.__self_contained_coordinator__.cpuset import (
generate_cpuset_cpus,
)


def generate_standalone_redis_server_args(
binary, port, dbdir, configuration_parameters=None
):
added_params = ["port", "protected-mode", "dir"]
# start redis-server
command = [
binary,
"--protected-mode",
"no",
"--port",
"{}".format(port),
"--dir",
dbdir,
]
if configuration_parameters is not None:
for parameter, parameter_value in configuration_parameters.items():
if parameter not in added_params:
command.extend(
[
"--{}".format(parameter),
parameter_value,
]
)
return command


def teardown_containers(redis_containers, container_type):
for container in redis_containers:
try:
container.stop()
except docker.errors.NotFound:
logging.info(
"When trying to stop {} container with id {} and image {} it was already stopped".format(
container_type, container.id, container.image
)
)
pass


def spin_docker_standalone_redis(
ceil_db_cpu_limit,
current_cpu_pos,
docker_client,
redis_configuration_parameters,
redis_containers,
redis_proc_start_port,
run_image,
temporary_dir,
):
mnt_point = "/mnt/redis/"
command = generate_standalone_redis_server_args(
"{}redis-server".format(mnt_point),
redis_proc_start_port,
mnt_point,
redis_configuration_parameters,
)
command_str = " ".join(command)
db_cpuset_cpus, current_cpu_pos = generate_cpuset_cpus(
ceil_db_cpu_limit, current_cpu_pos
)
logging.info(
"Running redis-server on docker image {} (cpuset={}) with the following args: {}".format(
run_image, db_cpuset_cpus, command_str
)
)
container = docker_client.containers.run(
image=run_image,
volumes={
temporary_dir: {"bind": mnt_point, "mode": "rw"},
},
auto_remove=True,
privileged=True,
working_dir=mnt_point,
command=command_str,
network_mode="host",
detach=True,
cpuset_cpus=db_cpuset_cpus,
pid_mode="host",
)
redis_containers.append(container)
return current_cpu_pos
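
A small sketch of the argument builder defined above (the binary path and configuration values are placeholders): extra configuration parameters are appended as "--<name> <value>" pairs, except for the ones the function already sets.

command = generate_standalone_redis_server_args(
    "/mnt/redis/redis-server",
    6379,
    "/mnt/redis/",
    {"maxmemory": "1gb", "port": "7777"},  # "port" is skipped: it is in added_params
)
# command == ["/mnt/redis/redis-server", "--protected-mode", "no", "--port", "6379",
#             "--dir", "/mnt/redis/", "--maxmemory", "1gb"]
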