Enabled defaults metrics parsing and merging #226

Merged · 5 commits · Apr 6, 2023
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "redis-benchmarks-specification"
version = "0.1.68"
version = "0.1.69"
description = "The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute."
authors = ["filipecosta90 <filipecosta.90@gmail.com>","Redis Performance Group <performance@redis.com>"]
readme = "Readme.md"
22 changes: 19 additions & 3 deletions redis_benchmarks_specification/__builder__/builder.py
@@ -61,7 +61,7 @@ def main():
"--logname", type=str, default=None, help="logname to write the logs to"
)
parser.add_argument(
"--arch", type=str, default="x86", help="arch to build artifacts"
"--arch", type=str, default="amd64", help="arch to build artifacts"
)
parser.add_argument(
"--setups-folder",
@@ -132,6 +132,9 @@ def main():
logging.error("Error message {}".format(e.__str__()))
exit(1)

arch = args.arch
logging.info("Building for arch: {}".format(arch))

build_spec_image_prefetch(builders_folder, different_build_specs)

builder_consumer_group_create(conn)
@@ -144,6 +147,7 @@
different_build_specs,
previous_id,
args.docker_air_gap,
arch,
)


@@ -169,7 +173,12 @@ def builder_consumer_group_create(conn, id="$"):


def builder_process_stream(
builders_folder, conn, different_build_specs, previous_id, docker_air_gap=False
builders_folder,
conn,
different_build_specs,
previous_id,
docker_air_gap=False,
arch="amd64",
):
new_builds_count = 0
logging.info("Entering blocking read waiting for work.")
@@ -217,6 +226,14 @@ def builder_process_stream(
build_config_metadata = get_build_config_metadata(build_config)

build_image = build_config["build_image"]
build_arch = build_config["arch"]
if build_arch != arch:
logging.info(
"skipping build spec {} given arch {}!={}".format(
build_spec, build_arch, arch
)
)
continue
run_image = build_image
if "run_image" in build_config:
run_image = build_config["run_image"]
@@ -247,7 +264,6 @@ def builder_process_stream(
compiler = build_config["compiler"]
cpp_compiler = build_config["cpp_compiler"]
build_os = build_config["os"]
build_arch = build_config["arch"]

build_artifacts = ["redis-server"]
if "build_artifacts" in build_config:
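The builder change above filters incoming build specs by architecture: each spec's `arch` field is compared against the builder's new `--arch` argument (default `amd64`), and non-matching specs are logged and skipped before any image prefetch or build work. A minimal sketch of that filter in isolation, assuming specs arrive as (name, parsed-YAML dict) pairs and always declare an `arch` key, as the diff does:

```python
import logging


def filter_specs_by_arch(build_specs, arch="amd64"):
    """Yield only the build configs whose 'arch' matches this builder instance.

    build_specs: iterable of (spec_name, build_config) pairs, where
    build_config is the parsed YAML dict of a builder setup file.
    """
    for build_spec, build_config in build_specs:
        build_arch = build_config["arch"]  # specs are assumed to declare arch
        if build_arch != arch:
            # Same skip-and-log behavior as builder_process_stream() above.
            logging.info(
                "skipping build spec %s given arch %s!=%s",
                build_spec,
                build_arch,
                arch,
            )
            continue
        yield build_spec, build_config


# Example: an arm64-only builder ignores specs declared for amd64.
specs = [
    ("example-amd64-spec", {"arch": "amd64"}),
    ("gcc:8.5.0-arm64-debian-buster-default", {"arch": "arm64"}),
]
print([name for name, _ in filter_specs_by_arch(specs, arch="arm64")])
```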
3 changes: 2 additions & 1 deletion redis_benchmarks_specification/__common__/runner.py
@@ -143,6 +143,7 @@ def exporter_datasink_common(
tf_github_repo,
tf_triggering_env,
topology_spec_name,
default_metrics=None,
):
logging.info("Using datapoint_time_ms: {}".format(datapoint_time_ms))
timeseries_test_sucess_flow(
@@ -151,7 +152,7 @@
benchmark_config,
benchmark_duration_seconds,
dataset_load_duration_seconds,
None,
default_metrics,
topology_spec_name,
setup_name,
None,
4 changes: 3 additions & 1 deletion redis_benchmarks_specification/__runner__/runner.py
@@ -157,7 +157,7 @@ def run_client_runner_logic(args, project_name, project_name_suffix, project_ver
resp_version = args.resp
client_aggregated_results_folder = args.client_aggregated_results_folder
preserve_temporary_client_dirs = args.preserve_temporary_client_dirs
override_memtier_test_time = args.override_memtier_test_time

docker_client = docker.from_env()
home = str(Path.home())
profilers_list = []
@@ -172,6 +172,7 @@
)
)
exit(1)
override_memtier_test_time = args.override_memtier_test_time
if override_memtier_test_time > 0:
logging.info(
"Overriding memtier benchmark --test-time to {} seconds".format(
@@ -855,6 +856,7 @@ def process_self_contained_coordinator_stream(
tf_github_repo,
tf_triggering_env,
topology_spec_name,
default_metrics,
)
test_result = True
total_test_suite_runs = total_test_suite_runs + 1
redis_benchmarks_specification/__self_contained_coordinator__/args.py
@@ -135,4 +135,16 @@ def create_self_contained_coordinator_args(project_name):
action="store_true",
help="Run in verbose mode.",
)
parser.add_argument(
"--override-memtier-test-time",
default=0,
type=int,
help="override memtier test-time for each benchmark. By default will preserve test time specified in test spec",
)
parser.add_argument(
"--defaults_filename",
type=str,
default="{}/defaults.yml".format(SPECS_PATH_TEST_SUITES),
help="specify the defaults file containing spec topologies, common metric extractions,etc...",
)
return parser
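Both new coordinator flags keep backward-compatible defaults: `--override-memtier-test-time` stays at 0 (the test time from the spec is preserved) and `--defaults_filename` points at `defaults.yml` under the test-suites specs folder. A standalone sketch of how they resolve, with SPECS_PATH_TEST_SUITES replaced by a placeholder value since the real constant lives in the project's constants module and is not shown in this diff:

```python
import argparse

# Placeholder; in the repository this is assumed to be imported from the
# project's constants module rather than hard-coded like this.
SPECS_PATH_TEST_SUITES = "./redis_benchmarks_specification/test-suites"

parser = argparse.ArgumentParser()
parser.add_argument(
    "--override-memtier-test-time",
    default=0,
    type=int,
    help="override memtier test-time for each benchmark (0 keeps the spec value)",
)
parser.add_argument(
    "--defaults_filename",
    type=str,
    default="{}/defaults.yml".format(SPECS_PATH_TEST_SUITES),
    help="defaults file containing spec topologies, common metric extractions, etc.",
)

args = parser.parse_args([])  # empty argv exercises the defaults
assert args.override_memtier_test_time == 0
print(args.defaults_filename)  # .../test-suites/defaults.yml
```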
redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py
@@ -33,6 +33,9 @@
reset_commandstats,
exporter_datasink_common,
)
from redis_benchmarks_specification.__runner__.runner import (
print_results_table_stdout,
)
from redis_benchmarks_specification.__self_contained_coordinator__.args import (
create_self_contained_coordinator_args,
)
@@ -61,6 +64,7 @@
from redisbench_admin.utils.benchmark_config import (
get_final_benchmark_config,
extract_redis_dbconfig_parameters,
get_defaults,
)
from redisbench_admin.utils.local import get_local_run_full_filename
from redisbench_admin.utils.results import post_process_benchmark_results
@@ -197,6 +201,15 @@ def main():
datasink_push_results_redistimeseries = args.datasink_push_results_redistimeseries
grafana_profile_dashboard = args.grafana_profile_dashboard

defaults_filename = args.defaults_filename
(
_,
default_metrics,
_,
_,
_,
) = get_defaults(defaults_filename)

# Consumer id
consumer_pos = args.consumer_pos
logging.info("Consumer pos {}".format(consumer_pos))
@@ -221,6 +234,13 @@
)
exit(1)

override_memtier_test_time = args.override_memtier_test_time
if override_memtier_test_time > 0:
logging.info(
"Overriding memtier benchmark --test-time to {} seconds".format(
override_memtier_test_time
)
)
logging.info("Entering blocking read waiting for work.")
if stream_id is None:
stream_id = args.consumer_start_id
@@ -242,6 +262,8 @@
redis_proc_start_port,
consumer_pos,
docker_air_gap,
override_memtier_test_time,
default_metrics,
)
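Taken together, the coordinator changes load the defaults file once in main() and thread the resulting metrics list all the way down: get_defaults() from redisbench_admin returns a 5-tuple of which only the second element, the default metric extraction list, is kept; it is then passed through self_contained_coordinator_blocking_read() into process_self_contained_coordinator_stream(), where it feeds both print_results_table_stdout() and exporter_datasink_common() in place of the previous hard-coded None. A condensed sketch of that flow, with the defaults path assumed for illustration:

```python
from redisbench_admin.utils.benchmark_config import get_defaults

# Assumed location; in the coordinator this comes from args.defaults_filename.
defaults_filename = "redis_benchmarks_specification/test-suites/defaults.yml"

# Only the second element of the 5-tuple (the default metrics list) is used;
# the remaining values are ignored, exactly as in the diff above.
(
    _,
    default_metrics,
    _,
    _,
    _,
) = get_defaults(defaults_filename)

# default_metrics then rides along with each processed stream entry and ends
# up in print_results_table_stdout() and exporter_datasink_common().
print(default_metrics)
```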


@@ -262,6 +284,8 @@ def self_contained_coordinator_blocking_read(
redis_proc_start_port=6379,
consumer_pos=1,
docker_air_gap=False,
override_test_time=None,
default_metrics=None,
):
num_process_streams = 0
num_process_test_suites = 0
@@ -304,6 +328,9 @@
cpuset_start_pos,
redis_proc_start_port,
docker_air_gap,
"defaults.yml",
None,
default_metrics,
)
num_process_streams = num_process_streams + 1
num_process_test_suites = num_process_test_suites + total_test_suite_runs
@@ -373,6 +400,8 @@ def process_self_contained_coordinator_stream(
redis_proc_start_port=6379,
docker_air_gap=False,
defaults_filename="defaults.yml",
override_test_time=None,
default_metrics=[],
):
stream_id = "n/a"
overall_result = False
@@ -796,6 +825,15 @@ def process_self_contained_coordinator_stream(
"r",
) as json_file:
results_dict = json.load(json_file)
print_results_table_stdout(
benchmark_config,
default_metrics,
results_dict,
setup_type,
test_name,
None,
)

dataset_load_duration_seconds = 0

exporter_datasink_common(
Expand All @@ -819,6 +857,7 @@ def process_self_contained_coordinator_stream(
tf_github_repo,
tf_triggering_env,
topology_spec_name,
default_metrics,
)
r.shutdown(save=False)
test_result = True
@@ -867,13 +906,13 @@ def process_self_contained_coordinator_stream(
)
)
pass
shutil.rmtree(temporary_dir, ignore_errors=True)
shutil.rmtree(temporary_dir_client, ignore_errors=True)
logging.info(
"Removing temporary dirs {} and {}".format(
temporary_dir, temporary_dir_client
)
)
shutil.rmtree(temporary_dir, ignore_errors=True)
shutil.rmtree(temporary_dir_client, ignore_errors=True)

overall_result &= test_result

New file (20 additions): arm64 builder spec gcc:8.5.0-arm64-debian-buster-default
@@ -0,0 +1,20 @@
version: 0.1
id: gcc:8.5.0-arm64-debian-buster-default
os: debian-buster
arch: arm64
compiler: "gcc"
cpp_compiler: "g++"
kind: docker
build_image: gcc:8.5.0-buster
run_image: debian:buster
description: "Using GNU Compiler Containers (https://hub.docker.com/_/gcc?tab=description)
pre-configured environment with all the tools required to build with gcc."
metadata:
compiler: "gcc"
compiler_version: "8.5.0"
os: debian-buster
arch: arm64

env:
REDIS_CFLAGS: "-g -fno-omit-frame-pointer"
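With this spec in place, a builder started with the default --arch amd64 skips it, while one started with --arch arm64 picks it up, builds inside gcc:8.5.0-buster, and runs artifacts on debian:buster. A small sketch of that selection, loading only the fields relevant to the arch check (the real builder reads the full spec file from the setups folder):

```python
import yaml

# Trimmed copy of the spec above; the builder loads the complete YAML file.
spec_text = """
id: gcc:8.5.0-arm64-debian-buster-default
arch: arm64
build_image: gcc:8.5.0-buster
run_image: debian:buster
"""

build_config = yaml.safe_load(spec_text)

for builder_arch in ("amd64", "arm64"):
    if build_config["arch"] != builder_arch:
        print("--arch {}: spec skipped".format(builder_arch))
    else:
        print(
            "--arch {}: building with {}".format(
                builder_arch, build_config["build_image"]
            )
        )
```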