Skip to content

Move temporary dir deletion to inner function, delete temp dir in skipped benchmarks #248

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Sep 29, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
70 changes: 58 additions & 12 deletions redis_benchmarks_specification/__runner__/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -359,6 +359,19 @@ def process_self_contained_coordinator_stream(
resp_version=None,
override_memtier_test_time=0,
):
def delete_temporary_files(
    temporary_dir_client, full_result_path, benchmark_tool_global
):
    """Clean up the per-test temporary client directory (and, for
    redis-benchmark runs, the standalone result JSON file), unless the
    user asked to preserve temporary client dirs."""
    # Honor the preserve flag from the enclosing scope: keep everything.
    if preserve_temporary_client_dirs is True:
        logging.info(f"Preserving temporary client dir {temporary_dir_client}")
        return
    # NOTE(review): presumably the redis-benchmark JSON result lives outside
    # the temp dir and must be removed separately — confirm against caller.
    if "redis-benchmark" in benchmark_tool_global and full_result_path is not None:
        os.remove(full_result_path)
        logging.info("Removing temporary JSON file")
    # Best-effort removal of the whole temp dir; ignore races/missing files.
    shutil.rmtree(temporary_dir_client, ignore_errors=True)
    logging.info(f"Removing temporary client dir {temporary_dir_client}")

overall_result = True
results_matrix = []
total_test_suite_runs = 0
Expand Down Expand Up @@ -507,6 +520,11 @@ def process_self_contained_coordinator_stream(
test_name, maxmemory, benchmark_required_memory
)
)
delete_temporary_files(
temporary_dir_client=temporary_dir_client,
full_result_path=None,
benchmark_tool_global=benchmark_tool_global,
)
continue

reset_commandstats(redis_conns)
Expand Down Expand Up @@ -543,13 +561,23 @@ def process_self_contained_coordinator_stream(
test_name, priority_upper_limit, priority
)
)
delete_temporary_files(
temporary_dir_client=temporary_dir_client,
full_result_path=None,
benchmark_tool_global=benchmark_tool_global,
)
continue
if priority < priority_lower_limit:
logging.warning(
"Skipping test {} giving the priority limit ({}) is bellow the priority value ({})".format(
test_name, priority_lower_limit, priority
)
)
delete_temporary_files(
temporary_dir_client=temporary_dir_client,
full_result_path=None,
benchmark_tool_global=benchmark_tool_global,
)
continue
logging.info(
"Test {} priority ({}) is within the priority limit [{},{}]".format(
Expand All @@ -567,10 +595,20 @@ def process_self_contained_coordinator_stream(
test_name
)
)
delete_temporary_files(
temporary_dir_client=temporary_dir_client,
full_result_path=None,
benchmark_tool_global=benchmark_tool_global,
)
continue

if dry_run is True:
dry_run_count = dry_run_count + 1
delete_temporary_files(
temporary_dir_client=temporary_dir_client,
full_result_path=None,
benchmark_tool_global=benchmark_tool_global,
)
continue

if "preload_tool" in benchmark_config["dbconfig"]:
Expand Down Expand Up @@ -598,6 +636,11 @@ def process_self_contained_coordinator_stream(
logging.warning(
"Skipping this test given preload result was false"
)
delete_temporary_files(
temporary_dir_client=temporary_dir_client,
full_result_path=None,
benchmark_tool_global=benchmark_tool_global,
)
continue
execute_init_commands(
benchmark_config, r, dbconfig_keyname="dbconfig"
Expand All @@ -618,6 +661,11 @@ def process_self_contained_coordinator_stream(

if dry_run_include_preload is True:
dry_run_count = dry_run_count + 1
delete_temporary_files(
temporary_dir_client=temporary_dir_client,
full_result_path=None,
benchmark_tool_global=benchmark_tool_global,
)
continue

benchmark_tool = extract_client_tool(benchmark_config)
Expand Down Expand Up @@ -692,6 +740,11 @@ def process_self_contained_coordinator_stream(
logging.warning(
"Forcing skip this test given there is an arbitrary commmand and memtier usage. Check https://github.com/RedisLabs/memtier_benchmark/pull/117 ."
)
delete_temporary_files(
temporary_dir_client=temporary_dir_client,
full_result_path=None,
benchmark_tool_global=benchmark_tool_global,
)
continue

client_container_image = extract_client_container_image(
Expand Down Expand Up @@ -899,18 +952,11 @@ def process_self_contained_coordinator_stream(
shutil.copy(full_result_path, dest_fpath)
overall_result &= test_result

if preserve_temporary_client_dirs is True:
logging.info(
f"Preserving temporary client dir {temporary_dir_client}"
)
else:
if "redis-benchmark" in benchmark_tool_global:
os.remove(full_result_path)
logging.info("Removing temporary JSON file")
shutil.rmtree(temporary_dir_client, ignore_errors=True)
logging.info(
f"Removing temporary client dir {temporary_dir_client}"
)
delete_temporary_files(
temporary_dir_client=temporary_dir_client,
full_result_path=full_result_path,
benchmark_tool_global=benchmark_tool_global,
)

table_name = "Results for entire test-suite"
results_matrix_headers = [
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -912,9 +912,7 @@ def process_self_contained_coordinator_stream(
logging.critical("Printing redis container log....")
print("-" * 60)
print(
redis_container.logs(
stdout=True, stderr=True, logs=True
Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Also, there was an issue with this line in contained_coordinator. When the benchmark raised an exception for any reason, we tried to print the container logs, but it resulted in: TypeError: ContainerApiMixin.logs() got an unexpected keyword argument 'logs'. According to https://docker-py.readthedocs.io/en/stable/containers.html#docker.models.containers.Container.logs, the logs() method has no 'logs' parameter. This caused the 'teardown' stage to be skipped, which left temporary folders behind with the redis-server artifact inside.

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

With that change instead we get:

2023-09-13 01:33:49 CRITICAL Some unexpected exception was caught during local work. Failing test....
2023-09-13 01:33:49 CRITICAL <class 'KeyError'>
2023-09-13 01:33:49 INFO Tearing down setup
2023-09-13 01:33:49 INFO When trying to stop DB container with id a9b45d05a8ba306464d79d3d0deeaa2a72dd7d8761b838aefe5d318326e1cf5e and image <Image: 'debian:buster'> it was already stopped
2023-09-13 01:33:49 INFO Removing temporary dirs /root/tmp7rhog4yu and /root/tmpkcu7fzqu

And temporary dirs are removed

)
redis_container.logs(stdout=True, stderr=True)
)
print("-" * 60)
test_result = False
Expand Down