
Avoid RDB usage on unit tests #145

Merged: 3 commits on Dec 7, 2022
3 changes: 2 additions & 1 deletion .github/workflows/tox.yml
@@ -8,6 +8,7 @@ on:
jobs:
pytest:
strategy:
fail-fast: false
matrix:
python-version: [ '3.8', '3.9', '3.10' ]
env:
@@ -38,7 +39,7 @@ jobs:

- name: Run tox
run: |
TST_RUNNER_USE_RDB=0 tox
tox

- name: Upload coverage to Codecov
uses: codecov/codecov-action@v2
8 changes: 5 additions & 3 deletions redis_benchmarks_specification/__builder__/builder.py
@@ -329,11 +329,13 @@ def builder_process_stream(
bin_artifact = open(
"{}src/{}".format(redis_temporary_dir, artifact), "rb"
).read()
bin_artifact_len = len(bytes(bin_artifact))
assert bin_artifact_len > 0
conn.set(bin_key, bytes(bin_artifact), ex=REDIS_BINS_EXPIRE_SECS)
build_stream_fields[artifact] = bin_key
build_stream_fields["{}_len_bytes".format(artifact)] = len(
bytes(bin_artifact)
)
build_stream_fields[
"{}_len_bytes".format(artifact)
] = bin_artifact_len
result = True
if result is True:
stream_id = conn.xadd(
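
As context for the builder.py change above: the artifact length is now computed once, asserted to be non-zero before the binary is cached in Redis, and reused for the stream field. A minimal sketch of that pattern, with a hypothetical helper name and simplified surroundings (not the repository's actual function):

def store_artifact(conn, bin_key, artifact_path, artifact_name, build_stream_fields, expire_secs):
    # Read the compiled binary once.
    with open(artifact_path, "rb") as f:
        bin_artifact = f.read()
    # Fail fast instead of caching a zero-length blob.
    bin_artifact_len = len(bin_artifact)
    assert bin_artifact_len > 0
    # Cache the binary with an expiry and record both its key and its size.
    conn.set(bin_key, bin_artifact, ex=expire_secs)
    build_stream_fields[artifact_name] = bin_key
    build_stream_fields["{}_len_bytes".format(artifact_name)] = bin_artifact_len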
14 changes: 12 additions & 2 deletions redis_benchmarks_specification/__common__/builder_schema.py
@@ -41,7 +41,12 @@ def commit_schema_to_stream(
binary_key,
binary_value,
) = get_commit_dict_from_sha(
fields["git_hash"], gh_org, gh_repo, fields, use_git_timestamp, gh_token
fields["git_hash"],
gh_org,
gh_repo,
fields,
use_git_timestamp,
gh_token,
)
reply_fields["use_git_timestamp"] = fields["use_git_timestamp"]
if "git_timestamp_ms" in fields:
@@ -66,6 +71,7 @@ def get_archive_zip_from_hash(gh_org, gh_repo, git_hash, fields):
gh_org, gh_repo, git_hash
)
try:
logging.info("Fetching data from {}".format(github_url))
response = urlopen(github_url, timeout=5)
content = response.read()
fields["zip_archive_key"] = bin_key
@@ -78,6 +84,7 @@ def get_archive_zip_from_hash(gh_org, gh_repo, git_hash, fields):
)
logging.error(error_msg)
result = False

return result, bin_key, binary_value, error_msg


@@ -107,7 +114,10 @@ def get_commit_dict_from_sha(
commit_dict["git_branch"] = gh_branch

result, binary_key, binary_value, error_msg = get_archive_zip_from_hash(
gh_org, gh_repo, git_hash, commit_dict
gh_org,
gh_repo,
git_hash,
commit_dict,
)
return result, error_msg, commit_dict, commit, binary_key, binary_value

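
The builder_schema.py change above mainly adds a log line before the archive download and reflows long call sites. A rough sketch of that fetch-and-log shape, assuming an illustrative zipball URL and a hypothetical function name:

import logging
from urllib.request import urlopen

def fetch_archive_zip(gh_org, gh_repo, git_hash):
    # Illustrative URL pattern; the real helper builds its own GitHub archive URL.
    github_url = "https://github.com/{}/{}/archive/{}.zip".format(gh_org, gh_repo, git_hash)
    result = True
    binary_value = None
    error_msg = ""
    try:
        logging.info("Fetching data from {}".format(github_url))
        response = urlopen(github_url, timeout=5)
        binary_value = response.read()
    except Exception as e:
        error_msg = "Failed fetching {}. Error: {}".format(github_url, e)
        logging.error(error_msg)
        result = False
    return result, binary_value, error_msg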
5 changes: 1 addition & 4 deletions tox.ini
@@ -9,7 +9,7 @@ basepython = python3

[testenv:integration-tests]
deps = -r{toxinidir}/dev_requirements.txt
passenv = TST_BUILDER_X TST_RUNNER_X TST_RUNNER_USE_RDB GH_TOKEN TST_REDIS_DIR
passenv = TST_BUILDER_X TST_RUNNER_X GH_TOKEN TST_REDIS_DIR

commands =
black --check redis_benchmarks_specification
@@ -26,9 +26,6 @@ docker =
image = redis/redis-stack-server:7.0.2-RC4
ports =
16379:6379/tcp
volumes =
bind:rw:{toxinidir}/utils/tests/test_data/:/data


[docker:db_server]
image = redis/redis-stack-server:7.0.2-RC4
84 changes: 43 additions & 41 deletions utils/tests/test_builder.py
@@ -39,31 +39,27 @@ def test_build_spec_image_prefetch():

def test_commit_schema_to_stream_then_build():
try:
run_builder = True
TST_BUILDER_X = os.getenv("TST_BUILDER_X", "1")
if TST_BUILDER_X == "0":
run_builder = False
if run_builder:
if should_run_builder():
conn = redis.StrictRedis(port=16379)
conn.ping()
conn.flushall()
builder_consumer_group_create(conn, "0")
assert conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT) == 0

result, reply_fields, error_msg = commit_schema_to_stream(
{
"git_hash": "0cf2df84d4b27af4bffd2bf3543838f09e10f874",
"git_branch": "unstable",
},
conn,
"redis",
"redis",
)
assert result == True
assert error_msg == None
assert STREAM_KEYNAME_GH_EVENTS_COMMIT.encode() in conn.keys()
assert conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT) == 1
assert "id" in reply_fields
events_in_pipe = conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT)
if events_in_pipe == 0:
result, reply_fields, error_msg = commit_schema_to_stream(
{
"git_hash": "0cf2df84d4b27af4bffd2bf3543838f09e10f874",
"git_branch": "unstable",
},
conn,
"redis",
"redis",
)
assert result == True
assert error_msg == None
assert STREAM_KEYNAME_GH_EVENTS_COMMIT.encode() in conn.keys()
assert conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT) == 1
assert "id" in reply_fields
builders_folder = "./redis_benchmarks_specification/setups/builders"
different_build_specs = ["gcc:8.5.0-amd64-debian-buster-default.yml"]
previous_id = ">"
@@ -78,33 +74,38 @@ def test_commit_schema_to_stream_then_build():
pass


def should_run_builder():
run_builder = True
TST_BUILDER_X = os.getenv("TST_BUILDER_X", "1")
if TST_BUILDER_X == "0":
run_builder = False
return run_builder


def test_commit_schema_to_stream_then_build_historical_redis():
try:
run_builder = True
TST_BUILDER_X = os.getenv("TST_BUILDER_X", "1")
if TST_BUILDER_X == "0":
run_builder = False
if run_builder:
if should_run_builder():
conn = redis.StrictRedis(port=16379)
conn.ping()
conn.flushall()
builder_consumer_group_create(conn, "0")
assert conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT) == 0
events_in_pipe = conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT)
if events_in_pipe == 0:

result, reply_fields, error_msg = commit_schema_to_stream(
{
"git_hash": "021af7629590c638ae0d4867d4b397f6e0c38ec8",
"git_version": "5.0.13",
},
conn,
"redis",
"redis",
)
assert result == True
assert error_msg == None
assert STREAM_KEYNAME_GH_EVENTS_COMMIT.encode() in conn.keys()
assert conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT) == 1
assert "id" in reply_fields
result, reply_fields, error_msg = commit_schema_to_stream(
{
"git_hash": "021af7629590c638ae0d4867d4b397f6e0c38ec8",
"git_version": "5.0.13",
},
conn,
"redis",
"redis",
)
assert result == True
assert error_msg == None
assert STREAM_KEYNAME_GH_EVENTS_COMMIT.encode() in conn.keys()
assert conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT) == 1
assert "id" in reply_fields
builders_folder = "./redis_benchmarks_specification/setups/builders"
different_build_specs = ["gcc:8.5.0-amd64-debian-buster-default.yml"]
previous_id = ">"
@@ -113,6 +114,7 @@ def test_commit_schema_to_stream_then_build_historical_redis():
)
assert new_builds_count == 1
assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
conn.save()

except redis.exceptions.ConnectionError:
pass
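
The test_builder.py changes replace the RDB preload with an environment-variable gate plus idempotent stream seeding: flush the test Redis, then only push a commit event when the GitHub-events stream is empty. A condensed sketch of that shape (the seeding here uses a plain xadd for brevity; the real tests go through commit_schema_to_stream):

import os
import redis

def should_run_builder():
    # Mirrors the helper added in test_builder.py: run unless TST_BUILDER_X is "0".
    return os.getenv("TST_BUILDER_X", "1") != "0"

def example_test_body(stream_keyname, seed_fields):
    # Condensed shape of the updated tests: connect, flush, and seed the
    # commit stream only when it is empty so reruns stay idempotent.
    if not should_run_builder():
        return
    try:
        conn = redis.StrictRedis(port=16379)
        conn.ping()
        conn.flushall()
        if conn.xlen(stream_keyname) == 0:
            conn.xadd(stream_keyname, seed_fields)
        assert conn.xlen(stream_keyname) == 1
    except redis.exceptions.ConnectionError:
        # Like the real tests, skip silently when no local Redis is available.
        pass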
48 changes: 48 additions & 0 deletions utils/tests/test_builder_schema.py
@@ -0,0 +1,48 @@
# BSD 3-Clause License
#
# Copyright (c) 2021., Redis Labs Modules
# All rights reserved.
#

import redis

from redis_benchmarks_specification.__common__.builder_schema import (
commit_schema_to_stream,
)

from redis_benchmarks_specification.__builder__.builder import (
builder_consumer_group_create,
)
from redis_benchmarks_specification.__common__.env import (
STREAM_KEYNAME_GH_EVENTS_COMMIT,
)
from utils.tests.test_builder import should_run_builder


def test_commit_schema_to_stream():
try:
if should_run_builder():
conn = redis.StrictRedis(port=16379)
conn.ping()
conn.flushall()
builder_consumer_group_create(conn, "0")
events_in_pipe = conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT)
if events_in_pipe == 0:

result, reply_fields, error_msg = commit_schema_to_stream(
{
"git_hash": "021af7629590c638ae0d4867d4b397f6e0c38ec8",
"git_version": "5.0.13",
},
conn,
"redis",
"redis",
)
assert result == True
assert error_msg == None
assert STREAM_KEYNAME_GH_EVENTS_COMMIT.encode() in conn.keys()
assert conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT) == 1
assert "id" in reply_fields

except redis.exceptions.ConnectionError:
pass
Binary file removed utils/tests/test_data/dump.rdb
24 changes: 8 additions & 16 deletions utils/tests/test_self_contained_coordinator.py
@@ -31,7 +31,6 @@
)
from redis_benchmarks_specification.__setups__.topologies import get_topologies
from utils.tests.test_data.api_builder_common import flow_1_and_2_api_builder_checks
from utils.tests.test_self_contained_coordinator_memtier import rdb_load_in_tests


def test_extract_client_cpu_limit():
@@ -99,22 +98,15 @@ def test_self_contained_coordinator_blocking_read():
if run_coordinator:
conn = redis.StrictRedis(port=16379)
conn.ping()
build_variant_name = "gcc:8.5.0-amd64-debian-buster-default"
expected_datapoint_ts = None
use_rdb = rdb_load_in_tests(conn)
if use_rdb is False:
conn.flushall()
build_variant_name, reply_fields = flow_1_and_2_api_builder_checks(conn)
if b"git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(
reply_fields[b"git_timestamp_ms"].decode()
)
if b"git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(
reply_fields[b"git_timestamp_ms"].decode()
)
if "git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(reply_fields["git_timestamp_ms"])
conn.flushall()
build_variant_name, reply_fields = flow_1_and_2_api_builder_checks(conn)
if b"git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(reply_fields[b"git_timestamp_ms"].decode())
if b"git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(reply_fields[b"git_timestamp_ms"].decode())
if "git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(reply_fields["git_timestamp_ms"])

assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
50 changes: 12 additions & 38 deletions utils/tests/test_self_contained_coordinator_memtier.py
@@ -37,18 +37,13 @@ def test_self_contained_coordinator_blocking_read():
if run_coordinator:
conn = redis.StrictRedis(port=16379)
conn.ping()
build_variant_name = "gcc:8.5.0-amd64-debian-buster-default"
expected_datapoint_ts = None
use_rdb = rdb_load_in_tests(conn)
if use_rdb is False:
conn.flushall()
build_variant_name, reply_fields = flow_1_and_2_api_builder_checks(conn)
if b"git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(
reply_fields[b"git_timestamp_ms"].decode()
)
if "git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(reply_fields["git_timestamp_ms"])
conn.flushall()
build_variant_name, reply_fields = flow_1_and_2_api_builder_checks(conn)
if b"git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(reply_fields[b"git_timestamp_ms"].decode())
if "git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(reply_fields["git_timestamp_ms"])

assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
@@ -195,23 +190,6 @@ def test_self_contained_coordinator_blocking_read():
pass


def rdb_load_in_tests(conn):
use_rdb = True
TST_RUNNER_USE_RDB = os.getenv("TST_RUNNER_USE_RDB", "1")
if TST_RUNNER_USE_RDB == "0":
use_rdb = False
if use_rdb:
try:
conn.execute_command("DEBUG", "RELOAD", "NOSAVE")
except redis.exceptions.ResponseError as e:
if "DEBUG command not allowed" in e.__str__():
use_rdb = False
pass
else:
raise e
return use_rdb


def test_self_contained_coordinator_skip_build_variant():
try:
run_coordinator = True
@@ -223,16 +201,12 @@ def test_self_contained_coordinator_skip_build_variant():
conn.ping()
build_variant_name = "gcc:8.5.0-amd64-debian-buster-default"
expected_datapoint_ts = None
use_rdb = rdb_load_in_tests(conn)
if use_rdb is False:
conn.flushall()
build_variant_name, reply_fields = flow_1_and_2_api_builder_checks(conn)
if b"git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(
reply_fields[b"git_timestamp_ms"].decode()
)
if "git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(reply_fields["git_timestamp_ms"])
conn.flushall()
build_variant_name, reply_fields = flow_1_and_2_api_builder_checks(conn)
if b"git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(reply_fields[b"git_timestamp_ms"].decode())
if "git_timestamp_ms" in reply_fields:
expected_datapoint_ts = int(reply_fields["git_timestamp_ms"])

assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
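
A final note on the coordinator tests: the expected datapoint timestamp now comes from the builder reply rather than a preloaded RDB, and the reply keys may arrive as bytes or str depending on client decoding, which is why the checks above appear twice. A small helper capturing the same idea (the function name is illustrative):

def extract_git_timestamp_ms(reply_fields):
    # Accept either bytes or str keys, mirroring the duplicated checks in the tests.
    if b"git_timestamp_ms" in reply_fields:
        return int(reply_fields[b"git_timestamp_ms"].decode())
    if "git_timestamp_ms" in reply_fields:
        return int(reply_fields["git_timestamp_ms"])
    return None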