test: Format with black
Signed-off-by: Jonathan Woollett-Light <jcawl@amazon.co.uk>
Jonathan Woollett-Light authored and JonathanWoollett-Light committed Jun 16, 2023
1 parent 285971e commit 56bfdc9
Showing 8 changed files with 199 additions and 195 deletions.
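
The diff below is mechanical reformatting only. As a rough illustration of the kind of rewrites black applies in this commit (single quotes normalized to double quotes, docstring padding stripped, wrapped call arguments joined onto one line), here is a minimal before/after sketch; the names are hypothetical and not taken from the diff itself.

# Before black (illustrative only; the names here are hypothetical):
import os

TAGS = os.getenv('AGENT_TAGS')


def describe(step):
    """ Return a label for the step. """
    return "{}-{}".format(step.get('name'),
                          step.get('platform'))


# After black: same code, with quotes normalized, docstring padding stripped,
# and the wrapped call joined onto one line.
TAGS = os.getenv("AGENT_TAGS")


def describe(step):
    """Return a label for the step."""
    return "{}-{}".format(step.get("name"), step.get("platform"))
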
156 changes: 81 additions & 75 deletions .buildkite/autogenerate_pipeline.py
@@ -64,19 +64,19 @@
# This represents the version of the Buildkite Docker plugin.
DOCKER_PLUGIN_VERSION = "v5.3.0"

X86_AGENT_TAGS = os.getenv('X86_LINUX_AGENT_TAGS')
AARCH64_AGENT_TAGS = os.getenv('AARCH64_LINUX_AGENT_TAGS')
DOCKER_PLUGIN_CONFIG = os.getenv('DOCKER_PLUGIN_CONFIG')
TESTS_TO_SKIP = os.getenv('TESTS_TO_SKIP')
TIMEOUTS_MIN = os.getenv('TIMEOUTS_MIN')
X86_AGENT_TAGS = os.getenv("X86_LINUX_AGENT_TAGS")
AARCH64_AGENT_TAGS = os.getenv("AARCH64_LINUX_AGENT_TAGS")
DOCKER_PLUGIN_CONFIG = os.getenv("DOCKER_PLUGIN_CONFIG")
TESTS_TO_SKIP = os.getenv("TESTS_TO_SKIP")
TIMEOUTS_MIN = os.getenv("TIMEOUTS_MIN")
# This env allows setting the hypervisor on which the tests are running at the
# pipeline level. This will not override the hypervisor tag in case one is
# already specified in the test definition.
# Most of the repositories don't really need to run on KVM per se, but we are
# experiencing some timeouts mostly with the mshv hosts right now, and we are
# fixing the default to kvm to work around that problem.
# More details here: https://github.com/rust-vmm/community/issues/137
DEFAULT_AGENT_TAG_HYPERVISOR = os.getenv('DEFAULT_AGENT_TAG_HYPERVISOR', 'kvm')
DEFAULT_AGENT_TAG_HYPERVISOR = os.getenv("DEFAULT_AGENT_TAG_HYPERVISOR", "kvm")

PARENT_DIR = pathlib.Path(__file__).parent.resolve()

@@ -99,59 +99,59 @@ def __init__(self):
# dictionaries are ordered. For readability reasons, this order should
# not be changed.
self.step_config = {
'label': None,
'command': None,
'retry': {'automatic': False},
'agents': {'os': 'linux'},
'plugins': [
"label": None,
"command": None,
"retry": {"automatic": False},
"agents": {"os": "linux"},
"plugins": [
{
f"docker#{DOCKER_PLUGIN_VERSION}": {
'image': f"rustvmm/dev:{CONTAINER_VERSION}",
'always-pull': True
"image": f"rustvmm/dev:{CONTAINER_VERSION}",
"always-pull": True,
}
}
],
'timeout_in_minutes': 15
"timeout_in_minutes": 15,
}

def _set_platform(self, platform):
""" Set platform if given in the json input. """
"""Set platform if given in the json input."""

if platform:
# We need to change `aarch64` to `arm` because of the way we are
# setting the tags on the host.
if platform == 'aarch64':
platform = 'arm'
self.step_config['agents']['platform'] = f"{platform}.metal"
if platform == "aarch64":
platform = "arm"
self.step_config["agents"]["platform"] = f"{platform}.metal"

def _set_hypervisor(self, hypervisor):
""" Set hypervisor if given in the json input. """
supported_hypervisors = ['kvm', 'mshv']
"""Set hypervisor if given in the json input."""
supported_hypervisors = ["kvm", "mshv"]
if hypervisor:
if hypervisor in supported_hypervisors:
self.step_config['agents']['hypervisor'] = hypervisor
self.step_config["agents"]["hypervisor"] = hypervisor

def _set_conditional(self, conditional):
""" Set conditional if given in the json input. """
"""Set conditional if given in the json input."""

if conditional:
self.step_config['if'] = conditional
self.step_config["if"] = conditional

def _set_timeout_in_minutes(self, timeout):
""" Set the timeout if given in the json input. """
"""Set the timeout if given in the json input."""
if timeout:
self.step_config['timeout_in_minutes'] = timeout
self.step_config["timeout_in_minutes"] = timeout

def _set_agent_queue(self, queue):
"""Set the agent queue if provided in the json input."""
if queue:
self.step_config['agents']['queue'] = queue
self.step_config["agents"]["queue"] = queue

def _add_docker_config(self, cfg):
""" Add configuration for docker if given in the json input. """
"""Add configuration for docker if given in the json input."""

if cfg:
target = self.step_config['plugins'][0][f"docker#{DOCKER_PLUGIN_VERSION}"]
target = self.step_config["plugins"][0][f"docker#{DOCKER_PLUGIN_VERSION}"]
for key, val in cfg.items():
target[key] = val

@@ -164,13 +164,11 @@ def _env_change_config(self, test_name, env_var, target, override=False):
if env_var:
env_cfg = json.loads(env_var)

tests = env_cfg.get('tests')
assert tests, \
f"Environment variable {env_var} is missing the `tests` key."
tests = env_cfg.get("tests")
assert tests, f"Environment variable {env_var} is missing the `tests` key."

cfg = env_cfg.get('cfg')
assert cfg, \
f"Environment variable {env_var} is missing the `cfg` key."
cfg = env_cfg.get("cfg")
assert cfg, f"Environment variable {env_var} is missing the `cfg` key."

if test_name in tests:
if override:
@@ -186,17 +184,17 @@ def _env_override_agent_tags(self, test_name):
"""

env_var = None
platform = self.step_config['agents'].get('platform')
platform = self.step_config["agents"].get("platform")

# Since the platform is optional, only override the config if the
# platform was provided.
if platform:
if platform == 'x86_64.metal' and X86_AGENT_TAGS:
if platform == "x86_64.metal" and X86_AGENT_TAGS:
env_var = X86_AGENT_TAGS
if platform == 'arm.metal' and AARCH64_AGENT_TAGS:
if platform == "arm.metal" and AARCH64_AGENT_TAGS:
env_var = AARCH64_AGENT_TAGS

target = self.step_config['agents']
target = self.step_config["agents"]
self._env_change_config(test_name, env_var, target, override=True)

def _env_add_docker_config(self, test_name):
@@ -205,7 +203,7 @@ def _env_add_docker_config(self, test_name):
`DOCKER_PLUGIN_CONFIG` environment variable.
"""

target = self.step_config['plugins'][0][f"docker#{DOCKER_PLUGIN_VERSION}"]
target = self.step_config["plugins"][0][f"docker#{DOCKER_PLUGIN_VERSION}"]
self._env_change_config(test_name, DOCKER_PLUGIN_CONFIG, target)

def _env_override_timeout(self, test_name):
@@ -221,28 +219,25 @@ def build(self, input):
Further configuration from environment variables may be added.
"""

test_name = input.get('test_name')
command = input.get('command')
platform = input.get('platform')
hypervisor = input.get('hypervisor')
docker = input.get('docker_plugin')
conditional = input.get('conditional')
timeout = input.get('timeout_in_minutes')
queue = input.get('queue')
test_name = input.get("test_name")
command = input.get("command")
platform = input.get("platform")
hypervisor = input.get("hypervisor")
docker = input.get("docker_plugin")
conditional = input.get("conditional")
timeout = input.get("timeout_in_minutes")
queue = input.get("queue")

# Mandatory keys.
assert test_name, "Step is missing test name."
platform_string = f"-{platform}" if platform else ""
self.step_config['label'] = f"{test_name}{platform_string}"
self.step_config["label"] = f"{test_name}{platform_string}"

assert command, "Step is missing command."
if "{target_platform}" in command:
assert platform, \
"Command requires platform, but platform is missing."
command = command.replace(
"{target_platform}", platform
)
self.step_config['command'] = command
assert platform, "Command requires platform, but platform is missing."
command = command.replace("{target_platform}", platform)
self.step_config["command"] = command

# Optional keys.
self._set_platform(platform)
@@ -262,9 +257,19 @@ def build(self, input):
# forwarding the key, values without any change.
# We need to filter for keys that have special meaning and which we
# don't want to re-add.
special_keys = ['conditional', 'docker_plugin', 'platform', 'test_name', 'queue', 'hypervisor']
additional_keys = {k: v for k, v in input.items() if not (k in self.step_config) and
not(k in special_keys)}
special_keys = [
"conditional",
"docker_plugin",
"platform",
"test_name",
"queue",
"hypervisor",
]
additional_keys = {
k: v
for k, v in input.items()
if not (k in self.step_config) and not (k in special_keys)
}
if additional_keys:
self.step_config.update(additional_keys)

@@ -283,15 +288,15 @@ def __init__(self):
self.bk_config = None

def build(self, input):
""" Build the final Buildkite configuration from the json input. """
"""Build the final Buildkite configuration from the json input."""

self.bk_config = {'steps': []}
tests = input.get('tests')
self.bk_config = {"steps": []}
tests = input.get("tests")
assert tests, "Input is missing list of tests."

for test in tests:
platforms = test.get('platform')
test_name = test.get('test_name')
platforms = test.get("platform")
test_name = test.get("test_name")

if TESTS_TO_SKIP:
tests_to_skip = json.loads(TESTS_TO_SKIP)
@@ -305,20 +310,20 @@ def build(self, input):

for platform in platforms:
step_input = copy.deepcopy(test)
step_input['platform'] = platform
if not step_input.get('hypervisor'):
step_input['hypervisor'] = DEFAULT_AGENT_TAG_HYPERVISOR
step_input["platform"] = platform
if not step_input.get("hypervisor"):
step_input["hypervisor"] = DEFAULT_AGENT_TAG_HYPERVISOR

step = BuildkiteStep()
step_output = step.build(step_input)
self.bk_config['steps'].append(step_output)
self.bk_config["steps"].append(step_output)

# Return the object's attributes and their values as a dictionary.
return self.bk_config


def generate_pipeline(config_file):
""" Generate the pipeline yaml file from a json configuration file. """
"""Generate the pipeline yaml file from a json configuration file."""

with open(config_file) as json_file:
json_cfg = json.load(json_file)
@@ -329,7 +334,7 @@ def generate_pipeline(config_file):
yaml.dump(output, sys.stdout, sort_keys=False)


if __name__ == '__main__':
if __name__ == "__main__":
help_text = dedent(
"""
This script supports overriding the following configurations through
@@ -345,17 +350,18 @@ def generate_pipeline(config_file):
- TIMEOUTS_MIN: overrides the timeout value for specific tests.
"""
)
parser = ArgumentParser(description=help_text,
formatter_class=RawTextHelpFormatter)
parser = ArgumentParser(description=help_text, formatter_class=RawTextHelpFormatter)
# By default we're generating the rust-vmm-ci pipeline with the test
# configuration committed to this repository.
# This parameter is useful for generating the pipeline for repositories
# that have custom pipelines, and it helps with keeping the container
# version the same across pipelines.
parser.add_argument('-t', '--test-description',
metavar="JSON_FILE",
help='The path to the JSON file containing the test'
' description for the CI.',
default=f'{PARENT_DIR}/test_description.json')
parser.add_argument(
"-t",
"--test-description",
metavar="JSON_FILE",
help="The path to the JSON file containing the test" " description for the CI.",
default=f"{PARENT_DIR}/test_description.json",
)
args = parser.parse_args()
generate_pipeline(args.test_description)
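
The script above turns a JSON test description into a Buildkite pipeline and prints it as YAML on stdout. Below is a rough sketch of the inputs it expects, inferred from the mandatory keys asserted in BuildkiteStep.build and the `tests`/`cfg` assertions in _env_change_config; the test name, command, and tag values are hypothetical, not copied from the repository's test_description.json.

import json

# Hypothetical test description entry; only "test_name" and "command" are
# mandatory, and "{target_platform}" is substituted when "platform" is given.
test_description = {
    "tests": [
        {
            "test_name": "build-gnu",
            "command": "cargo build --target {target_platform}-unknown-linux-gnu",
            "platform": ["x86_64", "aarch64"],
        }
    ]
}

# Hypothetical shape of the X86_LINUX_AGENT_TAGS / AARCH64_LINUX_AGENT_TAGS /
# DOCKER_PLUGIN_CONFIG overrides: a JSON object with "tests" and "cfg" keys,
# as required by the assertions in _env_change_config.
X86_LINUX_AGENT_TAGS = json.dumps(
    {"tests": ["build-gnu"], "cfg": {"queue": "isolated", "hypervisor": "kvm"}}
)
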
14 changes: 14 additions & 0 deletions .github/workflows/black.yaml
@@ -0,0 +1,14 @@
on:
push:

jobs:
black:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3

- name: Install black
run: pip install black

- name: Run black
run: black . --check
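
The new workflow above runs black in check mode on every push. A rough local equivalent, assuming black is installed in the current environment and the command is run from the repository root:

# Rough local equivalent of the CI check above.
import subprocess

# --check reports files that would be reformatted without modifying them;
# drop it to apply the formatting in place.
subprocess.run(["black", ".", "--check"], check=True)
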
14 changes: 4 additions & 10 deletions integration_tests/conftest.py
@@ -15,28 +15,22 @@ def pytest_addoption(parser):
"--profile",
default=PROFILE_CI,
choices=[PROFILE_CI, PROFILE_DEVEL],
help="Profile for running the test: {} or {}".format(
PROFILE_CI,
PROFILE_DEVEL
)
help="Profile for running the test: {} or {}".format(PROFILE_CI, PROFILE_DEVEL),
)
parser.addoption(
"--no-cleanup",
action="store_true",
default=False,
help="Keep the coverage report in `kcov_output` directory. If this "
"flag is not provided, both coverage related directories are "
"removed."
"flag is not provided, both coverage related directories are "
"removed.",
)

parser.addoption(
"--test-scope",
default=WORKSPACE,
choices=[WORKSPACE, CRATE],
help="Defines the scope of running tests: {} or {}".format(
WORKSPACE,
CRATE
)
help="Defines the scope of running tests: {} or {}".format(WORKSPACE, CRATE),
)
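
For reference, a hedged sketch of how the options defined above might be passed to pytest; the test directory and the "devel" profile string are assumptions, since the actual values behind PROFILE_CI/PROFILE_DEVEL and WORKSPACE/CRATE are not shown in this diff.

# Hypothetical invocation of the integration tests using the options above.
import subprocess

subprocess.run(
    ["pytest", "integration_tests", "--profile", "devel", "--no-cleanup"],
    check=True,
)
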

