Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Run Tox Test Environments in Parallel #9034

Merged
merged 17 commits into from
Jan 7, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions eng/pipelines/templates/jobs/archetype-sdk-client.yml
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,7 @@ jobs:
PythonVersion: $(PythonVersion)
BuildTargetingString: ${{ parameters.BuildTargetingString }}
ToxTestEnv: 'whl,sdist'
ToxEnvParallel: '--tenvparallel'
BeforeTestSteps:
- task: DownloadPipelineArtifact@0
inputs:
Expand Down
2 changes: 2 additions & 0 deletions eng/pipelines/templates/steps/build-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ parameters:
BuildTargetingString: 'azure-*'
ToxTestEnv: ""
RunCoverage: ne(variables['CoverageArg'], '--disablecov')
ToxEnvParallel: ''

steps:
- pwsh: |
Expand Down Expand Up @@ -41,6 +42,7 @@ steps:
--mark_arg="${{ parameters.TestMarkArgument }}"
--service="${{ parameters.ServiceDirectory }}"
--toxenv="${{ parameters.ToxTestEnv }}"
${{ parameters.ToxEnvParallel }}
env: ${{ parameters.EnvVars }}

- task: PublishTestResults@2
Expand Down
22 changes: 15 additions & 7 deletions eng/tox/tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
# note that this envlist is the default set of environments that will run if a target environment is not selected.
envlist = whl,sdist


[tools]
deps =
-r ../../../eng/test_tools.txt
Expand All @@ -21,6 +22,7 @@ deps =

[testenv]
default_pytest_params = --junitxml={toxinidir}/test-junit-{envname}.xml --verbose --durations=10 --ignore=azure
parallel_show_output =True
pre-deps =
wheel
skip_install = true
Expand All @@ -35,10 +37,11 @@ setenv =
deps = {[base]deps}
changedir = {toxinidir}
commands =
{envbindir}/python {toxinidir}/../../../eng/tox/create_wheel_and_install.py -d {distdir} -p {toxinidir}
{envbindir}/python {toxinidir}/../../../eng/tox/create_wheel_and_install.py -d {envtmpdir} -p {toxinidir}
pip freeze
pytest \
{[testenv]default_pytest_params} \
--ignore=.tox \
{posargs} \
{toxinidir}

Expand Down Expand Up @@ -79,6 +82,8 @@ commands =
pip freeze
pytest \
{posargs} \
--no-cov \
--ignore=.tox \
{toxinidir}


Expand All @@ -92,8 +97,10 @@ deps =
commands =
pytest \
{posargs} \
--ignore=.tox \
{toxinidir}


[testenv:sphinx]
skipsdist = false
skip_install = false
Expand Down Expand Up @@ -127,7 +134,7 @@ changedir = {toxinidir}
deps =
{[tools]deps}
commands =
{envbindir}/python {toxinidir}/../../../eng/tox/create_wheel_and_install.py -d {distdir} -p {toxinidir}
{envbindir}/python {toxinidir}/../../../eng/tox/create_wheel_and_install.py -d {envtmpdir} -p {toxinidir}
{envbindir}/python {toxinidir}/../../../eng/tox/import_all.py -t {toxinidir}


Expand All @@ -137,8 +144,8 @@ skip_install = true
changedir = {toxinidir}
deps =
commands =
{envbindir}/python {toxinidir}/../../../eng/tox/create_wheel_and_install.py -d {distdir} -p {toxinidir} --skip-install True
{envbindir}/python {toxinidir}/../../../eng/tox/verify_whl.py -d {distdir} -t {toxinidir}
{envbindir}/python {toxinidir}/../../../eng/tox/create_wheel_and_install.py -d {envtmpdir} -p {toxinidir} --skip-install True
{envbindir}/python {toxinidir}/../../../eng/tox/verify_whl.py -d {envtmpdir} -t {toxinidir}


[testenv:verifysdist]
Expand All @@ -147,8 +154,8 @@ skip_install = true
changedir = {toxinidir}
deps =
commands =
{envbindir}/python {toxinidir}/setup.py --q sdist --format zip -d {distdir}
{envbindir}/python {toxinidir}/../../../eng/tox/verify_sdist.py -d {distdir} -t {toxinidir}
{envbindir}/python {toxinidir}/setup.py --q sdist --format zip -d {envtmpdir}
{envbindir}/python {toxinidir}/../../../eng/tox/verify_sdist.py -d {envtmpdir} -t {toxinidir}


[testenv:devtest]
Expand All @@ -157,9 +164,10 @@ pre-deps =
deps = {[base]deps}
changedir = {toxinidir}
commands =
{envbindir}/python {toxinidir}/../../../eng/tox/create_wheel_and_install.py -d {distdir} -p {toxinidir}
{envbindir}/python {toxinidir}/../../../eng/tox/create_wheel_and_install.py -d {envtmpdir} -p {toxinidir}
{envbindir}/python {toxinidir}/../../../eng/tox/install_dev_build_dependency.py -t {toxinidir}
pytest \
{[testenv]default_pytest_params} \
--ignore=.tox \
{posargs} \
{toxinidir}
3 changes: 2 additions & 1 deletion scripts/devops_tasks/common_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,8 @@
"mgmt",
"azure-cognitiveservices",
"azure-servicefabric",
"nspkg"
"nspkg",
"azure-keyvault"
]
NON_MANAGEMENT_CODE_5_ALLOWED = [
"azure-keyvault"
Expand Down
7 changes: 7 additions & 0 deletions scripts/devops_tasks/setup_execute_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,13 @@ def execute_global_install_and_test(
action="store_true",
)

parser.add_argument(
"--tenvparallel",
default=False,
help=("Run individual tox env for each package in parallel."),
action="store_true",
)

parser.add_argument(
"--service",
help=(
Expand Down
26 changes: 26 additions & 0 deletions scripts/devops_tasks/tox_harness.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,6 +199,26 @@ def execute_tox_parallel(tox_command_tuples):
exit(1)


def replace_dev_reqs(file):
    """Rewrite a requirements file in place, dropping ``-e`` (editable) flags.

    Each line like ``-e ../../core/azure-core`` becomes a plain path
    requirement ``../../core/azure-core``; all other lines are kept with
    their whitespace normalized to single spaces.
    (NOTE(review): presumably this avoids editable-install collisions when
    tox environments run in parallel — confirm against the calling CI flow.)

    :param file: path to the requirements file to rewrite in place.
    :returns: None; the file is overwritten.
    """
    adjusted_req_lines = []

    with open(file, "r") as f:
        for line in f:
            # str.split() with no argument already discards all surrounding
            # whitespace and never yields empty tokens, so the only filtering
            # needed is dropping the "-e" flag itself.
            args = [part for part in line.split() if part != "-e"]
            adjusted_req_lines.append(" ".join(args))

    with open(file, "w") as f:
        # note that we directly use '\n' here instead of os.linesep due to how f.write() actually handles this stuff internally
        # If a file is opened in text mode (the default), during write python will accidentally double replace due to "\r" being
        # replaced with "\r\n" on Windows. Result: "\r\n\n". Extra line breaks!
        f.write("\n".join(adjusted_req_lines))


def execute_tox_serial(tox_command_tuples):
for index, cmd_tuple in enumerate(tox_command_tuples):
tox_dir = os.path.join(cmd_tuple[1], "./.tox/")
Expand Down Expand Up @@ -260,9 +280,15 @@ def prep_and_run_tox(targeted_packages, parsed_args, options_array=[]):
with open(destination_dev_req, "w+") as file:
file.write("\n")

if in_ci():
replace_dev_reqs(destination_dev_req)

if parsed_args.tox_env:
tox_execution_array.extend(["-e", parsed_args.tox_env])

if parsed_args.tenvparallel:
tox_execution_array.extend(["-p", "all"])

if local_options_array:
tox_execution_array.extend(["--"] + local_options_array)

Expand Down
1 change: 0 additions & 1 deletion sdk/core/azure-core/dev_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,3 @@ opencensus>=0.6.0
opencensus-ext-azure>=0.3.1
opencensus-ext-threading
mock

Empty file.
17 changes: 9 additions & 8 deletions sdk/storage/azure-storage-blob/tests/test_append_blob.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
remove,
)
import unittest
import uuid
from datetime import datetime, timedelta

from azure.core import MatchConditions
Expand Down Expand Up @@ -926,7 +927,7 @@ def test_append_blob_from_path_chunked_upload(self, resource_group, location, st
self._setup(bsc)
blob = self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'from_path_chunked_upload.temp.dat'
FILE_PATH = 'from_path_chunked_upload.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)

Expand All @@ -949,7 +950,7 @@ def test_append_blob_from_path_with_progress_chunked_upload(self, resource_group
self._setup(bsc)
blob = self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'progress_chunked_upload.temp.dat'
FILE_PATH = 'progress_chunked_upload.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)

Expand Down Expand Up @@ -984,7 +985,7 @@ def test_append_blob_from_stream_chunked_upload(self, resource_group, location,
self._setup(bsc)
blob = self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'stream_chunked_upload.temp.dat'
FILE_PATH = 'stream_chunked_upload.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)

Expand All @@ -1006,7 +1007,7 @@ def test_app_blob_from_stream_nonseekable_chnked_upload_known_size(self, resourc
self._setup(bsc)
blob = self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'upload_known_size.temp.dat'
FILE_PATH = 'upload_known_size.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)
blob_size = len(data) - 66
Expand All @@ -1027,7 +1028,7 @@ def test_app_blob_from_stream_nonseekable_chnked_upload_unk_size(self, resource_
self._setup(bsc)
blob = self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'upload_unk_size.temp.dat'
FILE_PATH = 'upload_unk_size.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)

Expand All @@ -1047,7 +1048,7 @@ def test_append_blob_from_stream_with_multiple_appends(self, resource_group, loc
self._setup(bsc)
blob = self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'multiple_appends.temp.dat'
FILE_PATH = 'multiple_appends.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream1:
stream1.write(data)
with open(FILE_PATH, 'wb') as stream2:
Expand All @@ -1071,7 +1072,7 @@ def test_append_blob_from_stream_chunked_upload_with_count(self, resource_group,
self._setup(bsc)
blob = self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'upload_with_count.temp.dat'
FILE_PATH = 'upload_with_count.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)

Expand All @@ -1093,7 +1094,7 @@ def test_append_blob_from_stream_chunked_upload_with_count_parallel(self, resour
self._setup(bsc)
blob = self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'upload_with_count_parallel.temp.dat'
FILE_PATH = 'upload_with_count_parallel.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)

Expand Down
19 changes: 10 additions & 9 deletions sdk/storage/azure-storage-blob/tests/test_append_blob_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
remove,
)
import unittest
import uuid

from azure.core import MatchConditions
from azure.core.exceptions import HttpResponseError, ResourceNotFoundError, ResourceModifiedError
Expand All @@ -34,7 +35,7 @@

# ------------------------------------------------------------------------------
TEST_BLOB_PREFIX = 'blob'
FILE_PATH = 'blob_input.temp.dat'
FILE_PATH = 'blob_input.temp.{}.dat'.format(str(uuid.uuid4()))
LARGE_BLOB_SIZE = 64 * 1024


Expand Down Expand Up @@ -1020,7 +1021,7 @@ async def test_append_blob_from_path_chunked_upload_async(self, resource_group,
await self._setup(bsc)
blob = await self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'path_chunked_upload_async.temp.dat'
FILE_PATH = 'path_chunked_upload_async.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)

Expand All @@ -1045,7 +1046,7 @@ async def test_append_blob_from_path_with_progress_chunked_upload_async(self, re
await self._setup(bsc)
blob = await self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'progress_chnked_upload_async.temp.dat'
FILE_PATH = 'progress_chnked_upload_async.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)

Expand Down Expand Up @@ -1082,7 +1083,7 @@ async def test_append_blob_from_stream_chunked_upload_async(self, resource_group
await self._setup(bsc)
blob = await self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'stream_chunked_upload_async.temp.dat'
FILE_PATH = 'stream_chunked_upload_async.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)

Expand All @@ -1107,7 +1108,7 @@ async def test_append_blob_from_stream_non_seekable_chunked_upload_known_size_as
await self._setup(bsc)
blob = await self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'chnkd_upld_knwn_size_async.temp.dat'
FILE_PATH = 'chnkd_upld_knwn_size_async.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)
blob_size = len(data) - 66
Expand All @@ -1131,7 +1132,7 @@ async def test_append_blob_from_stream_non_seekable_chunked_upload_unknown_size_
await self._setup(bsc)
blob = await self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'nonseek_chnked_upld_unk_size_async.temp.dat'
FILE_PATH = 'nonseek_chnked_upld_unk_size_async.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)

Expand All @@ -1153,7 +1154,7 @@ async def test_append_blob_from_stream_with_multiple_appends_async(self, resourc
await self._setup(bsc)
blob = await self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'stream_with_multiple_appends_async.temp.dat'
FILE_PATH = 'stream_with_multiple_appends_async.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream1:
stream1.write(data)
with open(FILE_PATH, 'wb') as stream2:
Expand All @@ -1179,7 +1180,7 @@ async def test_append_blob_from_stream_chunked_upload_with_count_async(self, res
await self._setup(bsc)
blob = await self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'hnked_upload_w_count_async.temp.dat'
FILE_PATH = 'hnked_upload_w_count_async.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)

Expand All @@ -1204,7 +1205,7 @@ async def test_append_blob_from_stream_chunked_upload_with_count_parallel_async(
await self._setup(bsc)
blob = await self._create_blob(bsc)
data = self.get_random_bytes(LARGE_BLOB_SIZE)
FILE_PATH = 'upload_w_count_parallel_async.temp.dat'
FILE_PATH = 'upload_w_count_parallel_async.temp.{}.dat'.format(str(uuid.uuid4()))
with open(FILE_PATH, 'wb') as stream:
stream.write(data)

Expand Down
Loading