
Commit a0e2126

Merge pull request apache#61 from mesosphere/migrate-test-runner: test runner

2 parents b733db4 + e62cb58

File tree: 5 files changed (+68 / -9 lines)

.gitignore

Lines changed: 1 addition & 0 deletions

@@ -1 +1,2 @@
 build/
+tests/env

bin/jenkins.sh

Lines changed: 1 addition & 1 deletion

@@ -86,7 +86,7 @@ function spark_test {
 docker_login
 make docker
 CLUSTER_NAME=spark-package-${BUILD_NUMBER} \
-TEST_RUNNER_DIR=$(pwd)/../mesos-spark-integration-tests/test-runner/ \
+TEST_DIR=$(pwd)/../mesos-spark-integration-tests/ \
 DCOS_CHANNEL=testing/master \
 DCOS_USERNAME=bootstrapuser \
 DCOS_PASSWORD=deleteme \

bin/test.sh

Lines changed: 8 additions & 8 deletions

@@ -5,7 +5,7 @@
 #
 # ENV vars:
 #
-# TEST_RUNNER_DIR - mesos-spark-integration-tests/test-runner/
+# TEST_JAR_PATH - /path/to/mesos-spark-integration-tests.jar
 # DOCKER_IMAGE - Docker image used to make the DC/OS package
 #
 # # CCM Env Vars:

@@ -83,13 +83,13 @@ install_spark() {
 }

 run_tests() {
-    pushd ${TEST_RUNNER_DIR}
-    sbt -Dconfig.file=src/main/resources/dcos-application.conf \
-        -Daws.access_key=${AWS_ACCESS_KEY_ID} \
-        -Daws.secret_key=${AWS_SECRET_ACCESS_KEY} \
-        -Daws.s3.bucket=${S3_BUCKET} \
-        -Daws.s3.prefix=${S3_PREFIX} \
-        "dcos"
+    pushd tests
+    if [[ ! -d env ]]; then
+        virtualenv -p python3 env
+    fi
+    source env/bin/activate
+    pip install -r requirements.txt
+    python test.py
     popd
 }
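The rewritten run_tests() drops the sbt-based runner in favor of a Python harness under tests/. A rough sketch of exercising the same path by hand, assuming the environment variables that bin/test.sh documents and tests/test.py reads (TEST_JAR_PATH, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, S3_BUCKET, S3_PREFIX) are already exported:

# Sketch only: mirrors run_tests() from bin/test.sh; assumes the env vars above are set.
cd tests
if [[ ! -d env ]]; then
    virtualenv -p python3 env       # reused on later runs, ignored via tests/env in .gitignore
fi
source env/bin/activate
pip install -r requirements.txt    # boto + shakedown (see tests/requirements.txt)
python test.py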

tests/requirements.txt

Lines changed: 2 additions & 0 deletions

@@ -0,0 +1,2 @@
+boto
+-e git+https://github.com/dcos/shakedown.git@master#egg=shakedown

tests/test.py

Lines changed: 56 additions & 0 deletions

@@ -0,0 +1,56 @@
+from boto.s3.connection import S3Connection
+from boto.s3.key import Key
+import re
+import os
+import subprocess
+import shakedown
+
+def upload_jar(jar):
+    conn = S3Connection(os.environ['AWS_ACCESS_KEY_ID'], os.environ['AWS_SECRET_ACCESS_KEY'])
+    bucket = conn.get_bucket(os.environ['S3_BUCKET'])
+
+    key = Key(bucket, os.environ['S3_PREFIX'] + os.path.basename(jar))
+    key.metadata = {'Content-Type': 'application/java-archive'}
+    key.set_contents_from_filename(jar)
+    key.make_public()
+
+    basename = os.path.basename(jar)
+
+    jar_url = "http://{0}.s3.amazonaws.com/{1}{2}".format(
+        os.environ['S3_BUCKET'],
+        os.environ['S3_PREFIX'],
+        basename)
+
+    return jar_url
+
+
+def submit_job(jar_url):
+    spark_job_runner_args = 'http://leader.mesos:5050 dcos \\"*\\" spark:only 2'
+    submit_args = "-Dspark.driver.memory=2g --class com.typesafe.spark.test.mesos.framework.runners.SparkJobRunner {0} {1}".format(
+        jar_url, spark_job_runner_args)
+    cmd = 'dcos --log-level=DEBUG spark --verbose run --submit-args="{0}"'.format(submit_args)
+    print('Running {}'.format(cmd))
+    stdout = subprocess.check_output(cmd, shell=True).decode('utf-8')
+    print(stdout)
+
+    regex = r"Submission id: (\S+)"
+    match = re.search(regex, stdout)
+    return match.group(1)
+
+def task_log(task_id):
+    cmd = "dcos task log --completed --lines=1000 {}".format(task_id)
+    print('Running {}'.format(cmd))
+    stdout = subprocess.check_output(cmd, shell=True).decode('utf-8')
+    return stdout
+
+
+def main():
+    jar_url = upload_jar(os.getenv('TEST_JAR_PATH'))
+    task_id = submit_job(jar_url)
+    print('Waiting for task id={} to complete'.format(task_id))
+    shakedown.wait_for_task_completion(task_id)
+    log = task_log(task_id)
+    print(log)
+    assert "All tests passed" in log
+
+main()
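tests/test.py uploads the integration-test jar to S3 with boto, submits it through the dcos spark CLI, waits for the driver task via shakedown, and asserts that the task log contains "All tests passed". A rough sketch of running it directly, assuming a dcos CLI already configured against a cluster with the Spark package installed; the bucket, prefix, and jar path below are placeholders, not values from this commit:

# Sketch only: all values are placeholders.
export AWS_ACCESS_KEY_ID=...                 # read by upload_jar()
export AWS_SECRET_ACCESS_KEY=...
export S3_BUCKET=my-test-bucket              # hypothetical bucket
export S3_PREFIX=spark-tests/                # hypothetical key prefix
export TEST_JAR_PATH=/path/to/mesos-spark-integration-tests.jar
python tests/test.py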
