Commit cfab475

[SPARK-451] Use run-tests.py to run tests (apache#140)
* Use run-tests.py to run tests
* Add .cache to .gitignore
* Remove comments
* Fix test jar file path
* Provide HDFS_SERVICE_NAME when uninstalling
* Remove install_cli from publish jobs
1 parent 49732db commit cfab475
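
Most of the change is in bin/test.sh, which drops its hand-rolled virtualenv/py.test setup in favor of the shared dcos-commons-tools test runner. As the bin/test.sh diff below shows, the suite is now driven by a single command of roughly this shape (only the paths and variable names come from this commit; the values they resolve to depend on the environment):

    CLUSTER_URL=${DCOS_URL} \
    STUB_UNIVERSE_URL=${STUB_UNIVERSE_URL} \
    SCALA_TEST_JAR_PATH=${SPARK_BUILD_DIR}/tests/jobs/scala/target/scala-2.11/dcos-spark-scala-tests-assembly-0.1-SNAPSHOT.jar \
    ${COMMONS_DIR}/tools/run_tests.py shakedown ${SPARK_BUILD_DIR}/tests ${SPARK_BUILD_DIR}/tests/requirements.txt

The explicit virtualenv creation and pip install of requirements.txt disappear from run_tests(); the requirements file is instead passed to run_tests.py, which presumably handles dependency setup itself.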

8 files changed: 30 additions, 49 deletions

.gitignore

Lines changed: 1 addition & 0 deletions

@@ -1,3 +1,4 @@
+.cache/
 build/
 dcos-commons-tools/
 tests/env

bin/jenkins-dist-publish.sh

Lines changed: 0 additions & 2 deletions

@@ -34,8 +34,6 @@ SPARK_VERSION=${GIT_BRANCH#origin/tags/custom-} # e.g. "2.0.2"
 source "${DIR}/jenkins.sh"

 pushd "${SPARK_BUILD_DIR}"
-install_cli
 docker_login
-# build_and_test
 publish_dists
 popd

bin/jenkins-dist-test.sh

Lines changed: 0 additions & 1 deletion

@@ -8,7 +8,6 @@ SPARK_DIR="${DIR}/../../spark"

 function run() {
     source bin/jenkins.sh
-    install_cli
     docker_login
     build_and_test
 }

bin/jenkins-package-publish.sh

Lines changed: 0 additions & 1 deletion

@@ -36,7 +36,6 @@ source "${DIR}/jenkins.sh"

 pushd "${SPARK_BUILD_DIR}"
 SPARK_VERSION=$(jq -r ".spark_version" manifest.json)
-install_cli
 docker_login
 publish_docker_images
 make_universe

bin/jenkins-package-test.sh

Lines changed: 0 additions & 2 deletions

@@ -7,9 +7,7 @@ SPARK_BUILD_DIR=${DIR}/..

 function run() {
     source bin/jenkins.sh
-    install_cli
     docker_login
-
     make --directory=dispatcher universe
     export $(cat "${WORKSPACE}/stub-universe.properties")
     make test

bin/jenkins.sh

Lines changed: 0 additions & 14 deletions

@@ -112,20 +112,6 @@ function docker_version() {
     echo "${SPARK_BUILD_VERSION}-hadoop-$1"
 }

-function install_cli {
-    curl -O https://downloads.mesosphere.io/dcos-cli/install.sh
-    rm -rf dcos-cli/
-    mkdir dcos-cli
-    bash install.sh dcos-cli http://change.me --add-path no
-    source dcos-cli/bin/env-setup
-
-    # hack because the installer forces an old CLI version
-    pip install -U dcoscli
-
-    # needed in `make test`
-    pip3 install jsonschema
-}
-
 function docker_login {
     docker login --email=docker@mesosphere.io --username="${DOCKER_USERNAME}" --password="${DOCKER_PASSWORD}"
 }

bin/test.sh

Lines changed: 5 additions & 20 deletions

@@ -7,6 +7,7 @@ set -x
 set -o pipefail

 BIN_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+SPARK_BUILD_DIR="${BIN_DIR}/.."

 check_env() {
     # Check env early, before starting the cluster:
@@ -36,16 +37,6 @@ start_cluster() {
     fi
 }

-configure_cli() {
-    dcos config set core.dcos_url "${DCOS_URL}"
-    dcos config set core.ssl_verify false
-    ${COMMONS_DIR}/tools/dcos_login.py
-    dcos config show
-    if [ -n "${STUB_UNIVERSE_URL}" ]; then
-        dcos package repo add --index=0 spark-test "${STUB_UNIVERSE_URL}"
-    fi
-}
-
 initialize_service_account() {
     if [ "$SECURITY" = "strict" ]; then
         ${COMMONS_DIR}/tools/create_service_account.sh --strict
@@ -59,21 +50,15 @@ build_scala_test_jar() {
 }

 run_tests() {
-    pushd tests
-    if [[ ! -d venv ]]; then
-        virtualenv -p python3 venv
-    fi
-    source venv/bin/activate
-    pip install -r requirements.txt
-    SCALA_TEST_JAR_PATH=$(pwd)/jobs/scala/target/scala-2.11/dcos-spark-scala-tests-assembly-0.1-SNAPSHOT.jar \
-        py.test -s test.py
-    popd
+    SCALA_TEST_JAR_PATH=${SPARK_BUILD_DIR}/tests/jobs/scala/target/scala-2.11/dcos-spark-scala-tests-assembly-0.1-SNAPSHOT.jar \
+    CLUSTER_URL=${DCOS_URL} \
+    STUB_UNIVERSE_URL=${STUB_UNIVERSE_URL} \
+    ${COMMONS_DIR}/tools/run_tests.py shakedown ${SPARK_BUILD_DIR}/tests ${SPARK_BUILD_DIR}/tests/requirements.txt
 }

 check_env
 start_cluster
 # TODO: Migrate the following three commands to dcos-commons-tools/run-tests.py
-configure_cli
 initialize_service_account
 build_scala_test_jar
 run_tests
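
Design note on the removed configure_cli(): it previously wrote the cluster URL and the stub universe repo into the local DC/OS CLI configuration; after this change the same information reaches the shared runner as the CLUSTER_URL and STUB_UNIVERSE_URL environment variables exported on the run_tests.py command line. The new run_tests() relies on the shell's per-command environment assignments, where variables prefixed to a command (with line continuations in between) apply only to that one command. A small standalone illustration of the same pattern, unrelated to this repository:

    # FOO and BAR are visible to printenv here, but not to the rest of the script
    FOO=1 \
    BAR=2 \
    printenv FOO BAR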

tests/test.py renamed to tests/test_spark.py

Lines changed: 24 additions & 9 deletions

@@ -25,11 +25,16 @@
 def _init_logging():
     logging.basicConfig(level=logging.INFO)
     logging.getLogger('dcos').setLevel(logging.WARNING)
+    logging.getLogger('requests').setLevel(logging.WARNING)


 _init_logging()
 LOGGER = logging.getLogger(__name__)
 THIS_DIR = os.path.dirname(os.path.abspath(__file__))
+DEFAULT_HDFS_TASK_COUNT=10
+HDFS_PACKAGE_NAME='beta-hdfs'
+HDFS_SERVICE_NAME='hdfs'
+SPARK_PACKAGE_NAME='spark'


 def setup_module(module):
@@ -39,12 +44,13 @@ def setup_module(module):


 def teardown_module(module):
-    shakedown.uninstall_package_and_wait('spark')
+    shakedown.uninstall_package_and_wait(SPARK_PACKAGE_NAME)
     if _hdfs_enabled():
-        shakedown.uninstall_package_and_wait('hdfs')
-        _run_janitor('hdfs')
+        shakedown.uninstall_package_and_wait(HDFS_PACKAGE_NAME, HDFS_SERVICE_NAME)
+        _run_janitor(HDFS_PACKAGE_NAME)


+@pytest.mark.sanity
 def test_jar():
     master_url = ("https" if _is_strict() else "http") + "://leader.mesos:5050"
     spark_job_runner_args = '{} dcos \\"*\\" spark:only 2 --auth-token={}'.format(
@@ -57,6 +63,7 @@ def test_jar():
         ["--class", 'com.typesafe.spark.test.mesos.framework.runners.SparkJobRunner'])


+@pytest.mark.sanity
 def test_teragen():
     if _hdfs_enabled():
         jar_url = "https://downloads.mesosphere.io/spark/examples/spark-terasort-1.0-jar-with-dependencies_2.11.jar"
@@ -66,6 +73,7 @@ def test_teragen():
         ["--class", "com.github.ehiggs.spark.terasort.TeraGen"])


+@pytest.mark.sanity
 def test_python():
     python_script_path = os.path.join(THIS_DIR, 'jobs', 'python', 'pi_with_include.py')
     python_script_url = _upload_file(python_script_path)
@@ -100,6 +108,7 @@ def test_kerberos():
         "--conf", "sun.security.krb5.debug=true"])


+@pytest.mark.sanity
 def test_r():
     r_script_path = os.path.join(THIS_DIR, 'jobs', 'R', 'dataframe.R')
     r_script_url = _upload_file(r_script_path)
@@ -108,6 +117,7 @@ def test_r():
         "Justin")


+@pytest.mark.sanity
 def test_cni():
     SPARK_EXAMPLES="http://downloads.mesosphere.com/spark/assets/spark-examples_2.11-2.0.1.jar"
     _run_tests(SPARK_EXAMPLES,
@@ -117,6 +127,7 @@ def test_cni():
         "--class", "org.apache.spark.examples.SparkPi"])


+@pytest.mark.sanity
 def test_s3():
     linecount_path = os.path.join(THIS_DIR, 'resources', 'linecount.txt')
     s3.upload_file(linecount_path)
@@ -147,14 +158,14 @@ def _hdfs_enabled():
 def _require_hdfs():
     LOGGER.info("Ensuring HDFS is installed.")

-    _require_package('hdfs', _get_hdfs_options())
+    _require_package(HDFS_PACKAGE_NAME, _get_hdfs_options())
     _wait_for_hdfs()


 def _require_spark():
     LOGGER.info("Ensuring Spark is installed.")

-    _require_package('spark', _get_spark_options())
+    _require_package(SPARK_PACKAGE_NAME, _get_spark_options())
     _wait_for_spark()


@@ -187,17 +198,18 @@ def _get_hdfs_options():
     if _is_strict():
         options = {'service': {'principal': 'service-acct', 'secret_name': 'secret'}}
     else:
-        options = {}
+        options = {"service": {}}
+
+    options["service"]["beta-optin"] = True
     return options


 def _wait_for_hdfs():
     shakedown.wait_for(_is_hdfs_ready, ignore_exceptions=False, timeout_seconds=900)


-DEFAULT_HDFS_TASK_COUNT=10
 def _is_hdfs_ready(expected_tasks = DEFAULT_HDFS_TASK_COUNT):
-    running_tasks = [t for t in shakedown.get_service_tasks('hdfs') \
+    running_tasks = [t for t in shakedown.get_service_tasks(HDFS_SERVICE_NAME) \
                      if t['state'] == 'TASK_RUNNING']
     return len(running_tasks) >= expected_tasks

@@ -231,7 +243,10 @@ def _install_spark():
                  "principal": "service-acct"}
         options['security'] = {"mesos": {"authentication": {"secret_name": "secret"}}}

-    shakedown.install_package('spark', options_json=options, wait_for_completion=True)
+    shakedown.install_package(
+        SPARK_PACKAGE_NAME,
+        options_json=options,
+        wait_for_completion=True)

     def pred():
         dcos_url = dcos.config.get_config_val("core.dcos_url")
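
The new @pytest.mark.sanity markers make it possible to select just the quick smoke tests with pytest's standard -m marker filter. A minimal sketch, assuming the dependencies from tests/requirements.txt are installed and a cluster is already configured:

    py.test -s -m sanity tests/test_spark.py

Omitting -m sanity runs the whole module; of the tests visible in this diff, test_kerberos is the only one not given the marker.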
