
Commit 676f60f

Merge pull request apache#25 from mesosphere/spark-new-config

[SPARK-225] new config

2 parents: 356b026 + 9c215b7

File tree

13 files changed (+206, -271 lines)


Makefile

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
+docker:
+	bin/make-docker.sh
+
+package:
+	bin/make-package.py
+
+universe:
+	bin/make-universe.sh
+
+test:
+	bin/test.sh
+
+.PHONY: package docker universe test
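
Taken together with the README changes below, a typical invocation of these targets might look like the following sketch (the image name is a placeholder, not part of this commit):

```
# Placeholder image name; each target wraps the corresponding bin/ script.
DOCKER_IMAGE=example/spark:latest make docker    # build and push the Docker image
DOCKER_IMAGE=example/spark:latest make package   # write the package to build/package
make universe                                    # build the universe
make test                                        # run bin/test.sh (CI entry point)
```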

README.md

Lines changed: 18 additions & 33 deletions
@@ -1,62 +1,47 @@
-Spark DCOS Package
-===
+# Spark DCOS Package
 
 This repo lets you configure, build, and test a new Spark DCOS package.
 It is the source for the Spark package in universe. If you wish to modify
 that package, you should do so here, and generate a new package as
 described below.
 
-Configuring
----
+## Configuring
 
 edit `manifest.json`.
 
-Create a package
----
+## Push a docker image
+
+This will make a docker image from the distribution specified in `manifest.json`
 
 ```
-export DOCKER_IMAGE=...
-./bin/make-package.sh
+DOCKER_IMAGE=<name> make docker
 ```
 
-This command builds a docker image, pushes it, and writes a new
-package to `build/package`. It uses the components listed in
-`manifest.json`.
+## Create a package
 
-Create a universe
----
+Write a package to `build/package`. Use the `DOCKER_IMAGE` name you
+created above.
 
 ```
-./bin/make-universe.sh
+DOCKER_IMAGE=<name> make package
 ```
 
-This produces a new universe in `build/universe`. You can then point your
-local `dcos` to this location via `dcos config set package.sources`.
+## Create a universe
 
-Create a docker image
----
+Write a universe to `build/universe`. You can then upload this to
+e.g. S3, and point your DCOS cluster at it via `dcos package repo
+add`.
 
 ```
-./bin/make-docker.sh <spark-dist> <image>
+make universe
 ```
 
-* `<spark-dist>`: path to spark distribution
-* `<image>`: name of docker image
-
-This creates a new docker image from the given spark distribution. It
-is called by `./bin/make-package.sh`.
 
-
-Test
----
+## Test
 
 ```
 ./bin/test.sh
 ```
 
-This performs every build step, including tests. It builds spark, the docker image,
-the package, and the universe. It spins up a CCM cluster and tests spark against that
-cluster.
-
-It requires several env variables. Read the comment at the top of the file for a
-complete description.
+This requires several env variables, and is primarily used in CI.
+Read the comment at the top of the file for a complete description.
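
For the universe step, a hypothetical end-to-end example of the upload-and-register flow the README describes (bucket and repo names are placeholders; `dcos package repo add` takes a repo name followed by a URL):

```
# Hypothetical S3 bucket; any HTTP-reachable location works.
aws s3 cp build/spark-universe.zip s3://my-bucket/spark-universe.zip
dcos package repo add spark-dev https://my-bucket.s3.amazonaws.com/spark-universe.zip
```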

bin/make-docker.sh

Lines changed: 27 additions & 6 deletions
@@ -1,14 +1,35 @@
 #!/usr/bin/env bash
 
-# Usage:
-# ./bin/make-docker.sh <spark-dist-dir> <image>
+set -e -o pipefail
 
+# ENV vars:
+# DOCKER_IMAGE - <image>:<version>
+# SPARK_DIST_URI (optional) - e.g. http://<domain>/spark-1.2.3.tgz
+
+if [ -z "${SPARK_DIST_URI}" ]; then
+    SPARK_URI=$(cat manifest.json | jq .spark_uri)
+    SPARK_URI="${SPARK_URI%\"}"
+    SPARK_URI="${SPARK_URI#\"}"
+    SPARK_DIST_URI=${SPARK_URI}
+fi
+
+DIST_TGZ=$(basename "${SPARK_DIST_URI}")
+DIST="${DIST_TGZ%.*}"
+
+# fetch spark
+mkdir -p build/dist
+[ -f "build/dist/${DIST_TGZ}" ] || curl -o "build/dist/${DIST_TGZ}" "${SPARK_DIST_URI}"
+tar xvf build/dist/spark*.tgz -C build/dist
+
+# create docker context
 rm -rf build/docker
 mkdir -p build/docker/dist
-cp -r "$1/." build/docker/dist
+cp -r "build/dist/${DIST}/." build/docker/dist
 cp -r conf/* build/docker/dist/conf
 cp -r docker/* build/docker
 
-pushd build/docker
-docker build -t $2 .
-popd
+# build docker
+(cd build/docker && docker build -t "${DOCKER_IMAGE}" .)
+
+# push docker
docker push "${DOCKER_IMAGE}"
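
The manual quote-stripping above is needed because `jq .spark_uri` prints the value with its surrounding quotes; a sketch of an equivalent extraction using jq's raw-output flag, assuming the same top-level `spark_uri` key in `manifest.json`:

```
# -r (raw output) prints the string without quotes, so no stripping is needed.
SPARK_DIST_URI=$(jq -r .spark_uri manifest.json)
```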

bin/make-manifest.sh

Lines changed: 0 additions & 33 deletions
This file was deleted.

bin/make-package.py

Lines changed: 2 additions & 4 deletions
@@ -6,10 +6,8 @@
 import sys
 
 def main():
-    try:
-        os.mkdir('build/package')
-    except OSError:
-        pass
+    if not os.path.isdir('build/package'):
+        os.makedirs('build/package')
 
     with open("manifest.json") as f:
         manifest = json.load(f)

bin/make-package.sh

Lines changed: 0 additions & 31 deletions
This file was deleted.

bin/make-universe.sh

Lines changed: 5 additions & 2 deletions
@@ -1,13 +1,16 @@
 #!/usr/bin/env bash
 
-# creates build/spark-universe, build/spark-universe.zip
+# creates
+# - build/spark-universe
+# - build/spark-universe.zip
 
 set -x -e
 
 rm -rf build/spark-universe*
 
 # make spark package
-./bin/make-package.py
+# TODO(mgummelt): remove this after some time
+# ./bin/make-package.py
 
 # download universe
 wget -O build/spark-universe.zip https://github.com/mesosphere/universe/archive/version-2.x.zip

conf/spark-env.sh

Lines changed: 3 additions & 1 deletion
@@ -4,7 +4,9 @@
 # Copy it as spark-env.sh and edit that to configure Spark for your site.
 
 # A custom HDFS config can be fetched via spark.mesos.uris. This
-# moves those config files into the standard directory.
+# moves those config files into the standard directory. In DCOS, the
+# CLI reads the "SPARK_HDFS_CONFIG_URL" marathon label in order to set
+# spark.mesos.uris
 mkdir -p "${HADOOP_CONF_DIR}"
 [ -f "${MESOS_SANDBOX}/hdfs-site.xml" ] && cp "${MESOS_SANDBOX}/hdfs-site.xml" "${HADOOP_CONF_DIR}"
 [ -f "${MESOS_SANDBOX}/core-site.xml" ] && cp "${MESOS_SANDBOX}/core-site.xml" "${HADOOP_CONF_DIR}"

docker/runit/init.sh

Lines changed: 7 additions & 6 deletions
@@ -15,17 +15,18 @@ if [ "${SPARK_SSL_ENABLED}" == true ]; then
     OTHER_SCHEME=http
 fi
 
-export WEBUI_URL="${SCHEME}://${FRAMEWORK_NAME}${DNS_SUFFIX}:${SPARK_PROXY_PORT}"
-export HISTORY_SERVER_WEB_PROXY_BASE="/service/${FRAMEWORK_NAME}/history"
-export DISPATCHER_UI_WEB_PROXY_BASE="/service/${FRAMEWORK_NAME}"
+# TODO(mgummelt): I'm pretty sure this isn't used. Remove after some time.
+# export WEBUI_URL="${SCHEME}://${FRAMEWORK_NAME}${DNS_SUFFIX}:${SPARK_PROXY_PORT}"
+
+export HISTORY_SERVER_WEB_PROXY_BASE="/service/${DCOS_SERVICE_NAME}/history"
+export DISPATCHER_UI_WEB_PROXY_BASE="/service/${DCOS_SERVICE_NAME}"
 
 # configure history server
 if [ "${ENABLE_HISTORY_SERVER:=false}" = "true" ]; then
     ln -s /var/lib/runit/service/history-server /etc/service/history-server
 fi
 
-# remove whole lines with the wrong scheme, remove #<SCHEME># string only for
-# the scheme we want to configure.
+# Update nginx spark.conf to use http or https
 grep -v "#${OTHER_SCHEME}#" /etc/nginx/conf.d/spark.conf.template |
     sed "s,#${SCHEME}#,," >/etc/nginx/conf.d/spark.conf
 
@@ -39,7 +40,7 @@ sed -i "s,<PROTOCOL>,${SPARK_SSL_PROTOCOL}," /etc/nginx/conf.d/spark.conf
 # bytes cipher strings of Java.
 # sed -i "s,<ENABLED_ALGORITHMS>,${SPARK_SSL_ENABLEDALGORITHMS//,/:}," /etc/nginx/conf.d/spark.conf
 
-# extract cert and key from keystore
+# extract cert and key from keystore, write to /etc/nginx/spark.{crt,key}
 if [ "${SPARK_SSL_ENABLED}" == true ]; then
     KEYDIR=`mktemp -d`
     trap "rm -rf $KEYDIR" EXIT
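
The scheme selection works by tagging template lines with `#http#` or `#https#` markers: lines for the unused scheme are dropped, and the marker is stripped from lines for the active scheme. A minimal sketch of the mechanism with a made-up template fragment (the real `spark.conf.template` is not part of this diff):

```
# Hypothetical template fragment; each line is tagged with its scheme.
cat > /tmp/spark.conf.template <<'EOF'
#http#    listen 80;
#https#    listen 443 ssl;
#https#    ssl_certificate     /etc/nginx/spark.crt;
#https#    ssl_certificate_key /etc/nginx/spark.key;
EOF

# With SCHEME=https and OTHER_SCHEME=http, the pipeline from init.sh keeps
# only the https lines and removes their tags.
SCHEME=https
OTHER_SCHEME=http
grep -v "#${OTHER_SCHEME}#" /tmp/spark.conf.template |
    sed "s,#${SCHEME}#,," > /tmp/spark.conf
```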

docker/runit/service/spark/run

Lines changed: 16 additions & 4 deletions
@@ -8,14 +8,25 @@ export APPLICATION_WEB_PROXY_BASE="${DISPATCHER_UI_WEB_PROXY_BASE}"
 
 cd /opt/spark/dist
 
-if [ "$FRAMEWORK_NAME" != "spark" ]; then
-    export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.deploy.zookeeper.dir=/spark_mesos_dispatcher_$FRAMEWORK_NAME"
+export SPARK_DAEMON_JAVA_OPTS=""
+if [ "${DCOS_SERVICE_NAME}" != "spark" ]; then
+    export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.deploy.zookeeper.dir=/spark_mesos_dispatcher_${DCOS_SERVICE_NAME}"
 fi
 
 if [ "$SPARK_DISPATCHER_MESOS_ROLE" != "" ]; then
     export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.mesos.role=$SPARK_DISPATCHER_MESOS_ROLE"
 fi
 
+if [ "$SPARK_DISPATCHER_MESOS_PRINCIPAL" != "" ]; then
+    export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.mesos.principal=$SPARK_DISPATCHER_MESOS_PRINCIPAL"
+fi
+
+if [ "$SPARK_DISPATCHER_MESOS_SECRET" != "" ]; then
+    export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.mesos.secret=$SPARK_DISPATCHER_MESOS_SECRET"
+fi
+
+
+
 HISTORY_SERVER_CONF=""
 if [ "${ENABLE_HISTORY_SERVER:=false}" = "true" ]; then
     HISTORY_SERVER_CONF="spark.mesos.historyServer.url=${HISTORY_SERVER_WEB_PROXY_BASE}"
@@ -39,12 +50,13 @@ add_if_non_empty spark.ssl.trustStorePassword "${SPARK_SSL_TRUSTSTOREPASSWORD}"
 add_if_non_empty spark.ssl.protocol "${SPARK_SSL_PROTOCOL}"
 add_if_non_empty spark.ssl.enabledAlgorithms "${SPARK_SSL_ENABLEDALGORITHMS}"
 
+export ZK="master.mesos:2181"
 exec /opt/spark/dist/bin/spark-class \
     org.apache.spark.deploy.mesos.MesosClusterDispatcher \
     --port "${DISPATCHER_PORT}" \
     --webui-port "${DISPATCHER_UI_PORT}" \
-    --master "${MESOS_MASTER}" \
+    --master "mesos://zk://${ZK}/mesos" \
     --zk "${ZK}" \
     --host "${HOST}" \
-    --name "${FRAMEWORK_NAME}" \
+    --name "${DCOS_SERVICE_NAME}" \
     --properties-file "conf/mesos-cluster-dispatcher.properties"
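
For reference, with the default above (`ZK=master.mesos:2181`) and, say, `DCOS_SERVICE_NAME=spark`, the exec line expands to roughly the following (port values are placeholders):

```
# Approximate expansion of the dispatcher launch, for illustration only;
# DISPATCHER_PORT and DISPATCHER_UI_PORT values are placeholders.
/opt/spark/dist/bin/spark-class \
    org.apache.spark.deploy.mesos.MesosClusterDispatcher \
    --port "7077" \
    --webui-port "4040" \
    --master "mesos://zk://master.mesos:2181/mesos" \
    --zk "master.mesos:2181" \
    --host "${HOST}" \
    --name "spark" \
    --properties-file "conf/mesos-cluster-dispatcher.properties"
```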
