Skip to content

Commit

Permalink
add admin scripts to sbin
Browse files Browse the repository at this point in the history
Signed-off-by: shane-huang <shengsheng.huang@intel.com>
  • Loading branch information
shane-huang committed Sep 23, 2013
1 parent dfbdc9d commit fcfe4f9
Show file tree
Hide file tree
Showing 14 changed files with 47 additions and 47 deletions.
12 changes: 6 additions & 6 deletions docs/spark-standalone.md
Original file line number Diff line number Diff line change
Expand Up @@ -67,12 +67,12 @@ To launch a Spark standalone cluster with the launch scripts, you need to create

Once you've set up this file, you can launch or stop your cluster with the following shell scripts, based on Hadoop's deploy scripts, and available in `SPARK_HOME/sbin`:

- `bin/start-master.sh` - Starts a master instance on the machine the script is executed on.
- `bin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file.
- `bin/start-all.sh` - Starts both a master and a number of slaves as described above.
- `bin/stop-master.sh` - Stops the master that was started via the `bin/start-master.sh` script.
- `bin/stop-slaves.sh` - Stops the slave instances that were started via `bin/start-slaves.sh`.
- `bin/stop-all.sh` - Stops both the master and the slaves as described above.
- `sbin/start-master.sh` - Starts a master instance on the machine the script is executed on.
- `sbin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file.
- `sbin/start-all.sh` - Starts both a master and a number of slaves as described above.
- `sbin/stop-master.sh` - Stops the master that was started via the `sbin/start-master.sh` script.
- `sbin/stop-slaves.sh` - Stops the slave instances that were started via `sbin/start-slaves.sh`.
- `sbin/stop-all.sh` - Stops both the master and the slaves as described above.

Note that these scripts must be executed on the machine you want to run the Spark master on, not your local machine.

Expand Down
File renamed without changes.
File renamed without changes.
6 changes: 3 additions & 3 deletions bin/slaves.sh → sbin/slaves.sh
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,10 @@ if [ $# -le 0 ]; then
exit 1
fi

bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
sbin=`dirname "$0"`
sbin=`cd "$sbin"; pwd`

. "$bin/spark-config.sh"
. "$sbin/spark-config.sh"

# If the slaves file is specified in the command line,
# then it takes precedence over the definition in
Expand Down
File renamed without changes.
6 changes: 3 additions & 3 deletions bin/spark-daemon.sh → sbin/spark-daemon.sh
Original file line number Diff line number Diff line change
Expand Up @@ -37,10 +37,10 @@ if [ $# -le 1 ]; then
exit 1
fi

bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
sbin=`dirname "$0"`
sbin=`cd "$sbin"; pwd`

. "$bin/spark-config.sh"
. "$sbin/spark-config.sh"

# get arguments
startStop=$1
Expand Down
8 changes: 4 additions & 4 deletions bin/spark-daemons.sh → sbin/spark-daemons.sh
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,9 @@ if [ $# -le 1 ]; then
exit 1
fi

bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
sbin=`dirname "$0"`
sbin=`cd "$sbin"; pwd`

. "$bin/spark-config.sh"
. "$sbin/spark-config.sh"

exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/spark-daemon.sh" "$@"
exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/spark-daemon.sh" "$@"
10 changes: 5 additions & 5 deletions bin/start-all.sh → sbin/start-all.sh
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,14 @@
# Starts the master on this node.
# Starts a worker on each node specified in conf/slaves

bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
sbin=`dirname "$0"`
sbin=`cd "$sbin"; pwd`

# Load the Spark configuration
. "$bin/spark-config.sh"
. "$sbin/spark-config.sh"

# Start Master
"$bin"/start-master.sh
"$sbin"/start-master.sh

# Start Workers
"$bin"/start-slaves.sh
"$sbin"/start-slaves.sh
8 changes: 4 additions & 4 deletions bin/start-master.sh → sbin/start-master.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,10 @@

# Starts the master on the machine this script is executed on.

bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
sbin=`dirname "$0"`
sbin=`cd "$sbin"; pwd`

. "$bin/spark-config.sh"
. "$sbin/spark-config.sh"

if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
. "${SPARK_CONF_DIR}/spark-env.sh"
Expand All @@ -49,4 +49,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
fi
fi

"$bin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
"$sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
6 changes: 3 additions & 3 deletions bin/start-slave.sh → sbin/start-slave.sh
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@
# Usage: start-slave.sh <worker#> <master-spark-URL>
# where <master-spark-URL> is like "spark://localhost:7077"

bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
sbin=`dirname "$0"`
sbin=`cd "$sbin"; pwd`

# Set SPARK_PUBLIC_DNS so slaves can be linked in master web UI
if [ "$SPARK_PUBLIC_DNS" = "" ]; then
Expand All @@ -32,4 +32,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
fi
fi

"$bin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"
"$sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"
10 changes: 5 additions & 5 deletions bin/start-slaves.sh → sbin/start-slaves.sh
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,10 @@
# limitations under the License.
#

bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
sbin=`dirname "$0"`
sbin=`cd "$sbin"; pwd`

. "$bin/spark-config.sh"
. "$sbin/spark-config.sh"

if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
. "${SPARK_CONF_DIR}/spark-env.sh"
Expand All @@ -37,12 +37,12 @@ fi

# Launch the slaves
if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
else
if [ "$SPARK_WORKER_WEBUI_PORT" = "" ]; then
SPARK_WORKER_WEBUI_PORT=8081
fi
for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
"$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i ))
"$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i ))
done
fi
10 changes: 5 additions & 5 deletions bin/stop-all.sh → sbin/stop-all.sh
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,12 @@
# Run this on the master node


bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
sbin=`dirname "$0"`
sbin=`cd "$sbin"; pwd`

# Load the Spark configuration
. "$bin/spark-config.sh"
. "$sbin/spark-config.sh"

# Stop the slaves, then the master
"$bin"/stop-slaves.sh
"$bin"/stop-master.sh
"$sbin"/stop-slaves.sh
"$sbin"/stop-master.sh
8 changes: 4 additions & 4 deletions bin/stop-master.sh → sbin/stop-master.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,9 @@

# Stops the master on the machine this script is executed on.

bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
sbin=`dirname "$0"`
sbin=`cd "$sbin"; pwd`

. "$bin/spark-config.sh"
. "$sbin/spark-config.sh"

"$bin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
10 changes: 5 additions & 5 deletions bin/stop-slaves.sh → sbin/stop-slaves.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,19 +19,19 @@

# Stops the slaves on the machine this script is executed on.

bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
sbin=`dirname "$0"`
sbin=`cd "$sbin"; pwd`

. "$bin/spark-config.sh"
. "$sbin/spark-config.sh"

if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
. "${SPARK_CONF_DIR}/spark-env.sh"
fi

if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
"$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1
"$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1
else
for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
"$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
"$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
done
fi

0 comments on commit fcfe4f9

Please sign in to comment.