Merge branch 'scripts-reorg' of github.com:shane-huang/incubator-spark into spark-915-segregate-scripts

Conflicts:
	bin/spark-shell
	core/pom.xml
	core/src/main/scala/org/apache/spark/SparkContext.scala
	core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
	core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
	core/src/test/scala/org/apache/spark/DriverSuite.scala
	python/run-tests
	sbin/compute-classpath.sh
	sbin/spark-class
	sbin/stop-slaves.sh
ScrapCodes committed Jan 2, 2014
2 parents 3713f81 + 52ccf4f commit 980afd2
Showing 41 changed files with 90 additions and 96 deletions.
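In short, user-facing launchers move under bin/ while admin and internal helper scripts move under sbin/. A rough sketch of how the relocated scripts are invoked from the Spark root after this change (paths and arguments are illustrative; the exact file sets are in the diffs below):

    # User-facing scripts now live in bin/
    ./bin/pyspark
    ./bin/spark-shell
    ./bin/run-example <example-class> <master>     # placeholders, not literal arguments
    # Internal/admin scripts now live in sbin/
    ./sbin/spark-class <main-class>                # placeholder main class
    ./sbin/compute-classpath.sh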
11 changes: 4 additions & 7 deletions assembly/src/main/assembly/assembly.xml
@@ -39,23 +39,20 @@
</fileSet>
<fileSet>
<directory>
${project.parent.basedir}/bin/
${project.parent.basedir}/sbin/
</directory>
<outputDirectory>/bin</outputDirectory>
<outputDirectory>/sbin</outputDirectory>
<includes>
<include>**/*</include>
</includes>
</fileSet>
<fileSet>
<directory>
${project.parent.basedir}
${project.parent.basedir}/bin/
</directory>
<outputDirectory>/bin</outputDirectory>
<includes>
<include>run-example*</include>
<include>spark-class*</include>
<include>spark-shell*</include>
<include>spark-executor*</include>
<include>**/*</include>
</includes>
</fileSet>
</fileSets>
4 changes: 2 additions & 2 deletions pyspark → bin/pyspark
@@ -18,7 +18,7 @@
#

# Figure out where the Scala framework is installed
FWDIR="$(cd `dirname $0`; pwd)"
FWDIR="$(cd `dirname $0`/..; pwd)"

# Export this as SPARK_HOME
export SPARK_HOME="$FWDIR"
@@ -37,7 +37,7 @@ if [ ! -f "$FWDIR/RELEASE" ]; then
fi

# Load environment variables from conf/spark-env.sh, if it exists
if [ -e $FWDIR/conf/spark-env.sh ] ; then
if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
. $FWDIR/conf/spark-env.sh
fi

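Because the relocated launchers sit one level below the Spark root, each of them now resolves SPARK_HOME by stepping up from its own directory, as the pyspark hunk above shows. A minimal sketch of that resolution (the /opt/spark path is only an example):

    # If this script lives at /opt/spark/bin/pyspark, FWDIR resolves to /opt/spark.
    FWDIR="$(cd `dirname $0`/..; pwd)"
    export SPARK_HOME="$FWDIR"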
File renamed without changes.
2 changes: 1 addition & 1 deletion pyspark2.cmd → bin/pyspark2.cmd
@@ -20,7 +20,7 @@ rem
set SCALA_VERSION=2.10

rem Figure out where the Spark framework is installed
set FWDIR=%~dp0
set FWDIR=%~dp0..\

rem Export this as SPARK_HOME
set SPARK_HOME=%FWDIR%
6 changes: 3 additions & 3 deletions run-example → bin/run-example
@@ -25,13 +25,13 @@ esac
SCALA_VERSION=2.10

# Figure out where the Scala framework is installed
FWDIR="$(cd `dirname $0`; pwd)"
FWDIR="$(cd `dirname $0`/..; pwd)"

# Export this as SPARK_HOME
export SPARK_HOME="$FWDIR"

# Load environment variables from conf/spark-env.sh, if it exists
if [ -e $FWDIR/conf/spark-env.sh ] ; then
if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
. $FWDIR/conf/spark-env.sh
fi

@@ -61,7 +61,7 @@ fi

# Since the examples JAR ideally shouldn't include spark-core (that dependency should be
# "provided"), also add our standard Spark classpath, built using compute-classpath.sh.
CLASSPATH=`$FWDIR/bin/compute-classpath.sh`
CLASSPATH=`$FWDIR/sbin/compute-classpath.sh`
CLASSPATH="$SPARK_EXAMPLES_JAR:$CLASSPATH"

if $cygwin; then
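With run-example relocated, the examples are launched through bin/ from the Spark root; for instance (class name and master URL are illustrative, not taken from this diff):

    ./bin/run-example org.apache.spark.examples.SparkPi local[2]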
File renamed without changes.
4 changes: 2 additions & 2 deletions run-example2.cmd → bin/run-example2.cmd
@@ -20,7 +20,7 @@ rem
set SCALA_VERSION=2.10

rem Figure out where the Spark framework is installed
set FWDIR=%~dp0
set FWDIR=%~dp0..\

rem Export this as SPARK_HOME
set SPARK_HOME=%FWDIR%
@@ -49,7 +49,7 @@ if "x%SPARK_EXAMPLES_JAR%"=="x" (

rem Compute Spark classpath using external script
set DONT_PRINT_CLASSPATH=1
call "%FWDIR%bin\compute-classpath.cmd"
call "%FWDIR%sbin\compute-classpath.cmd"
set DONT_PRINT_CLASSPATH=0
set CLASSPATH=%SPARK_EXAMPLES_JAR%;%CLASSPATH%

6 changes: 3 additions & 3 deletions spark-shell → bin/spark-shell
@@ -32,7 +32,7 @@ esac
# Enter posix mode for bash
set -o posix

FWDIR="`dirname $0`"
FWDIR="$(cd `dirname $0`/..; pwd)"

for o in "$@"; do
if [ "$1" = "-c" -o "$1" = "--cores" ]; then
@@ -90,10 +90,10 @@ if $cygwin; then
# "Backspace sends ^H" setting in "Keys" section of the Mintty options
# (see https://github.com/sbt/sbt/issues/562).
stty -icanon min 1 -echo > /dev/null 2>&1
$FWDIR/spark-class -Djline.terminal=unix $OPTIONS org.apache.spark.repl.Main "$@"
$FWDIR/sbin/spark-class -Djline.terminal=unix $OPTIONS org.apache.spark.repl.Main "$@"
stty icanon echo > /dev/null 2>&1
else
$FWDIR/spark-class $OPTIONS org.apache.spark.repl.Main "$@"
$FWDIR/sbin/spark-class $OPTIONS org.apache.spark.repl.Main "$@"
fi

# record the exit status lest it be overwritten:
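With the shell under bin/ and spark-class under sbin/, starting a REPL from the Spark root looks roughly like this (the -c value is illustrative; the flag is the core-limit option handled in the script above):

    ./bin/spark-shell            # delegates to sbin/spark-class internally
    ./bin/spark-shell -c 4       # illustrative: limit the shell to 4 cores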
5 changes: 3 additions & 2 deletions spark-shell.cmd → bin/spark-shell.cmd
@@ -17,6 +17,7 @@ rem See the License for the specific language governing permissions and
rem limitations under the License.
rem

set FWDIR=%~dp0
rem Find the path of sbin
set SBIN=%~dp0..\sbin\

cmd /V /E /C %FWDIR%spark-class2.cmd org.apache.spark.repl.Main %*
cmd /V /E /C %SBIN%spark-class2.cmd org.apache.spark.repl.Main %*
@@ -122,7 +122,7 @@ private[spark] class ExecutorRunner(
// Figure out our classpath with the external compute-classpath script
val ext = if (System.getProperty("os.name").startsWith("Windows")) ".cmd" else ".sh"
val classPath = Utils.executeAndGetOutput(
Seq(sparkHome + "/bin/compute-classpath" + ext),
Seq(sparkHome + "/sbin/compute-classpath" + ext),
extraEnvironment=appDesc.command.environment)

Seq("-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts
@@ -127,7 +127,7 @@ private[spark] class CoarseMesosSchedulerBackend(
CoarseGrainedSchedulerBackend.ACTOR_NAME)
val uri = conf.get("spark.executor.uri", null)
if (uri == null) {
val runScript = new File(sparkHome, "spark-class").getCanonicalPath
val runScript = new File(sparkHome, "./sbin/spark-class").getCanonicalPath
command.setValue(
"\"%s\" org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %d".format(
runScript, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
@@ -136,7 +136,7 @@
// glob the directory "correctly".
val basename = uri.split('/').last.split('.').head
command.setValue(
"cd %s*; ./spark-class org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %d"
"cd %s*; ./sbin/spark-class org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %d"
.format(basename, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
command.addUris(CommandInfo.URI.newBuilder().setValue(uri))
}
@@ -102,12 +102,12 @@ private[spark] class MesosSchedulerBackend(
.setEnvironment(environment)
val uri = sc.conf.get("spark.executor.uri", null)
if (uri == null) {
command.setValue(new File(sparkHome, "spark-executor").getCanonicalPath)
command.setValue(new File(sparkHome, "/sbin/spark-executor").getCanonicalPath)
} else {
// Grab everything to the first '.'. We'll use that and '*' to
// glob the directory "correctly".
val basename = uri.split('/').last.split('.').head
command.setValue("cd %s*; ./spark-executor".format(basename))
command.setValue("cd %s*; ./sbin/spark-executor".format(basename))
command.addUris(CommandInfo.URI.newBuilder().setValue(uri))
}
val memory = Resource.newBuilder()
@@ -36,7 +36,7 @@ private[spark] object UIWorkloadGenerator {

def main(args: Array[String]) {
if (args.length < 2) {
println("usage: ./spark-class org.apache.spark.ui.UIWorkloadGenerator [master] [FIFO|FAIR]")
println("usage: ./sbin/spark-class org.apache.spark.ui.UIWorkloadGenerator [master] [FIFO|FAIR]")
System.exit(1)
}

2 changes: 1 addition & 1 deletion core/src/test/scala/org/apache/spark/DriverSuite.scala
@@ -35,7 +35,7 @@ class DriverSuite extends FunSuite with Timeouts {
val masters = Table(("master"), ("local"), ("local-cluster[2,1,512]"))
forAll(masters) { (master: String) =>
failAfter(60 seconds) {
Utils.execute(Seq("./spark-class", "org.apache.spark.DriverWithoutCleanup", master),
Utils.execute(Seq("./sbin/spark-class", "org.apache.spark.DriverWithoutCleanup", master),
new File(System.getenv("SPARK_HOME")))
}
}
File renamed without changes.
File renamed without changes.
File renamed without changes.
4 changes: 2 additions & 2 deletions docs/running-on-yarn.md
@@ -54,7 +54,7 @@ There are two scheduler mode that can be used to launch spark application on YAR

The command to launch the YARN Client is as follows:

SPARK_JAR=<SPARK_ASSEMBLY_JAR_FILE> ./spark-class org.apache.spark.deploy.yarn.Client \
SPARK_JAR=<SPARK_ASSEMBLY_JAR_FILE> ./sbin/spark-class org.apache.spark.deploy.yarn.Client \
--jar <YOUR_APP_JAR_FILE> \
--class <APP_MAIN_CLASS> \
--args <APP_MAIN_ARGUMENTS> \
@@ -79,7 +79,7 @@ For example:

# Submit Spark's ApplicationMaster to YARN's ResourceManager, and instruct Spark to run the SparkPi example
$ SPARK_JAR=./assembly/target/scala-{{site.SCALA_VERSION}}/spark-assembly-{{site.SPARK_VERSION}}-hadoop2.0.5-alpha.jar \
./spark-class org.apache.spark.deploy.yarn.Client \
./sbin/spark-class org.apache.spark.deploy.yarn.Client \
--jar examples/target/scala-{{site.SCALA_VERSION}}/spark-examples-assembly-{{site.SPARK_VERSION}}.jar \
--class org.apache.spark.examples.SparkPi \
--args yarn-standalone \
14 changes: 7 additions & 7 deletions docs/spark-standalone.md
@@ -28,7 +28,7 @@ the master's web UI, which is [http://localhost:8080](http://localhost:8080) by

Similarly, you can start one or more workers and connect them to the master via:

./spark-class org.apache.spark.deploy.worker.Worker spark://IP:PORT
./sbin/spark-class org.apache.spark.deploy.worker.Worker spark://IP:PORT

Once you have started a worker, look at the master's web UI ([http://localhost:8080](http://localhost:8080) by default).
You should see the new node listed there, along with its number of CPUs and memory (minus one gigabyte left for the OS).
@@ -70,12 +70,12 @@ To launch a Spark standalone cluster with the launch scripts, you need to create

Once you've set up this file, you can launch or stop your cluster with the following shell scripts, based on Hadoop's deploy scripts, and available in `SPARK_HOME/bin`:

- `bin/start-master.sh` - Starts a master instance on the machine the script is executed on.
- `bin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file.
- `bin/start-all.sh` - Starts both a master and a number of slaves as described above.
- `bin/stop-master.sh` - Stops the master that was started via the `bin/start-master.sh` script.
- `bin/stop-slaves.sh` - Stops the slave instances that were started via `bin/start-slaves.sh`.
- `bin/stop-all.sh` - Stops both the master and the slaves as described above.
- `sbin/start-master.sh` - Starts a master instance on the machine the script is executed on.
- `sbin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file.
- `sbin/start-all.sh` - Starts both a master and a number of slaves as described above.
- `sbin/stop-master.sh` - Stops the master that was started via the `bin/start-master.sh` script.
- `sbin/stop-slaves.sh` - Stops the slave instances that were started via `bin/start-slaves.sh`.
- `sbin/stop-all.sh` - Stops both the master and the slaves as described above.

Note that these scripts must be executed on the machine you want to run the Spark master on, not your local machine.

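Putting the relocated admin scripts together, a minimal manual bring-up and tear-down of a standalone cluster would look roughly like this (spark://IP:PORT is a placeholder for the master URL shown in the master's web UI):

    ./sbin/start-master.sh
    ./sbin/spark-class org.apache.spark.deploy.worker.Worker spark://IP:PORT
    # ...and later, stop everything started above:
    ./sbin/stop-all.sh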
5 changes: 1 addition & 4 deletions make-distribution.sh
@@ -98,10 +98,7 @@ mkdir "$DISTDIR"/conf
cp "$FWDIR"/conf/*.template "$DISTDIR"/conf
cp -r "$FWDIR/bin" "$DISTDIR"
cp -r "$FWDIR/python" "$DISTDIR"
cp "$FWDIR/spark-class" "$DISTDIR"
cp "$FWDIR/spark-shell" "$DISTDIR"
cp "$FWDIR/spark-executor" "$DISTDIR"
cp "$FWDIR/pyspark" "$DISTDIR"
cp -r "$FWDIR/sbin" "$DISTDIR"


if [ "$MAKE_TGZ" == "true" ]; then
2 changes: 1 addition & 1 deletion python/pyspark/java_gateway.py
@@ -31,7 +31,7 @@ def launch_gateway():
# Launch the Py4j gateway using Spark's run command so that we pick up the
# proper classpath and SPARK_MEM settings from spark-env.sh
on_windows = platform.system() == "Windows"
script = "spark-class.cmd" if on_windows else "spark-class"
script = "./sbin/spark-class.cmd" if on_windows else "./sbin/spark-class"
command = [os.path.join(SPARK_HOME, script), "py4j.GatewayServer",
"--die-on-broken-pipe", "0"]
if not on_windows:
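The command list assembled in launch_gateway above boils down to spawning the relocated launcher; on a Unix machine the resulting child process is roughly equivalent to running (illustrative, with SPARK_HOME set in the environment):

    "$SPARK_HOME"/sbin/spark-class py4j.GatewayServer --die-on-broken-pipe 0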
2 changes: 1 addition & 1 deletion python/run-tests
@@ -29,7 +29,7 @@ FAILED=0
rm -f unit-tests.log

function run_test() {
SPARK_TESTING=0 $FWDIR/pyspark $1 2>&1 | tee -a unit-tests.log
SPARK_TESTING=0 $FWDIR/bin/pyspark $1 2>&1 | tee -a unit-tests.log
FAILED=$((PIPESTATUS[0]||$FAILED))
}

3 changes: 1 addition & 2 deletions repl-bin/src/deb/bin/run
@@ -48,8 +48,7 @@ fi
export JAVA_OPTS

# Build up classpath
CLASSPATH="$SPARK_CLASSPATH"
CLASSPATH+=":$FWDIR/conf"
CLASSPATH=":$FWDIR/conf"
for jar in `find $FWDIR -name '*jar'`; do
CLASSPATH+=":$jar"
done
1 change: 0 additions & 1 deletion repl/pom.xml
@@ -127,7 +127,6 @@
<environmentVariables>
<SPARK_HOME>${basedir}/..</SPARK_HOME>
<SPARK_TESTING>1</SPARK_TESTING>
<SPARK_CLASSPATH>${spark.classpath}</SPARK_CLASSPATH>
</environmentVariables>
</configuration>
</plugin>
2 changes: 1 addition & 1 deletion bin/compute-classpath.cmd → sbin/compute-classpath.cmd
@@ -29,7 +29,7 @@ rem Load environment variables from conf\spark-env.cmd, if it exists
if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"

rem Build up classpath
set CLASSPATH=%SPARK_CLASSPATH%;%FWDIR%conf
set CLASSPATH=%FWDIR%conf
if exist "%FWDIR%RELEASE" (
for %%d in ("%FWDIR%jars\spark-assembly*.jar") do (
set ASSEMBLY_JAR=%%d
2 changes: 1 addition & 1 deletion bin/compute-classpath.sh → sbin/compute-classpath.sh
@@ -26,7 +26,7 @@ SCALA_VERSION=2.10
FWDIR="$(cd `dirname $0`/..; pwd)"

# Load environment variables from conf/spark-env.sh, if it exists
if [ -e $FWDIR/conf/spark-env.sh ] ; then
if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
. $FWDIR/conf/spark-env.sh
fi

6 changes: 3 additions & 3 deletions bin/slaves.sh → sbin/slaves.sh
@@ -36,10 +36,10 @@ if [ $# -le 0 ]; then
exit 1
fi

bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
sbin=`dirname "$0"`
sbin=`cd "$sbin"; pwd`

. "$bin/spark-config.sh"
. "$sbin/spark-config.sh"

# If the slaves file is specified in the command line,
# then it takes precedence over the definition in
8 changes: 4 additions & 4 deletions spark-class → sbin/spark-class
@@ -25,13 +25,13 @@ esac
SCALA_VERSION=2.10

# Figure out where the Scala framework is installed
FWDIR="$(cd `dirname $0`; pwd)"
FWDIR="$(cd `dirname $0`/..; pwd)"

# Export this as SPARK_HOME
export SPARK_HOME="$FWDIR"

# Load environment variables from conf/spark-env.sh, if it exists
if [ -e $FWDIR/conf/spark-env.sh ] ; then
if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
. $FWDIR/conf/spark-env.sh
fi

@@ -92,7 +92,7 @@ JAVA_OPTS="$OUR_JAVA_OPTS"
JAVA_OPTS="$JAVA_OPTS -Djava.library.path=$SPARK_LIBRARY_PATH"
JAVA_OPTS="$JAVA_OPTS -Xms$SPARK_MEM -Xmx$SPARK_MEM"
# Load extra JAVA_OPTS from conf/java-opts, if it exists
if [ -e $FWDIR/conf/java-opts ] ; then
if [ -e "$FWDIR/conf/java-opts" ] ; then
JAVA_OPTS="$JAVA_OPTS `cat $FWDIR/conf/java-opts`"
fi
export JAVA_OPTS
@@ -128,7 +128,7 @@ if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
fi

# Compute classpath using external script
CLASSPATH=`$FWDIR/bin/compute-classpath.sh`
CLASSPATH=`$FWDIR/sbin/compute-classpath.sh`

if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
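Both the Unix and Windows launchers now obtain their classpath from the relocated compute-classpath scripts. A quick, illustrative way to inspect what they will hand to the JVM:

    ./sbin/compute-classpath.sh | tr ':' '\n'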
File renamed without changes.
4 changes: 2 additions & 2 deletions spark-class2.cmd → sbin/spark-class2.cmd
@@ -20,7 +20,7 @@ rem
set SCALA_VERSION=2.10

rem Figure out where the Spark framework is installed
set FWDIR=%~dp0
set FWDIR=%~dp0..\

rem Export this as SPARK_HOME
set SPARK_HOME=%FWDIR%
@@ -73,7 +73,7 @@ for %%d in ("%TOOLS_DIR%\target\scala-%SCALA_VERSION%\spark-tools*assembly*.jar"

rem Compute classpath using external script
set DONT_PRINT_CLASSPATH=1
call "%FWDIR%bin\compute-classpath.cmd"
call "%FWDIR%sbin\compute-classpath.cmd"
set DONT_PRINT_CLASSPATH=0
set CLASSPATH=%CLASSPATH%;%SPARK_TOOLS_JAR%

File renamed without changes.
8 changes: 4 additions & 4 deletions bin/spark-daemon.sh → sbin/spark-daemon.sh
@@ -37,10 +37,10 @@ if [ $# -le 1 ]; then
exit 1
fi

bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
sbin=`dirname "$0"`
sbin=`cd "$sbin"; pwd`

. "$bin/spark-config.sh"
. "$sbin/spark-config.sh"

# get arguments

@@ -147,7 +147,7 @@ case $startStop in
spark_rotate_log "$log"
echo starting $command, logging to $log
cd "$SPARK_PREFIX"
nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/sbin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
newpid=$!
echo $newpid > $pid
sleep 2