Skip to content

Commit

Permalink
[SPARK-3273][SPARK-3301] We should read the version information from the same place
Browse files Browse the repository at this point in the history

Author: GuoQiang Li <witgo@qq.com>

Closes apache#2175 from witgo/SPARK-3273 and squashes the following commits:

cf9c65a [GuoQiang Li] We should read the version information from the same place
2a44e2f [GuoQiang Li] The spark version in the welcome message of pyspark is not correct
  • Loading branch information
witgo authored and JoshRosen committed Sep 6, 2014
1 parent 607ae39 commit 21a1e1b
Show file tree
Hide file tree
Showing 6 changed files with 14 additions and 10 deletions.
5 changes: 2 additions & 3 deletions core/src/main/scala/org/apache/spark/SparkContext.scala
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ import org.apache.spark.scheduler.cluster.{CoarseGrainedSchedulerBackend, SparkD
import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
import org.apache.spark.scheduler.local.LocalBackend
import org.apache.spark.storage._
import org.apache.spark.SPARK_VERSION
import org.apache.spark.ui.SparkUI
import org.apache.spark.util.{CallSite, ClosureCleaner, MetadataCleaner, MetadataCleanerType, TimeStampedWeakValueHashMap, Utils}

Expand Down Expand Up @@ -825,7 +826,7 @@ class SparkContext(config: SparkConf) extends Logging {
}

/** The version of Spark on which this application is running. */
def version = SparkContext.SPARK_VERSION
def version = SPARK_VERSION

/**
* Return a map from the slave to the max memory available for caching and the remaining
Expand Down Expand Up @@ -1297,8 +1298,6 @@ class SparkContext(config: SparkConf) extends Logging {
*/
object SparkContext extends Logging {

private[spark] val SPARK_VERSION = "1.2.0-SNAPSHOT"

private[spark] val SPARK_JOB_DESCRIPTION = "spark.job.description"

private[spark] val SPARK_JOB_GROUP_ID = "spark.jobGroup.id"
Expand Down
1 change: 1 addition & 0 deletions core/src/main/scala/org/apache/spark/package.scala
Original file line number Diff line number Diff line change
Expand Up @@ -44,4 +44,5 @@ package org.apache

package object spark {
  // Single source of truth for the Spark version string. Originally "for
  // package docs only", but as of SPARK-3273 it is read everywhere the
  // version is needed (SparkContext.version, EventLoggingListener, and the
  // pyspark/scala REPL welcome banners) instead of duplicating the literal.
  val SPARK_VERSION = "1.2.0-SNAPSHOT"
}
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ import org.json4s.jackson.JsonMethods._
import org.apache.spark.{Logging, SparkConf, SparkContext}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.io.CompressionCodec
import org.apache.spark.SPARK_VERSION
import org.apache.spark.util.{FileLogger, JsonProtocol, Utils}

/**
Expand Down Expand Up @@ -86,7 +87,7 @@ private[spark] class EventLoggingListener(
sparkConf.get("spark.io.compression.codec", CompressionCodec.DEFAULT_COMPRESSION_CODEC)
logger.newFile(COMPRESSION_CODEC_PREFIX + codec)
}
logger.newFile(SPARK_VERSION_PREFIX + SparkContext.SPARK_VERSION)
logger.newFile(SPARK_VERSION_PREFIX + SPARK_VERSION)
logger.newFile(LOG_PREFIX + logger.fileIndex)
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ import org.scalatest.{BeforeAndAfter, FunSuite}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.io.CompressionCodec
import org.apache.spark.SPARK_VERSION
import org.apache.spark.util.{JsonProtocol, Utils}

import java.io.File
Expand Down Expand Up @@ -196,7 +197,7 @@ class EventLoggingListenerSuite extends FunSuite with BeforeAndAfter {

def assertInfoCorrect(info: EventLoggingInfo, loggerStopped: Boolean) {
assert(info.logPaths.size > 0)
assert(info.sparkVersion === SparkContext.SPARK_VERSION)
assert(info.sparkVersion === SPARK_VERSION)
assert(info.compressionCodec.isDefined === compressionCodec.isDefined)
info.compressionCodec.foreach { codec =>
assert(compressionCodec.isDefined)
Expand Down Expand Up @@ -381,7 +382,7 @@ class EventLoggingListenerSuite extends FunSuite with BeforeAndAfter {
private def assertSparkVersionIsValid(logFiles: Array[FileStatus]) {
  // The event-log directory must contain exactly-one marker file whose name
  // encodes the Spark version; find it by name among the listed statuses.
  val file = logFiles.map(_.getPath.getName).find(EventLoggingListener.isSparkVersionFile)
  assert(file.isDefined)
  // The version parsed back out of the file name must match the build-wide
  // constant in the `org.apache.spark` package object (SPARK-3273: read the
  // version from one place, not SparkContext.SPARK_VERSION).
  assert(EventLoggingListener.parseSparkVersion(file.get) === SPARK_VERSION)
}

private def assertCompressionCodecIsValid(logFiles: Array[FileStatus], compressionCodec: String) {
Expand Down
4 changes: 2 additions & 2 deletions python/pyspark/shell.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,9 @@
____ __
/ __/__ ___ _____/ /__
_\ \/ _ \/ _ `/ __/ '_/
/__ / .__/\_,_/_/ /_/\_\ version 1.0.0-SNAPSHOT
/__ / .__/\_,_/_/ /_/\_\ version %s
/_/
""")
""" % sc.version)
print("Using Python version %s (%s, %s)" % (
platform.python_version(),
platform.python_build()[0],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@ import scala.reflect.internal.util.Position
import scala.util.control.Exception.ignoring
import scala.tools.nsc.util.stackTraceString

import org.apache.spark.SPARK_VERSION

/**
* Machinery for the asynchronous initialization of the repl.
*/
Expand All @@ -26,9 +28,9 @@ trait SparkILoopInit {
____ __
/ __/__ ___ _____/ /__
_\ \/ _ \/ _ `/ __/ '_/
/___/ .__/\_,_/_/ /_/\_\ version 1.0.0-SNAPSHOT
/___/ .__/\_,_/_/ /_/\_\ version %s
/_/
""")
""".format(SPARK_VERSION))
import Properties._
val welcomeMsg = "Using Scala %s (%s, Java %s)".format(
versionString, javaVmName, javaVersion)
Expand Down

0 comments on commit 21a1e1b

Please sign in to comment.