
[SPARK-4225][SQL] Resorts to SparkContext.version to inspect Spark version #3105

Closed · 1 commit
24 changes: 7 additions & 17 deletions core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -21,10 +21,8 @@ import java.io._
 import java.lang.management.ManagementFactory
 import java.net._
 import java.nio.ByteBuffer
-import java.util.jar.Attributes.Name
-import java.util.{Properties, Locale, Random, UUID}
-import java.util.concurrent.{ThreadFactory, ConcurrentHashMap, Executors, ThreadPoolExecutor}
-import java.util.jar.{Manifest => JarManifest}
+import java.util.concurrent.{ConcurrentHashMap, Executors, ThreadFactory, ThreadPoolExecutor}
+import java.util.{Locale, Properties, Random, UUID}
 
 import scala.collection.JavaConversions._
 import scala.collection.Map
@@ -38,11 +36,11 @@ import com.google.common.io.{ByteStreams, Files}
 import com.google.common.util.concurrent.ThreadFactoryBuilder
 import org.apache.commons.lang3.SystemUtils
 import org.apache.hadoop.conf.Configuration
-import org.apache.log4j.PropertyConfigurator
 import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
+import org.apache.log4j.PropertyConfigurator
 import org.eclipse.jetty.util.MultiException
 import org.json4s._
-import tachyon.client.{TachyonFile,TachyonFS}
+import tachyon.client.{TachyonFS, TachyonFile}
 
 import org.apache.spark._
 import org.apache.spark.deploy.SparkHadoopUtil
@@ -352,8 +350,8 @@ private[spark] object Utils extends Logging {
    * Download a file to target directory. Supports fetching the file in a variety of ways,
    * including HTTP, HDFS and files on a standard filesystem, based on the URL parameter.
    *
-   * If `useCache` is true, first attempts to fetch the file to a local cache that's shared
-   * across executors running the same application. `useCache` is used mainly for
+   * If `useCache` is true, first attempts to fetch the file to a local cache that's shared
+   * across executors running the same application. `useCache` is used mainly for
    * the executors, and not in local mode.
    *
    * Throws SparkException if the target file already exists and has different contents than
@@ -400,7 +398,7 @@
     } else {
       doFetchFile(url, targetDir, fileName, conf, securityMgr, hadoopConf)
     }
-
+
     // Decompress the file if it's a .tar or .tar.gz
     if (fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz")) {
       logInfo("Untarring " + fileName)
@@ -1775,13 +1773,6 @@ private[spark] object Utils extends Logging {
     s"$libraryPathEnvName=$libraryPath$ampersand"
   }
 
-  lazy val sparkVersion =
-    SparkContext.jarOfObject(this).map { path =>
-      val manifestUrl = new URL(s"jar:file:$path!/META-INF/MANIFEST.MF")
-      val manifest = new JarManifest(manifestUrl.openStream())
-      manifest.getMainAttributes.getValue(Name.IMPLEMENTATION_VERSION)
-    }.getOrElse("Unknown")
-
   /**
    * Return the value of a config either through the SparkConf or the Hadoop configuration
    * if this is Yarn mode. In the latter case, this defaults to the value set through SparkConf
@@ -1795,7 +1786,6 @@ private[spark] object Utils extends Logging {
       sparkValue
     }
   }
-
 }
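For context, the deleted `Utils.sparkVersion` located the JAR from which `Utils` was loaded and read the `Implementation-Version` attribute from that JAR's manifest, falling back to "Unknown" whenever no JAR could be found, e.g. when running from compiled class files under sbt or an IDE. A minimal self-contained sketch of that pattern follows; the object and method names are illustrative, not Spark's own:

```scala
import java.net.URL
import java.util.jar.Attributes.Name
import java.util.jar.{Manifest => JarManifest}

import scala.util.Try

object ManifestVersion {
  // Read Implementation-Version from the manifest of the JAR that contains
  // `clazz`. Yields None when the class was not loaded from a JAR, or when
  // the attribute is absent -- the failure modes that motivated this PR.
  def fromJarOf(clazz: Class[_]): Option[String] =
    for {
      source   <- Option(clazz.getProtectionDomain.getCodeSource)
      path      = source.getLocation.getPath
      manifest <- Try(new JarManifest(
                    new URL(s"jar:file:$path!/META-INF/MANIFEST.MF").openStream())).toOption
      version  <- Option(manifest.getMainAttributes.getValue(Name.IMPLEMENTATION_VERSION))
    } yield version
}
```

The answer depends on how the classes happen to be packaged at run time rather than on the build itself, which is why the Thrift server could end up reporting "Unknown" as its DBMS version in unpackaged deployments.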
sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala
@@ -17,18 +17,16 @@
 
 package org.apache.spark.sql.hive.thriftserver
 
-import java.util.jar.Attributes.Name
-
-import scala.collection.JavaConversions._
-
 import java.io.IOException
 import java.util.{List => JList}
 import javax.security.auth.login.LoginException
 
+import scala.collection.JavaConversions._
+
 import org.apache.commons.logging.Log
-import org.apache.hadoop.security.UserGroupInformation
 import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.shims.ShimLoader
+import org.apache.hadoop.security.UserGroupInformation
 import org.apache.hive.service.Service.STATE
 import org.apache.hive.service.auth.HiveAuthFactory
 import org.apache.hive.service.cli._
@@ -50,7 +48,7 @@ private[hive] class SparkSQLCLIService(hiveContext: HiveContext)
     addService(sparkSqlSessionManager)
     var sparkServiceUGI: UserGroupInformation = null
 
-    if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
+    if (ShimLoader.getHadoopShims.isSecurityEnabled) {
       try {
         HiveAuthFactory.loginFromKeytab(hiveConf)
         sparkServiceUGI = ShimLoader.getHadoopShims.getUGIForConf(hiveConf)
@@ -68,7 +66,7 @@ private[hive] class SparkSQLCLIService(hiveContext: HiveContext)
     getInfoType match {
       case GetInfoType.CLI_SERVER_NAME => new GetInfoValue("Spark SQL")
       case GetInfoType.CLI_DBMS_NAME => new GetInfoValue("Spark SQL")
-      case GetInfoType.CLI_DBMS_VER => new GetInfoValue(Utils.sparkVersion)
+      case GetInfoType.CLI_DBMS_VER => new GetInfoValue(hiveContext.sparkContext.version)
       case _ => super.getInfo(sessionHandle, getInfoType)
     }
   }
Expand Down