Skip to content

Commit e8f1846

Browse files
committed
fix commands that need hive to exec
1 parent 715a19d commit e8f1846

File tree

4 files changed

+48
-24
lines changed

4 files changed

+48
-24
lines changed

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -288,8 +288,11 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
288288
val tokens: Array[String] = cmd_trimmed.split("\\s+")
289289
val cmd_1: String = cmd_trimmed.substring(tokens(0).length()).trim()
290290
if (cmd_lower.equals("quit") ||
291-
cmd_lower.equals("exit") ||
292-
tokens(0).toLowerCase(Locale.ENGLISH).equals("source") ||
291+
cmd_lower.equals("exit")) {
292+
sessionState.close()
293+
System.exit(0)
294+
}
295+
if (tokens(0).toLowerCase(Locale.ENGLISH).equals("source") ||
293296
cmd_trimmed.startsWith("!") ||
294297
tokens(0).toLowerCase.equals("list") ||
295298
isRemoteMode) {

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -234,4 +234,9 @@ class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
234234
-> "Error in query: Table not found: nonexistent_table;"
235235
)
236236
}
237+
238+
test("SPARK-11624 Spark SQL CLI should set sessionState only once") {
239+
runCliWithin(2.minute, Seq())(
240+
"!echo \"This is a test for Spark-11624\"" -> "This is a test for Spark-11624")
241+
}
237242
}

sql/hive/pom.xml

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,12 @@
7272
<artifactId>protobuf-java</artifactId>
7373
<version>${protobuf.version}</version>
7474
</dependency>
75+
-->
76+
<dependency>
77+
<groupId>${hive.group}</groupId>
78+
<artifactId>hive-cli</artifactId>
79+
</dependency>
80+
<!--
7581
<dependency>
7682
<groupId>${hive.group}</groupId>
7783
<artifactId>hive-common</artifactId>

sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala

Lines changed: 32 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -24,14 +24,14 @@ import scala.collection.JavaConverters._
2424
import scala.language.reflectiveCalls
2525

2626
import org.apache.hadoop.fs.Path
27+
import org.apache.hadoop.hive.cli.CliSessionState
2728
import org.apache.hadoop.hive.conf.HiveConf
2829
import org.apache.hadoop.hive.metastore.{TableType => HTableType}
2930
import org.apache.hadoop.hive.metastore.api.{Database, FieldSchema}
3031
import org.apache.hadoop.hive.ql.{metadata, Driver}
3132
import org.apache.hadoop.hive.ql.metadata.Hive
3233
import org.apache.hadoop.hive.ql.processors._
3334
import org.apache.hadoop.hive.ql.session.SessionState
34-
import org.apache.hadoop.hive.shims.{HadoopShims, ShimLoader}
3535
import org.apache.hadoop.security.UserGroupInformation
3636

3737
import org.apache.spark.{Logging, SparkConf, SparkException}
@@ -104,29 +104,39 @@ private[hive] class HiveClientImpl(
104104
}
105105

106106
val ret = try {
107-
val initialConf = new HiveConf(classOf[SessionState])
108-
// HiveConf is a Hadoop Configuration, which has a field of classLoader and
109-
// the initial value will be the current thread's context class loader
110-
// (i.e. initClassLoader at here).
111-
// We call initialConf.setClassLoader(initClassLoader) at here to make
112-
// this action explicit.
113-
initialConf.setClassLoader(initClassLoader)
114-
config.foreach { case (k, v) =>
115-
if (k.toLowerCase.contains("password")) {
116-
logDebug(s"Hive Config: $k=xxx")
117-
} else {
118-
logDebug(s"Hive Config: $k=$v")
107+
// originalState will be created if it does not exist, so it will never be null
108+
val originalState = SessionState.get()
109+
if (originalState.isInstanceOf[CliSessionState]) {
110+
// In `SparkSQLCLIDriver`, we have already started a `CliSessionState`,
111+
// which contains information like configurations from command line. Later
112+
// we call `SparkSQLEnv.init()` there, which would run into this part again.
113+
// so we should keep `conf` and reuse the existing instance of `CliSessionState`.
114+
originalState
115+
} else {
116+
val initialConf = new HiveConf(classOf[SessionState])
117+
// HiveConf is a Hadoop Configuration, which has a field of classLoader and
118+
// the initial value will be the current thread's context class loader
119+
// (i.e. initClassLoader at here).
120+
// We call initialConf.setClassLoader(initClassLoader) at here to make
121+
// this action explicit.
122+
initialConf.setClassLoader(initClassLoader)
123+
config.foreach { case (k, v) =>
124+
if (k.toLowerCase.contains("password")) {
125+
logDebug(s"Hive Config: $k=xxx")
126+
} else {
127+
logDebug(s"Hive Config: $k=$v")
128+
}
129+
initialConf.set(k, v)
119130
}
120-
initialConf.set(k, v)
121-
}
122-
val state = new SessionState(initialConf)
123-
if (clientLoader.cachedHive != null) {
124-
Hive.set(clientLoader.cachedHive.asInstanceOf[Hive])
131+
val state = new SessionState(initialConf)
132+
if (clientLoader.cachedHive != null) {
133+
Hive.set(clientLoader.cachedHive.asInstanceOf[Hive])
134+
}
135+
SessionState.start(state)
136+
state.out = new PrintStream(outputBuffer, true, "UTF-8")
137+
state.err = new PrintStream(outputBuffer, true, "UTF-8")
138+
state
125139
}
126-
SessionState.start(state)
127-
state.out = new PrintStream(outputBuffer, true, "UTF-8")
128-
state.err = new PrintStream(outputBuffer, true, "UTF-8")
129-
state
130140
} finally {
131141
Thread.currentThread().setContextClassLoader(original)
132142
}

0 commit comments

Comments
 (0)