Commit 2022ad4

[SPARK-5846] Correctly set job description and pool for SQL jobs
1 parent 9f31db0 commit 2022ad4

File tree

2 files changed: 8 additions & 8 deletions
  • sql/hive-thriftserver
    • v0.12.0/src/main/scala/org/apache/spark/sql/hive/thriftserver
    • v0.13.1/src/main/scala/org/apache/spark/sql/hive/thriftserver

sql/hive-thriftserver/v0.12.0/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim12.scala

Lines changed: 4 additions & 4 deletions
@@ -185,6 +185,10 @@ private[hive] class SparkExecuteStatementOperation(
   def run(): Unit = {
     logInfo(s"Running query '$statement'")
     setState(OperationState.RUNNING)
+    hiveContext.sparkContext.setJobDescription(statement)
+    sessionToActivePool.get(parentSession.getSessionHandle).foreach { pool =>
+      hiveContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
+    }
     try {
       result = hiveContext.sql(statement)
       logDebug(result.queryExecution.toString())
@@ -194,10 +198,6 @@ private[hive] class SparkExecuteStatementOperation(
           logInfo(s"Setting spark.scheduler.pool=$value for future statements in this session.")
         case _ =>
       }
-      hiveContext.sparkContext.setJobDescription(statement)
-      sessionToActivePool.get(parentSession.getSessionHandle).foreach { pool =>
-        hiveContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
-      }
       iter = {
         val useIncrementalCollect =
           hiveContext.getConf("spark.sql.thriftServer.incrementalCollect", "false").toBoolean
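
Both shims receive the same reordering: the job description and the "spark.scheduler.pool" local property are now set before hiveContext.sql(statement) runs, so they are already in place for the jobs that the statement itself launches. Below is a minimal standalone sketch of the two SparkContext calls the change relies on; the object name, master, pool name, and sample job are illustrative and not part of the commit.

import org.apache.spark.{SparkConf, SparkContext}

object PoolAndDescriptionSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("sketch").setMaster("local[2]"))

    // Label the jobs triggered below; the description is shown in the Spark UI.
    sc.setJobDescription("SELECT count(*) FROM example")
    // Route subsequent jobs from this thread to a scheduler pool ("sql-pool" is an illustrative name).
    sc.setLocalProperty("spark.scheduler.pool", "sql-pool")

    // Any job submitted from this thread from now on carries both properties,
    // which is why the commit sets them before executing the statement.
    sc.parallelize(1 to 100).count()

    sc.stop()
  }
}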

sql/hive-thriftserver/v0.13.1/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim13.scala

Lines changed: 4 additions & 4 deletions
@@ -156,6 +156,10 @@ private[hive] class SparkExecuteStatementOperation(
   def run(): Unit = {
     logInfo(s"Running query '$statement'")
     setState(OperationState.RUNNING)
+    hiveContext.sparkContext.setJobDescription(statement)
+    sessionToActivePool.get(parentSession.getSessionHandle).foreach { pool =>
+      hiveContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
+    }
     try {
       result = hiveContext.sql(statement)
       logDebug(result.queryExecution.toString())
@@ -165,10 +169,6 @@ private[hive] class SparkExecuteStatementOperation(
           logInfo(s"Setting spark.scheduler.pool=$value for future statements in this session.")
         case _ =>
       }
-      hiveContext.sparkContext.setJobDescription(statement)
-      sessionToActivePool.get(parentSession.getSessionHandle).foreach { pool =>
-        hiveContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
-      }
       iter = {
         val useIncrementalCollect =
           hiveContext.getConf("spark.sql.thriftServer.incrementalCollect", "false").toBoolean

0 commit comments
