
Commit ee6a0e1

[SPARK-7927] whitespace fixes for Hive and ThriftServer.
So we can enable a whitespace enforcement rule in the style checker to save code review time.

Author: Reynold Xin <rxin@databricks.com>

Closes #6478 from rxin/whitespace-hive and squashes the following commits:

e01b0e0 [Reynold Xin] Fixed tests.
a3bba22 [Reynold Xin] [SPARK-7927] whitespace fixes for Hive and ThriftServer.
1 parent 3af0b31 commit ee6a0e1

14 files changed: +43 −39 lines
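
The per-file diffs below all converge on the same inline-spacing conventions: a single space after ':' in type ascriptions, after ',' between arguments and type parameters, after 'if', and around ':' in the varargs expansion ': _*'. As a minimal, hedged illustration of those conventions only (the object and member names here are made up for the example and are not part of the commit):

// Hedged sketch of the spacing style the diffs converge on; all names are hypothetical.
object WhitespaceStyleExamples {
  // single space after ':' in a type ascription (was e.g. `start:Long`)
  val start: Long = System.currentTimeMillis()

  // space after ',' in type arguments and after `if` (was e.g. `Map[String,String]`, `if(...)`)
  def describe(xs: Map[String, String]): String =
    if (xs.isEmpty) "empty" else xs.mkString(", ")

  // spaces around ':' when expanding a Seq into Java varargs (was e.g. `names :_*`)
  def asList(names: Seq[String]): java.util.List[String] =
    java.util.Arrays.asList(names : _*)
}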

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala

Lines changed: 4 additions & 4 deletions
@@ -43,7 +43,7 @@ import org.apache.spark.util.Utils
 private[hive] object SparkSQLCLIDriver {
 private var prompt = "spark-sql"
 private var continuedPrompt = "".padTo(prompt.length, ' ')
-private var transport:TSocket = _
+private var transport: TSocket = _

 installSignalHandler()

@@ -276,13 +276,13 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {

 driver.init()
 val out = sessionState.out
-val start:Long = System.currentTimeMillis()
+val start: Long = System.currentTimeMillis()
 if (sessionState.getIsVerbose) {
 out.println(cmd)
 }
 val rc = driver.run(cmd)
 val end = System.currentTimeMillis()
-val timeTaken:Double = (end - start) / 1000.0
+val timeTaken: Double = (end - start) / 1000.0

 ret = rc.getResponseCode
 if (ret != 0) {
@@ -310,7 +310,7 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
 res.clear()
 }
 } catch {
-case e:IOException =>
+case e: IOException =>
 console.printError(
 s"""Failed with exception ${e.getClass.getName}: ${e.getMessage}
 |${org.apache.hadoop.util.StringUtils.stringifyException(e)}

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/ui/ThriftServerPage.scala

Lines changed: 3 additions & 3 deletions
@@ -77,15 +77,15 @@ private[ui] class ThriftServerPage(parent: ThriftServerTab) extends WebUIPage(""
 [{id}]
 </a>
 }
-val detail = if(info.state == ExecutionState.FAILED) info.detail else info.executePlan
+val detail = if (info.state == ExecutionState.FAILED) info.detail else info.executePlan
 <tr>
 <td>{info.userName}</td>
 <td>
 {jobLink}
 </td>
 <td>{info.groupId}</td>
 <td>{formatDate(info.startTimestamp)}</td>
-<td>{if(info.finishTimestamp > 0) formatDate(info.finishTimestamp)}</td>
+<td>{if (info.finishTimestamp > 0) formatDate(info.finishTimestamp)}</td>
 <td>{formatDurationOption(Some(info.totalTime))}</td>
 <td>{info.statement}</td>
 <td>{info.state}</td>
@@ -150,7 +150,7 @@ private[ui] class ThriftServerPage(parent: ThriftServerTab) extends WebUIPage(""
 <td> {session.ip} </td>
 <td> <a href={sessionLink}> {session.sessionId} </a> </td>
 <td> {formatDate(session.startTimestamp)} </td>
-<td> {if(session.finishTimestamp > 0) formatDate(session.finishTimestamp)} </td>
+<td> {if (session.finishTimestamp > 0) formatDate(session.finishTimestamp)} </td>
 <td> {formatDurationOption(Some(session.totalTime))} </td>
 <td> {session.totalExecution.toString} </td>
 </tr>

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/ui/ThriftServerSessionPage.scala

Lines changed: 1 addition & 1 deletion
@@ -87,7 +87,7 @@ private[ui] class ThriftServerSessionPage(parent: ThriftServerTab)
 [{id}]
 </a>
 }
-val detail = if(info.state == ExecutionState.FAILED) info.detail else info.executePlan
+val detail = if (info.state == ExecutionState.FAILED) info.detail else info.executePlan
 <tr>
 <td>{info.userName}</td>
 <td>

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/UISeleniumSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -73,7 +73,7 @@ class UISeleniumSuite
 }

 ignore("thrift server ui test") {
-withJdbcStatement(statement =>{
+withJdbcStatement { statement =>
 val baseURL = s"http://localhost:$uiPort"

 val queries = Seq(
@@ -97,6 +97,6 @@ class UISeleniumSuite
 findAll(cssSelector("""ul table tbody tr td""")).map(_.text).toList should contain (line)
 }
 }
-})
+}
 }
 }
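
Besides spacing, this hunk also switches a parenthesized lambda to Scala's brace syntax for a call whose only argument is a function. A small hedged sketch of the two call shapes, using a made-up helper (`withWidget` below is illustrative and merely stands in for a fixture like `withJdbcStatement`):

object ClosureSyntaxExample {
  // Hypothetical helper that hands a value to the supplied body, standing in for a test fixture.
  def withWidget(body: String => Unit): Unit = body("widget")

  def main(args: Array[String]): Unit = {
    withWidget(w => { println(w) })  // old shape: the lambda is wrapped in parentheses
    withWidget { w => println(w) }   // new shape: brace syntax, reads like a control structure
  }
}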

sql/hive/src/main/scala/org/apache/spark/sql/hive/ExtendedHiveQlParser.scala

Lines changed: 3 additions & 3 deletions
@@ -29,10 +29,10 @@ import org.apache.spark.sql.hive.execution.{AddJar, AddFile, HiveNativeCommand}
 private[hive] class ExtendedHiveQlParser extends AbstractSparkSQLParser {
 // Keyword is a convention with AbstractSparkSQLParser, which will scan all of the `Keyword`
 // properties via reflection the class in runtime for constructing the SqlLexical object
-protected val ADD = Keyword("ADD")
-protected val DFS = Keyword("DFS")
+protected val ADD = Keyword("ADD")
+protected val DFS = Keyword("DFS")
 protected val FILE = Keyword("FILE")
-protected val JAR = Keyword("JAR")
+protected val JAR = Keyword("JAR")

 protected lazy val start: Parser[LogicalPlan] = dfs | addJar | addFile | hiveQl

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala

Lines changed: 2 additions & 2 deletions
@@ -530,7 +530,7 @@ private[hive] object HiveContext {
 val propMap: HashMap[String, String] = HashMap()
 // We have to mask all properties in hive-site.xml that relates to metastore data source
 // as we used a local metastore here.
-HiveConf.ConfVars.values().foreach { confvar =>
+HiveConf.ConfVars.values().foreach { confvar =>
 if (confvar.varname.contains("datanucleus") || confvar.varname.contains("jdo")) {
 propMap.put(confvar.varname, confvar.defaultVal)
 }
@@ -553,7 +553,7 @@
 }.mkString("{", ",", "}")
 case (seq: Seq[_], ArrayType(typ, _)) =>
 seq.map(v => (v, typ)).map(toHiveStructString).mkString("[", ",", "]")
-case (map: Map[_,_], MapType(kType, vType, _)) =>
+case (map: Map[_, _], MapType(kType, vType, _)) =>
 map.map {
 case (key, value) =>
 toHiveStructString((key, kType)) + ":" + toHiveStructString((value, vType))

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala

Lines changed: 5 additions & 5 deletions
@@ -335,7 +335,7 @@ private[hive] trait HiveInspectors {
 val allRefs = si.getAllStructFieldRefs
 new GenericRow(
 allRefs.map(r =>
-unwrap(si.getStructFieldData(data,r), r.getFieldObjectInspector)).toArray)
+unwrap(si.getStructFieldData(data, r), r.getFieldObjectInspector)).toArray)
 }


@@ -561,8 +561,8 @@
 case DecimalType() => PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector
 case StructType(fields) =>
 ObjectInspectorFactory.getStandardStructObjectInspector(
-java.util.Arrays.asList(fields.map(f => f.name) :_*),
-java.util.Arrays.asList(fields.map(f => toInspector(f.dataType)) :_*))
+java.util.Arrays.asList(fields.map(f => f.name) : _*),
+java.util.Arrays.asList(fields.map(f => toInspector(f.dataType)) : _*))
 }

 /**
@@ -677,8 +677,8 @@
 getListTypeInfo(elemType.toTypeInfo)
 case StructType(fields) =>
 getStructTypeInfo(
-java.util.Arrays.asList(fields.map(_.name) :_*),
-java.util.Arrays.asList(fields.map(_.dataType.toTypeInfo) :_*))
+java.util.Arrays.asList(fields.map(_.name) : _*),
+java.util.Arrays.asList(fields.map(_.dataType.toTypeInfo) : _*))
 case MapType(keyType, valueType, _) =>
 getMapTypeInfo(keyType.toTypeInfo, valueType.toTypeInfo)
 case BinaryType => binaryTypeInfo

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala

Lines changed: 7 additions & 3 deletions
@@ -546,13 +546,17 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
 * UNIMPLEMENTED: It needs to be decided how we will persist in-memory tables to the metastore.
 * For now, if this functionality is desired mix in the in-memory [[OverrideCatalog]].
 */
-override def registerTable(tableIdentifier: Seq[String], plan: LogicalPlan): Unit = ???
+override def registerTable(tableIdentifier: Seq[String], plan: LogicalPlan): Unit = {
+  throw new UnsupportedOperationException
+}

 /**
 * UNIMPLEMENTED: It needs to be decided how we will persist in-memory tables to the metastore.
 * For now, if this functionality is desired mix in the in-memory [[OverrideCatalog]].
 */
-override def unregisterTable(tableIdentifier: Seq[String]): Unit = ???
+override def unregisterTable(tableIdentifier: Seq[String]): Unit = {
+  throw new UnsupportedOperationException
+}

 override def unregisterAllTables(): Unit = {}
 }
@@ -725,7 +729,7 @@ private[hive] case class MetastoreRelation
 val output = attributes ++ partitionKeys

 /** An attribute map that can be used to lookup original attributes based on expression id. */
-val attributeMap = AttributeMap(output.map(o => (o,o)))
+val attributeMap = AttributeMap(output.map(o => (o, o)))

 /** An attribute map for determining the ordinal for non-partition columns. */
 val columnOrdinals = AttributeMap(attributes.zipWithIndex)
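
Beyond whitespace, this file (and HiveQl.scala below) also replaces `???` method bodies with an explicit exception. For context, a hedged sketch of the behavioral difference only; the commit message does not spell out a motivation, and the object below is made up for illustration:

object UnimplementedStyles {
  // Scala's Predef.??? throws scala.NotImplementedError, which extends Error, not Exception.
  def viaTripleQuestionMark(): Unit = ???

  // The replacement throws UnsupportedOperationException, a RuntimeException, so handlers
  // written as `catch { case e: Exception => ... }` will see it.
  def viaUnsupportedOperation(): Unit = {
    throw new UnsupportedOperationException
  }
}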

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala

Lines changed: 5 additions & 4 deletions
@@ -665,7 +665,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
 HiveColumn(field.getName, field.getType, field.getComment)
 })
 }
-case Token("TOK_TABLEROWFORMAT", Token("TOK_SERDEPROPS", child :: Nil) :: Nil)=>
+case Token("TOK_TABLEROWFORMAT", Token("TOK_SERDEPROPS", child :: Nil) :: Nil) =>
 val serdeParams = new java.util.HashMap[String, String]()
 child match {
 case Token("TOK_TABLEROWFORMATFIELD", rowChild1 :: rowChild2) =>
@@ -775,7 +775,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C

 // Support "TRUNCATE TABLE table_name [PARTITION partition_spec]"
 case Token("TOK_TRUNCATETABLE",
-Token("TOK_TABLE_PARTITION",table)::Nil) => NativePlaceholder
+Token("TOK_TABLE_PARTITION", table) :: Nil) => NativePlaceholder

 case Token("TOK_QUERY", queryArgs)
 if Seq("TOK_FROM", "TOK_INSERT").contains(queryArgs.head.getText) =>
@@ -1151,7 +1151,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
 case Seq(false, false) => Inner
 }.toBuffer

-val joinedTables = tables.reduceLeft(Join(_,_, Inner, None))
+val joinedTables = tables.reduceLeft(Join(_, _, Inner, None))

 // Must be transform down.
 val joinedResult = joinedTables transform {
@@ -1171,7 +1171,8 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
 // worth the number of hacks that will be required to implement it. Namely, we need to add
 // some sort of mapped star expansion that would expand all child output row to be similarly
 // named output expressions where some aggregate expression has been applied (i.e. First).
-??? // Aggregate(groups, Star(None, First(_)) :: Nil, joinedResult)
+// Aggregate(groups, Star(None, First(_)) :: Nil, joinedResult)
+throw new UnsupportedOperationException

 case Token(allJoinTokens(joinToken),
 relation1 ::

sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala

Lines changed: 3 additions & 4 deletions
@@ -194,10 +194,9 @@ case class InsertIntoHiveTable(
 if (partition.nonEmpty) {

 // loadPartition call orders directories created on the iteration order of the this map
-val orderedPartitionSpec = new util.LinkedHashMap[String,String]()
-table.hiveQlTable.getPartCols().foreach{
-entry=>
-orderedPartitionSpec.put(entry.getName,partitionSpec.get(entry.getName).getOrElse(""))
+val orderedPartitionSpec = new util.LinkedHashMap[String, String]()
+table.hiveQlTable.getPartCols().foreach { entry =>
+orderedPartitionSpec.put(entry.getName, partitionSpec.get(entry.getName).getOrElse(""))
 }
 val partVals = MetaStoreUtils.getPvals(table.hiveQlTable.getPartCols, partitionSpec)
