Skip to content

Commit d188b8b

Browse files
scwf authored and rxin committed
[SQL][Minor] rename DataTypeParser.apply to DataTypeParser.parse
rename DataTypeParser.apply to DataTypeParser.parse to make it more clear and readable. /cc rxin Author: wangfei <wangfei1@huawei.com> Closes apache#5710 from scwf/apply and squashes the following commits: c319977 [wangfei] rename apply to parse
1 parent ca55dc9 commit d188b8b

File tree

5 files changed

+6
-6
lines changed

5 files changed

+6
-6
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@ object PhysicalOperation extends PredicateHelper {
9292
}
9393

9494
def collectAliases(fields: Seq[Expression]): Map[Attribute, Expression] = fields.collect {
95-
case a @ Alias(child, _) => a.toAttribute.asInstanceOf[Attribute] -> child
95+
case a @ Alias(child, _) => a.toAttribute -> child
9696
}.toMap
9797

9898
def substitute(aliases: Map[Attribute, Expression])(expr: Expression): Expression = {

sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -108,7 +108,7 @@ private[sql] object DataTypeParser {
108108
override val lexical = new SqlLexical
109109
}
110110

111-
def apply(dataTypeString: String): DataType = dataTypeParser.toDataType(dataTypeString)
111+
def parse(dataTypeString: String): DataType = dataTypeParser.toDataType(dataTypeString)
112112
}
113113

114114
/** The exception thrown from the [[DataTypeParser]]. */

sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeParserSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,13 +23,13 @@ class DataTypeParserSuite extends FunSuite {
2323

2424
def checkDataType(dataTypeString: String, expectedDataType: DataType): Unit = {
2525
test(s"parse ${dataTypeString.replace("\n", "")}") {
26-
assert(DataTypeParser(dataTypeString) === expectedDataType)
26+
assert(DataTypeParser.parse(dataTypeString) === expectedDataType)
2727
}
2828
}
2929

3030
def unsupported(dataTypeString: String): Unit = {
3131
test(s"$dataTypeString is not supported") {
32-
intercept[DataTypeException](DataTypeParser(dataTypeString))
32+
intercept[DataTypeException](DataTypeParser.parse(dataTypeString))
3333
}
3434
}
3535

sql/core/src/main/scala/org/apache/spark/sql/Column.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -647,7 +647,7 @@ class Column(protected[sql] val expr: Expression) extends Logging {
647647
*
648648
* @group expr_ops
649649
*/
650-
def cast(to: String): Column = cast(DataTypeParser(to))
650+
def cast(to: String): Column = cast(DataTypeParser.parse(to))
651651

652652
/**
653653
* Returns an ordering used in sorting.

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -871,7 +871,7 @@ private[hive] case class MetastoreRelation
871871

872872

873873
private[hive] object HiveMetastoreTypes {
874-
def toDataType(metastoreType: String): DataType = DataTypeParser(metastoreType)
874+
def toDataType(metastoreType: String): DataType = DataTypeParser.parse(metastoreType)
875875

876876
def toMetastoreType(dt: DataType): String = dt match {
877877
case ArrayType(elementType, _) => s"array<${toMetastoreType(elementType)}>"

0 commit comments

Comments (0)