
Commit 74dd5c7

revise naming
1 parent 751495d commit 74dd5c7

4 files changed: +7 -7 lines changed


core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 1 addition & 1 deletion
@@ -2909,7 +2909,7 @@ private[spark] object Utils extends Logging {
    * Convert a sequence of [[Path]] to a metadata string. When the length of metadata string
    * exceeds `stopAppendingThreshold`, stop appending paths for saving memory.
    */
-  def pathsToMetadata(paths: Seq[Path], stopAppendingThreshold: Int): String = {
+  def buildLocationMetadata(paths: Seq[Path], stopAppendingThreshold: Int): String = {
     var metadata = "["
     var index: Int = 0
     while (index < paths.length && metadata.length <= stopAppendingThreshold) {
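
The hunk cuts off inside the method body. For context, here is a minimal sketch of how the renamed helper plausibly completes, reconstructed from the doc comment and the assertions in UtilsSuite below; the separator and closing bracket are assumptions consistent with those tests, not lines shown in this commit.

    // Sketch only: reconstructed from the doc comment and the UtilsSuite expectations,
    // not copied from the commit. Requires org.apache.hadoop.fs.Path on the classpath.
    def buildLocationMetadata(paths: Seq[Path], stopAppendingThreshold: Int): String = {
      var metadata = "["
      var index: Int = 0
      // The length check runs before each append, so the result may overshoot the
      // threshold by one path before appending stops.
      while (index < paths.length && metadata.length <= stopAppendingThreshold) {
        if (index > 0) metadata += ", "      // assumed separator, matching "[path0, path1]"
        metadata += paths(index).toString    // assumed append of the path's string form
        index += 1
      }
      metadata + "]"                         // assumed closing bracket
    }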

core/src/test/scala/org/apache/spark/util/UtilsSuite.scala

Lines changed: 4 additions & 4 deletions
@@ -1304,10 +1304,10 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
 
   test("pathsToMetadata") {
     val paths = (0 to 4).map(i => new Path(s"path$i"))
-    assert(Utils.pathsToMetadata(paths, 1) == "[path0]")
-    assert(Utils.pathsToMetadata(paths, 10) == "[path0, path1]")
-    assert(Utils.pathsToMetadata(paths, 15) == "[path0, path1, path2]")
-    assert(Utils.pathsToMetadata(paths, 20) == "[path0, path1, path2, path3]")
+    assert(Utils.buildLocationMetadata(paths, 1) == "[path0]")
+    assert(Utils.buildLocationMetadata(paths, 10) == "[path0, path1]")
+    assert(Utils.buildLocationMetadata(paths, 15) == "[path0, path1, path2]")
+    assert(Utils.buildLocationMetadata(paths, 20) == "[path0, path1, path2, path3]")
   }
 }

sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala

Lines changed: 1 addition & 1 deletion
@@ -338,7 +338,7 @@ case class FileSourceScanExec(
     val location = relation.location
     val locationDesc =
       location.getClass.getSimpleName +
-        Utils.pathsToMetadata(location.rootPaths, maxMetadataValueLength)
+        Utils.buildLocationMetadata(location.rootPaths, maxMetadataValueLength)
     val metadata =
       Map(
         "Format" -> relation.fileFormat.toString,

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileScan.scala

Lines changed: 1 addition & 1 deletion
@@ -97,7 +97,7 @@ trait FileScan extends Scan with Batch with SupportsReportStatistics with Logging
     val maxMetadataValueLength = 100
     val locationDesc =
       fileIndex.getClass.getSimpleName +
-        Utils.pathsToMetadata(fileIndex.rootPaths, maxMetadataValueLength)
+        Utils.buildLocationMetadata(fileIndex.rootPaths, maxMetadataValueLength)
     val metadata: Map[String, String] = Map(
       "ReadSchema" -> readDataSchema.catalogString,
       "PartitionFilters" -> seqToString(partitionFilters),
