Skip to content

Commit

Permalink
[SPARK-26140] followup: rename ShuffleMetricsReporter
Browse files Browse the repository at this point in the history
## What changes were proposed in this pull request?
In apache#23105, because I was working on two parallel PRs at once, I mistakenly committed the copy of the PR that named the interface ShuffleMetricsReporter, rather than the intended name, ShuffleReadMetricsReporter. This patch fixes that.

## How was this patch tested?
This should be fine as long as compilation passes.

Closes apache#23147 from rxin/ShuffleReadMetricsReporter.

Authored-by: Reynold Xin <rxin@databricks.com>
Signed-off-by: gatorsmile <gatorsmile@gmail.com>
  • Loading branch information
rxin authored and gatorsmile committed Nov 27, 2018
1 parent 9deaa72 commit c995e07
Show file tree
Hide file tree
Showing 6 changed files with 7 additions and 40 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
package org.apache.spark.executor

import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.shuffle.ShuffleMetricsReporter
import org.apache.spark.shuffle.ShuffleReadMetricsReporter
import org.apache.spark.util.LongAccumulator


Expand Down Expand Up @@ -130,7 +130,7 @@ class ShuffleReadMetrics private[spark] () extends Serializable {
* shuffle dependency, and all temporary metrics will be merged into the [[ShuffleReadMetrics]] at
* last.
*/
private[spark] class TempShuffleReadMetrics extends ShuffleMetricsReporter {
private[spark] class TempShuffleReadMetrics extends ShuffleReadMetricsReporter {
private[this] var _remoteBlocksFetched = 0L
private[this] var _localBlocksFetched = 0L
private[this] var _remoteBytesRead = 0L
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ private[spark] class BlockStoreShuffleReader[K, C](
startPartition: Int,
endPartition: Int,
context: TaskContext,
readMetrics: ShuffleMetricsReporter,
readMetrics: ShuffleReadMetricsReporter,
serializerManager: SerializerManager = SparkEnv.get.serializerManager,
blockManager: BlockManager = SparkEnv.get.blockManager,
mapOutputTracker: MapOutputTracker = SparkEnv.get.mapOutputTracker)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ private[spark] trait ShuffleManager {
startPartition: Int,
endPartition: Int,
context: TaskContext,
metrics: ShuffleMetricsReporter): ShuffleReader[K, C]
metrics: ShuffleReadMetricsReporter): ShuffleReader[K, C]

/**
* Remove a shuffle's metadata from the ShuffleManager.
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ private[spark] class SortShuffleManager(conf: SparkConf) extends ShuffleManager
startPartition: Int,
endPartition: Int,
context: TaskContext,
metrics: ShuffleMetricsReporter): ShuffleReader[K, C] = {
metrics: ShuffleReadMetricsReporter): ShuffleReader[K, C] = {
new BlockStoreShuffleReader(
handle.asInstanceOf[BaseShuffleHandle[K, _, C]],
startPartition, endPartition, context, metrics)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ import org.apache.spark.internal.Logging
import org.apache.spark.network.buffer.{FileSegmentManagedBuffer, ManagedBuffer}
import org.apache.spark.network.shuffle._
import org.apache.spark.network.util.TransportConf
import org.apache.spark.shuffle.{FetchFailedException, ShuffleMetricsReporter}
import org.apache.spark.shuffle.{FetchFailedException, ShuffleReadMetricsReporter}
import org.apache.spark.util.Utils
import org.apache.spark.util.io.ChunkedByteBufferOutputStream

Expand Down Expand Up @@ -73,7 +73,7 @@ final class ShuffleBlockFetcherIterator(
maxBlocksInFlightPerAddress: Int,
maxReqSizeShuffleToMem: Long,
detectCorrupt: Boolean,
shuffleMetrics: ShuffleMetricsReporter)
shuffleMetrics: ShuffleReadMetricsReporter)
extends Iterator[(BlockId, InputStream)] with DownloadFileManager with Logging {

import ShuffleBlockFetcherIterator._
Expand Down

0 comments on commit c995e07

Please sign in to comment.