Skip to content

Commit 53be2c5

Browse files
committed
Minor style updates.
1 parent aef4dd5 commit 53be2c5

File tree

4 files changed

+40
-25
lines changed

4 files changed

+40
-25
lines changed

streaming/src/main/scala/org/apache/spark/streaming/dstream/NetworkInputDStream.scala

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -184,15 +184,25 @@ abstract class NetworkReceiver[T: ClassTag]() extends Serializable with Logging
184184
/**
185185
* Pushes a block (as an ArrayBuffer filled with data) into the block manager.
186186
*/
187-
def pushBlock(blockId: StreamBlockId, arrayBuffer: ArrayBuffer[T], metadata: Any, level: StorageLevel) {
187+
def pushBlock(
188+
blockId: StreamBlockId,
189+
arrayBuffer: ArrayBuffer[T],
190+
metadata: Any,
191+
level: StorageLevel
192+
) {
188193
env.blockManager.put(blockId, arrayBuffer.asInstanceOf[ArrayBuffer[Any]], level)
189194
actor ! ReportBlock(blockId, arrayBuffer.size, metadata)
190195
}
191196

192197
/**
193198
* Pushes a block (as bytes) into the block manager.
194199
*/
195-
def pushBlock(blockId: StreamBlockId, bytes: ByteBuffer, metadata: Any, level: StorageLevel) {
200+
def pushBlock(
201+
blockId: StreamBlockId,
202+
bytes: ByteBuffer,
203+
metadata: Any,
204+
level: StorageLevel
205+
) {
196206
env.blockManager.putBytes(blockId, bytes, level)
197207
actor ! ReportBlock(blockId, -1 , metadata)
198208
}

streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingUI.scala

Lines changed: 25 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -105,8 +105,13 @@ private[ui] class StreamingUIListener(ssc: StreamingContext) extends StreamingLi
105105
val latestBatchInfos = allBatches.reverse.take(batchInfoLimit)
106106
val latestBlockInfos = latestBatchInfos.map(_.receivedBlockInfo)
107107
(0 until numNetworkReceivers).map { receiverId =>
108-
val blockInfoOfParticularReceiver = latestBlockInfos.map(_.get(receiverId).getOrElse(Array.empty))
109-
val recordsOfParticularReceiver = blockInfoOfParticularReceiver.map(_.map(_.numRecords).sum.toDouble * 1000 / batchDuration)
108+
val blockInfoOfParticularReceiver = latestBlockInfos.map { batchInfo =>
109+
batchInfo.get(receiverId).getOrElse(Array.empty)
110+
}
111+
val recordsOfParticularReceiver = blockInfoOfParticularReceiver.map { blockInfo =>
112+
// calculate records per second for each batch
113+
blockInfo.map(_.numRecords).sum.toDouble * 1000 / batchDuration
114+
}
110115
val distributionOption = Distribution(recordsOfParticularReceiver)
111116
(receiverId, distributionOption)
112117
}.toMap
@@ -231,16 +236,24 @@ private[ui] class StreamingPage(parent: StreamingUI) extends Logging {
231236
val numBatches = listener.completedBatches.size
232237
val lastCompletedBatch = listener.lastCompletedBatch
233238
val table = if (numBatches > 0) {
234-
val processingDelayQuantilesRow =
235-
Seq("Processing Time", msDurationToString(lastCompletedBatch.flatMap(_.processingDelay))) ++
236-
getQuantiles(listener.processingDelayDistribution)
237-
val schedulingDelayQuantilesRow =
238-
Seq("Scheduling Delay", msDurationToString(lastCompletedBatch.flatMap(_.schedulingDelay))) ++
239-
getQuantiles(listener.schedulingDelayDistribution)
240-
val totalDelayQuantilesRow =
241-
Seq("Total Delay", msDurationToString(lastCompletedBatch.flatMap(_.totalDelay))) ++
242-
getQuantiles(listener.totalDelayDistribution)
243-
239+
val processingDelayQuantilesRow = {
240+
Seq(
241+
"Processing Time",
242+
msDurationToString(lastCompletedBatch.flatMap(_.processingDelay))
243+
) ++ getQuantiles(listener.processingDelayDistribution)
244+
}
245+
val schedulingDelayQuantilesRow = {
246+
Seq(
247+
"Scheduling Delay",
248+
msDurationToString(lastCompletedBatch.flatMap(_.schedulingDelay))
249+
) ++ getQuantiles(listener.schedulingDelayDistribution)
250+
}
251+
val totalDelayQuantilesRow = {
252+
Seq(
253+
"Total Delay",
254+
msDurationToString(lastCompletedBatch.flatMap(_.totalDelay))
255+
) ++ getQuantiles(listener.totalDelayDistribution)
256+
}
244257
val headerRow = Seq("Metric", "Last batch", "Minimum", "25th percentile",
245258
"Median", "75th percentile", "Maximum")
246259
val dataRows: Seq[Seq[String]] = Seq(

streaming/src/main/scala/org/apache/spark/streaming/ui/UIUtils.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ private[spark] object UIUtils {
4545
<script type="text/JavaScript">
4646
<!--
4747
function timedRefresh(timeoutPeriod) {
48-
setTimeout("location.reload(true);",timeoutPeriod);
48+
setTimeout("location.reload(true);",timeoutPeriod);
4949
}
5050
// -->
5151
</script>

streaming/src/test/scala/org/apache/spark/streaming/UISuite.scala

Lines changed: 2 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ import scala.util.Random
2525

2626
class UISuite extends FunSuite with BeforeAndAfterAll {
2727

28-
test("Testing") {
28+
ignore("Testing") {
2929
runStreaming(1000000)
3030
}
3131

@@ -58,12 +58,4 @@ class FunctionBasedInputDStream[T: ClassTag](
5858
def stop(): Unit = {}
5959

6060
def compute(validTime: Time): Option[RDD[T]] = function(ssc, validTime)
61-
}
62-
63-
64-
65-
66-
67-
68-
69-
61+
}

0 commit comments

Comments (0)