Skip to content

Commit 98195c3

Browse files
Sephiroth-Linshivaram
authored and committed
[SPARK-7526] [SPARKR] Specify ip of RBackend, MonitorServer and RRDD Socket server
These R processes are only used to communicate with the JVM process locally, so binding to localhost is more reasonable than a wildcard ip. Author: linweizhong <linweizhong@huawei.com> Closes apache#6053 from Sephiroth-Lin/spark-7526 and squashes the following commits: 5303af7 [linweizhong] bind to localhost rather than wildcard ip
1 parent df9b94a commit 98195c3

File tree

2 files changed

+6
-6
lines changed

2 files changed

+6
-6
lines changed

core/src/main/scala/org/apache/spark/api/r/RBackend.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
package org.apache.spark.api.r
1919

2020
import java.io.{DataOutputStream, File, FileOutputStream, IOException}
21-
import java.net.{InetSocketAddress, ServerSocket}
21+
import java.net.{InetAddress, InetSocketAddress, ServerSocket}
2222
import java.util.concurrent.TimeUnit
2323

2424
import io.netty.bootstrap.ServerBootstrap
@@ -65,7 +65,7 @@ private[spark] class RBackend {
6565
}
6666
})
6767

68-
channelFuture = bootstrap.bind(new InetSocketAddress(0))
68+
channelFuture = bootstrap.bind(new InetSocketAddress("localhost", 0))
6969
channelFuture.syncUninterruptibly()
7070
channelFuture.channel().localAddress().asInstanceOf[InetSocketAddress].getPort()
7171
}
@@ -101,7 +101,7 @@ private[spark] object RBackend extends Logging {
101101
try {
102102
// bind to random port
103103
val boundPort = sparkRBackend.init()
104-
val serverSocket = new ServerSocket(0, 1)
104+
val serverSocket = new ServerSocket(0, 1, InetAddress.getByName("localhost"))
105105
val listenPort = serverSocket.getLocalPort()
106106

107107
// tell the R process via temporary file

core/src/main/scala/org/apache/spark/api/r/RRDD.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
package org.apache.spark.api.r
1919

2020
import java.io._
21-
import java.net.ServerSocket
21+
import java.net.{InetAddress, ServerSocket}
2222
import java.util.{Map => JMap}
2323

2424
import scala.collection.JavaConversions._
@@ -55,7 +55,7 @@ private abstract class BaseRRDD[T: ClassTag, U: ClassTag](
5555
val parentIterator = firstParent[T].iterator(partition, context)
5656

5757
// we expect two connections
58-
val serverSocket = new ServerSocket(0, 2)
58+
val serverSocket = new ServerSocket(0, 2, InetAddress.getByName("localhost"))
5959
val listenPort = serverSocket.getLocalPort()
6060

6161
// The stdout/stderr is shared by multiple tasks, because we use one daemon
@@ -414,7 +414,7 @@ private[r] object RRDD {
414414
synchronized {
415415
if (daemonChannel == null) {
416416
// we expect one connections
417-
val serverSocket = new ServerSocket(0, 1)
417+
val serverSocket = new ServerSocket(0, 1, InetAddress.getByName("localhost"))
418418
val daemonPort = serverSocket.getLocalPort
419419
errThread = createRProcess(rLibDir, daemonPort, "daemon.R")
420420
// the socket used to send out the input of task

0 commit comments

Comments (0)