
Commit b2e8ef3

Merge remote-tracking branch 'origin/master' into countDistinctPartial
2 parents fae38f4 + 0ea46ac commit b2e8ef3

File tree

83 files changed: +2921 -1553 lines


README.md

Lines changed: 1 addition & 5 deletions
@@ -118,11 +118,7 @@ If your project is built with Maven, add this to your POM file's `<dependencies>
 ## A Note About Thrift JDBC server and CLI for Spark SQL
 
 Spark SQL supports Thrift JDBC server and CLI.
-See sql-programming-guide.md for more information about those features.
-You can use those features by setting `-Phive-thriftserver` when building Spark as follows.
-
-    $ sbt/sbt -Phive-thriftserver assembly
-
+See sql-programming-guide.md for more information about using the JDBC server.
 
 ## Configuration
 

assembly/pom.xml

Lines changed: 0 additions & 5 deletions
@@ -163,11 +163,6 @@
           <artifactId>spark-hive_${scala.binary.version}</artifactId>
           <version>${project.version}</version>
         </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>hive-thriftserver</id>
-      <dependencies>
         <dependency>
           <groupId>org.apache.spark</groupId>
           <artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>

core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 8 additions & 2 deletions
@@ -45,7 +45,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   /** Create a SparkConf that loads defaults from system properties and the classpath */
   def this() = this(true)
 
-  private val settings = new HashMap[String, String]()
+  private[spark] val settings = new HashMap[String, String]()
 
   if (loadDefaults) {
     // Load any spark.* system properties
@@ -210,6 +210,12 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     new SparkConf(false).setAll(settings)
   }
 
+  /**
+   * By using this instead of System.getenv(), environment variables can be mocked
+   * in unit tests.
+   */
+  private[spark] def getenv(name: String): String = System.getenv(name)
+
   /** Checks for illegal or deprecated config settings. Throws an exception for the former. Not
    * idempotent - may mutate this conf object to convert deprecated settings to supported ones. */
   private[spark] def validateSettings() {
@@ -227,7 +233,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     // Validate spark.executor.extraJavaOptions
     settings.get(executorOptsKey).map { javaOpts =>
       if (javaOpts.contains("-Dspark")) {
-        val msg = s"$executorOptsKey is not allowed to set Spark options (was '$javaOpts)'. " +
+        val msg = s"$executorOptsKey is not allowed to set Spark options (was '$javaOpts'). " +
           "Set them directly on a SparkConf or in a properties file when using ./bin/spark-submit."
         throw new Exception(msg)
       }
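
The new getenv hook exists so that environment lookups can be faked in tests: because SparkConf now routes them through an overridable method, a subclass can serve canned values. A minimal sketch of such a test helper (the class name ConfWithFakeEnv is hypothetical, not part of this commit); it has to live in the org.apache.spark package, since getenv is private[spark]:

    package org.apache.spark

    // Hypothetical test helper: answers environment lookups from a supplied
    // map instead of the real process environment, returning null for unset
    // names (matching System.getenv's contract).
    class ConfWithFakeEnv(env: Map[String, String])
      extends SparkConf(loadDefaults = false) {

      override private[spark] def getenv(name: String): String =
        env.getOrElse(name, null)
    }

A suite could then construct `new ConfWithFakeEnv(Map("SPARK_LOCAL_DIRS" -> "/tmp/a"))` and exercise config logic without mutating the real environment.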

core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala

Lines changed: 1 addition & 1 deletion
@@ -62,7 +62,7 @@ private[spark] class PythonRDD(
     val env = SparkEnv.get
     val localdir = env.blockManager.diskBlockManager.localDirs.map(
       f => f.getPath()).mkString(",")
-    envVars += ("SPARK_LOCAL_DIR" -> localdir)   // it's also used in monitor thread
+    envVars += ("SPARK_LOCAL_DIRS" -> localdir)  // it's also used in monitor thread
     val worker: Socket = env.createPythonWorker(pythonExec, envVars.toMap)
 
     // Start a thread to feed the process input from our parent's iterator
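
The rename matters because the value is comma-separated (note the mkString(",") above), and consumers look the variable up under the plural name. A tiny illustrative sketch of how such a value would be parsed on the reading side (the helper name and the /tmp fallback are made up for this example):

    // Hypothetical sketch: pick the first configured local directory out of
    // the comma-separated SPARK_LOCAL_DIRS value, defaulting to /tmp.
    def firstLocalDir(env: Map[String, String]): String =
      env.getOrElse("SPARK_LOCAL_DIRS", "/tmp").split(',').head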

core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala

Lines changed: 11 additions & 0 deletions
@@ -32,8 +32,19 @@ import org.apache.spark.annotation.DeveloperApi
  */
 @DeveloperApi
 trait BroadcastFactory {
+
   def initialize(isDriver: Boolean, conf: SparkConf, securityMgr: SecurityManager): Unit
+
+  /**
+   * Creates a new broadcast variable.
+   *
+   * @param value value to broadcast
+   * @param isLocal whether we are in local mode (single JVM process)
+   * @param id unique id representing this broadcast variable
+   */
   def newBroadcast[T: ClassTag](value: T, isLocal: Boolean, id: Long): Broadcast[T]
+
   def unbroadcast(id: Long, removeFromDriver: Boolean, blocking: Boolean): Unit
+
   def stop(): Unit
 }
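
To make the documented contract concrete, here is a hedged skeleton of what an implementation of the trait looks like (the class name is a placeholder and the method bodies are stubs, not a working broadcast mechanism):

    package org.apache.spark.broadcast

    import scala.reflect.ClassTag

    import org.apache.spark.{SecurityManager, SparkConf}

    // Placeholder sketch: a factory wires up shared state in initialize,
    // hands out Broadcast[T] handles from newBroadcast, and releases
    // per-variable resources in unbroadcast and global ones in stop.
    class SkeletonBroadcastFactory extends BroadcastFactory {
      override def initialize(
          isDriver: Boolean, conf: SparkConf, securityMgr: SecurityManager): Unit = ()

      // A real factory returns a concrete Broadcast[T] subclass keyed by `id`.
      override def newBroadcast[T: ClassTag](value: T, isLocal: Boolean, id: Long): Broadcast[T] = ???

      override def unbroadcast(id: Long, removeFromDriver: Boolean, blocking: Boolean): Unit = ()

      override def stop(): Unit = ()
    }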
