
Commit bb8a13b

Merge branch 'master' into csv-line-sep
2 parents: 12022ad + a480a62

281 files changed (+4724, -3902 lines)

R/pkg/R/DataFrame.R

Lines changed: 8 additions & 0 deletions
@@ -767,6 +767,14 @@ setMethod("repartition",
 #' using \code{spark.sql.shuffle.partitions} as number of partitions.}
 #'}
 #'
+#' At least one partition-by expression must be specified.
+#' When no explicit sort order is specified, "ascending nulls first" is assumed.
+#'
+#' Note that due to performance reasons this method uses sampling to estimate the ranges.
+#' Hence, the output may not be consistent, since sampling can return different values.
+#' The sample size can be controlled by the config
+#' \code{spark.sql.execution.rangeExchange.sampleSizePerPartition}.
+#'
 #' @param x a SparkDataFrame.
 #' @param numPartitions the number of partitions to use.
 #' @param col the column by which the range partitioning will be performed.
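
An aside, not part of the diff: the doc text added above describes repartitionByRange, which the JVM API exposes directly. A minimal Java sketch of the documented behavior (the session setup and column name are illustrative assumptions):

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import static org.apache.spark.sql.functions.col;

public class RangePartitionSketch {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        .appName("range-partition-sketch").master("local[*]").getOrCreate();
    Dataset<Row> df = spark.range(1000).toDF("id");
    // No explicit sort order is given, so "ascending nulls first" is assumed.
    Dataset<Row> parts = df.repartitionByRange(4, col("id"));
    // Range boundaries are estimated by sampling, so partition contents can
    // differ slightly between runs, as the doc text above warns.
    System.out.println(parts.rdd().getNumPartitions()); // 4
    spark.stop();
  }
}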

R/pkg/R/functions.R

Lines changed: 1 addition & 1 deletion
@@ -3370,7 +3370,7 @@ setMethod("flatten",
 #'
 #' @rdname column_collection_functions
 #' @aliases map_entries map_entries,Column-method
-#' @note map_entries since 2.4.0
+#' @note map_entries since 3.0.0
 setMethod("map_entries",
           signature(x = "Column"),
           function(x) {
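
An aside, not part of the diff: the @note only records which SparkR release ships the wrapper; the underlying function is the same map_entries available through the JVM APIs. A minimal Java sketch (the session setup is an illustrative assumption):

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import static org.apache.spark.sql.functions.col;
import static org.apache.spark.sql.functions.map_entries;

public class MapEntriesSketch {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        .appName("map-entries-sketch").master("local[*]").getOrCreate();
    Dataset<Row> df = spark.sql("SELECT map('a', 1, 'b', 2) AS m");
    // map_entries turns a map column into an array of key/value structs,
    // here a single row holding [[a, 1], [b, 2]].
    df.select(map_entries(col("m"))).show(false);
    spark.stop();
  }
}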

R/pkg/tests/fulltests/test_streaming.R

Lines changed: 1 addition & 0 deletions
@@ -127,6 +127,7 @@ test_that("Specify a schema by using a DDL-formatted string when reading", {
   expect_false(awaitTermination(q, 5 * 1000))
   callJMethod(q@ssq, "processAllAvailable")
   expect_equal(head(sql("SELECT count(*) FROM people3"))[[1]], 3)
+  stopQuery(q)
 
   expect_error(read.stream(path = parquetPath, schema = "name stri"),
                "DataType stri is not supported.")

bin/load-spark-env.sh

Lines changed: 16 additions & 11 deletions
@@ -26,35 +26,40 @@ if [ -z "${SPARK_HOME}" ]; then
   source "$(dirname "$0")"/find-spark-home
 fi
 
+SPARK_ENV_SH="spark-env.sh"
 if [ -z "$SPARK_ENV_LOADED" ]; then
   export SPARK_ENV_LOADED=1
 
   export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"${SPARK_HOME}"/conf}"
 
-  if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
+  SPARK_ENV_SH="${SPARK_CONF_DIR}/${SPARK_ENV_SH}"
+  if [[ -f "${SPARK_ENV_SH}" ]]; then
     # Promote all variable declarations to environment (exported) variables
     set -a
-    . "${SPARK_CONF_DIR}/spark-env.sh"
+    . ${SPARK_ENV_SH}
     set +a
   fi
 fi
 
 # Setting SPARK_SCALA_VERSION if not already set.
 
 if [ -z "$SPARK_SCALA_VERSION" ]; then
+  SCALA_VERSION_1=2.12
+  SCALA_VERSION_2=2.11
 
-  ASSEMBLY_DIR2="${SPARK_HOME}/assembly/target/scala-2.11"
-  ASSEMBLY_DIR1="${SPARK_HOME}/assembly/target/scala-2.12"
-
-  if [[ -d "$ASSEMBLY_DIR2" && -d "$ASSEMBLY_DIR1" ]]; then
-    echo -e "Presence of build for multiple Scala versions detected." 1>&2
-    echo -e 'Either clean one of them or, export SPARK_SCALA_VERSION in spark-env.sh.' 1>&2
+  ASSEMBLY_DIR_1="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_1}"
+  ASSEMBLY_DIR_2="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_2}"
+  ENV_VARIABLE_DOC="https://spark.apache.org/docs/latest/configuration.html#environment-variables"
+  if [[ -d "$ASSEMBLY_DIR_1" && -d "$ASSEMBLY_DIR_2" ]]; then
+    echo "Presence of build for multiple Scala versions detected ($ASSEMBLY_DIR_1 and $ASSEMBLY_DIR_2)." 1>&2
+    echo "Remove one of them or, export SPARK_SCALA_VERSION=$SCALA_VERSION_1 in ${SPARK_ENV_SH}." 1>&2
+    echo "Visit ${ENV_VARIABLE_DOC} for more details about setting environment variables in spark-env.sh." 1>&2
     exit 1
   fi
 
-  if [ -d "$ASSEMBLY_DIR2" ]; then
-    export SPARK_SCALA_VERSION="2.11"
+  if [[ -d "$ASSEMBLY_DIR_1" ]]; then
+    export SPARK_SCALA_VERSION=${SCALA_VERSION_1}
   else
-    export SPARK_SCALA_VERSION="2.12"
+    export SPARK_SCALA_VERSION=${SCALA_VERSION_2}
   fi
 fi
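
An aside, not part of the diff: the script's detection logic is simple enough to restate. A hedged Java sketch of the same decision (the class and method names are hypothetical):

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class ScalaVersionProbe {
  // Mirrors the script: prefer 2.12 if its assembly directory exists,
  // fall back to 2.11, and refuse to guess when both are present.
  public static String detect(String sparkHome) {
    Path dir212 = Paths.get(sparkHome, "assembly", "target", "scala-2.12");
    Path dir211 = Paths.get(sparkHome, "assembly", "target", "scala-2.11");
    if (Files.isDirectory(dir212) && Files.isDirectory(dir211)) {
      throw new IllegalStateException(
          "Builds for multiple Scala versions detected; remove one or set SPARK_SCALA_VERSION.");
    }
    return Files.isDirectory(dir212) ? "2.12" : "2.11";
  }
}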

common/kvstore/src/main/java/org/apache/spark/util/kvstore/LevelDBIterator.java

Lines changed: 1 addition & 0 deletions
@@ -196,6 +196,7 @@ public synchronized void close() throws IOException {
    * when Scala wrappers are used, this makes sure that, hopefully, the JNI resources held by
    * the iterator will eventually be released.
    */
+  @SuppressWarnings("deprecation")
   @Override
   protected void finalize() throws Throwable {
     db.closeIterator(this);
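
An aside, not part of the diff: finalize() has been deprecated since JDK 9, so overriding it now draws a deprecation warning; the added annotation silences only that warning. A self-contained sketch of the pattern (the class and its resource are hypothetical):

public class NativeHandleHolder {
  private boolean closed = false; // stands in for a JNI-held resource

  public synchronized void close() {
    closed = true; // release the native resource here
  }

  // Keep the last-resort cleanup but suppress the JDK 9+ deprecation warning
  // that overriding finalize() now produces.
  @SuppressWarnings("deprecation")
  @Override
  protected void finalize() throws Throwable {
    if (!closed) {
      close();
    }
    super.finalize();
  }
}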

common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchFailure.java

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ public ChunkFetchFailure(StreamChunkId streamChunkId, String errorString) {
   }
 
   @Override
-  public Type type() { return Type.ChunkFetchFailure; }
+  public Message.Type type() { return Type.ChunkFetchFailure; }
 
   @Override
   public int encodedLength() {
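
An aside, not part of the diff (it applies equally to the four sibling changes below): Message declares a nested Type enum, and spelling the return type as Message.Type keeps it unambiguous if another type named Type, such as java.lang.reflect.Type, is ever in scope. A self-contained sketch of the shape (the real definitions live in org.apache.spark.network.protocol):

interface Message {
  enum Type { ChunkFetchFailure, ChunkFetchRequest, ChunkFetchSuccess }
  Type type();
}

class ChunkFetchFailureSketch implements Message {
  // The qualified return type resolves uniquely even when another Type
  // is imported in the same file.
  @Override
  public Message.Type type() { return Type.ChunkFetchFailure; }
}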

common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchRequest.java

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@ public ChunkFetchRequest(StreamChunkId streamChunkId) {
   }
 
   @Override
-  public Type type() { return Type.ChunkFetchRequest; }
+  public Message.Type type() { return Type.ChunkFetchRequest; }
 
   @Override
   public int encodedLength() {

common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchSuccess.java

Lines changed: 1 addition & 1 deletion
@@ -39,7 +39,7 @@ public ChunkFetchSuccess(StreamChunkId streamChunkId, ManagedBuffer buffer) {
   }
 
   @Override
-  public Type type() { return Type.ChunkFetchSuccess; }
+  public Message.Type type() { return Type.ChunkFetchSuccess; }
 
   @Override
   public int encodedLength() {

common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java

Lines changed: 1 addition & 1 deletion
@@ -34,7 +34,7 @@ public OneWayMessage(ManagedBuffer body) {
   }
 
   @Override
-  public Type type() { return Type.OneWayMessage; }
+  public Message.Type type() { return Type.OneWayMessage; }
 
   @Override
   public int encodedLength() {

common/network-common/src/main/java/org/apache/spark/network/protocol/RpcFailure.java

Lines changed: 1 addition & 1 deletion
@@ -31,7 +31,7 @@ public RpcFailure(long requestId, String errorString) {
   }
 
   @Override
-  public Type type() { return Type.RpcFailure; }
+  public Message.Type type() { return Type.RpcFailure; }
 
   @Override
   public int encodedLength() {
