Commit ed1c925

Merge branch 'main' into df50
2 parents: 9963a75 + 0e89a57

68 files changed: +1187 −542 lines (large commit; only a subset of the changed files is shown below)

.github/actions/java-test/action.yaml

Lines changed: 4 additions & 0 deletions

@@ -67,13 +67,17 @@ runs:
       if: ${{ inputs.suites == '' }}
       env:
         COMET_PARQUET_SCAN_IMPL: ${{ inputs.scan_impl }}
+        SPARK_LOCAL_HOSTNAME: "localhost"
+        SPARK_LOCAL_IP: "127.0.0.1"
       run: |
         MAVEN_OPTS="-Xmx4G -Xms2G -XX:+UnlockDiagnosticVMOptions -XX:+ShowMessageBoxOnError -XX:+HeapDumpOnOutOfMemoryError -XX:ErrorFile=./hs_err_pid%p.log" SPARK_HOME=`pwd` ./mvnw -B clean install ${{ inputs.maven_opts }}
     - name: Run specified tests
       shell: bash
       if: ${{ inputs.suites != '' }}
       env:
         COMET_PARQUET_SCAN_IMPL: ${{ inputs.scan_impl }}
+        SPARK_LOCAL_HOSTNAME: "localhost"
+        SPARK_LOCAL_IP: "127.0.0.1"
       run: |
         MAVEN_SUITES="$(echo "${{ inputs.suites }}" | paste -sd, -)"
         echo "Running with MAVEN_SUITES=$MAVEN_SUITES"

.github/workflows/docker-publish.yml

Lines changed: 1 addition & 0 deletions

@@ -31,6 +31,7 @@ on:
 jobs:
   docker:
     name: Docker
+    if: ${{ startsWith(github.repository, 'apache/') }}
     runs-on: ubuntu-22.04
     permissions:
       contents: read

.github/workflows/docs.yaml

Lines changed: 2 additions & 1 deletion

@@ -29,6 +29,7 @@ name: Deploy DataFusion Comet site
 jobs:
   build-docs:
     name: Build docs
+    if: ${{ startsWith(github.repository, 'apache/') }}
     runs-on: ubuntu-latest
     steps:
       - name: Checkout docs sources
@@ -41,7 +42,7 @@ jobs:
           path: asf-site

       - name: Setup Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
           python-version: "3.10"

.github/workflows/pr_build_linux.yml

Lines changed: 1 addition & 0 deletions

@@ -146,6 +146,7 @@ jobs:
           org.apache.spark.sql.comet.CometTaskMetricsSuite
           org.apache.comet.CometBitwiseExpressionSuite
           org.apache.comet.CometMapExpressionSuite
+          org.apache.comet.objectstore.NativeConfigSuite
       fail-fast: false
     name: ${{ matrix.os }}/${{ matrix.profile.name }} [${{ matrix.suite.name }}]
     runs-on: ${{ matrix.os }}

.github/workflows/spark_sql_test.yml

Lines changed: 76 additions & 21 deletions

@@ -35,6 +35,12 @@ on:
   # manual trigger
   # https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow
   workflow_dispatch:
+    inputs:
+      collect-fallback-logs:
+        description: 'Whether to collect Comet fallback reasons from spark sql unit test logs'
+        required: false
+        default: 'false'
+        type: boolean

 env:
   RUST_VERSION: stable
@@ -48,12 +54,12 @@ jobs:
         spark-version: [{short: '3.4', full: '3.4.3'}, {short: '3.5', full: '3.5.6'}]
         module:
           - {name: "catalyst", args1: "catalyst/test", args2: ""}
-          - {name: "sql/core-1", args1: "", args2: "sql/testOnly * -- -l org.apache.spark.tags.ExtendedSQLTest -l org.apache.spark.tags.SlowSQLTest"}
-          - {name: "sql/core-2", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.ExtendedSQLTest"}
-          - {name: "sql/core-3", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.SlowSQLTest"}
-          - {name: "sql/hive-1", args1: "", args2: "hive/testOnly * -- -l org.apache.spark.tags.ExtendedHiveTest -l org.apache.spark.tags.SlowHiveTest"}
-          - {name: "sql/hive-2", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.ExtendedHiveTest"}
-          - {name: "sql/hive-3", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.SlowHiveTest"}
+          - {name: "sql_core-1", args1: "", args2: "sql/testOnly * -- -l org.apache.spark.tags.ExtendedSQLTest -l org.apache.spark.tags.SlowSQLTest"}
+          - {name: "sql_core-2", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.ExtendedSQLTest"}
+          - {name: "sql_core-3", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.SlowSQLTest"}
+          - {name: "sql_hive-1", args1: "", args2: "hive/testOnly * -- -l org.apache.spark.tags.ExtendedHiveTest -l org.apache.spark.tags.SlowHiveTest"}
+          - {name: "sql_hive-2", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.ExtendedHiveTest"}
+          - {name: "sql_hive-3", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.SlowHiveTest"}
       fail-fast: false
     name: spark-sql-${{ matrix.module.name }}/${{ matrix.os }}/spark-${{ matrix.spark-version.full }}/java-${{ matrix.java-version }}
     runs-on: ${{ matrix.os }}
@@ -75,9 +81,19 @@ jobs:
         run: |
           cd apache-spark
           rm -rf /root/.m2/repository/org/apache/parquet # somehow parquet cache requires cleanups
-          ENABLE_COMET=true build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
+          ENABLE_COMET=true ENABLE_COMET_LOG_FALLBACK_REASONS=${{ github.event.inputs.collect-fallback-logs || 'false' }} \
+            build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
+          if [ "${{ github.event.inputs.collect-fallback-logs }}" = "true" ]; then
+            find . -type f -name "unit-tests.log" -print0 | xargs -0 grep -h "Comet cannot accelerate" | sed 's/.*Comet cannot accelerate/Comet cannot accelerate/' | sort -u > fallback.log
+          fi
         env:
           LC_ALL: "C.UTF-8"
+      - name: Upload fallback log
+        if: ${{ github.event.inputs.collect-fallback-logs == 'true' }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: fallback-log-spark-sql-${{ matrix.module.name }}-${{ matrix.os }}-spark-${{ matrix.spark-version.full }}-java-${{ matrix.java-version }}
+          path: "**/fallback.log"

   spark-sql-native-native-comet:
     strategy:
@@ -87,12 +103,12 @@ jobs:
         spark-version: [ { short: '3.4', full: '3.4.3' }, { short: '3.5', full: '3.5.6' } ]
         module:
           - { name: "catalyst", args1: "catalyst/test", args2: "" }
-          - { name: "sql/core-1", args1: "", args2: "sql/testOnly * -- -l org.apache.spark.tags.ExtendedSQLTest -l org.apache.spark.tags.SlowSQLTest" }
-          - { name: "sql/core-2", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.ExtendedSQLTest" }
-          - { name: "sql/core-3", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.SlowSQLTest" }
-          - { name: "sql/hive-1", args1: "", args2: "hive/testOnly * -- -l org.apache.spark.tags.ExtendedHiveTest -l org.apache.spark.tags.SlowHiveTest" }
-          - { name: "sql/hive-2", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.ExtendedHiveTest" }
-          - { name: "sql/hive-3", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.SlowHiveTest" }
+          - { name: "sql_core-1", args1: "", args2: "sql/testOnly * -- -l org.apache.spark.tags.ExtendedSQLTest -l org.apache.spark.tags.SlowSQLTest" }
+          - { name: "sql_core-2", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.ExtendedSQLTest" }
+          - { name: "sql_core-3", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.SlowSQLTest" }
+          - { name: "sql_hive-1", args1: "", args2: "hive/testOnly * -- -l org.apache.spark.tags.ExtendedHiveTest -l org.apache.spark.tags.SlowHiveTest" }
+          - { name: "sql_hive-2", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.ExtendedHiveTest" }
+          - { name: "sql_hive-3", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.SlowHiveTest" }
       fail-fast: false
     name: spark-sql-native-comet-${{ matrix.module.name }}/${{ matrix.os }}/spark-${{ matrix.spark-version.full }}/java-${{ matrix.java-version }}
     runs-on: ${{ matrix.os }}
@@ -114,9 +130,19 @@ jobs:
        run: |
          cd apache-spark
          rm -rf /root/.m2/repository/org/apache/parquet # somehow parquet cache requires cleanups
-          ENABLE_COMET=true COMET_PARQUET_SCAN_IMPL=native_comet build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
+          ENABLE_COMET=true COMET_PARQUET_SCAN_IMPL=native_comet ENABLE_COMET_LOG_FALLBACK_REASONS=${{ github.event.inputs.collect-fallback-logs || 'false' }} \
+            build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
+          if [ "${{ github.event.inputs.collect-fallback-logs }}" = "true" ]; then
+            find . -type f -name "unit-tests.log" -print0 | xargs -0 grep -h "Comet cannot accelerate" | sed 's/.*Comet cannot accelerate/Comet cannot accelerate/' | sort -u > fallback.log
+          fi
        env:
          LC_ALL: "C.UTF-8"
+      - name: Upload fallback log
+        if: ${{ github.event.inputs.collect-fallback-logs == 'true' }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: fallback-log-spark-sql-native-comet-${{ matrix.module.name }}-${{ matrix.os }}-spark-${{ matrix.spark-version.full }}-java-${{ matrix.java-version }}
+          path: "**/fallback.log"

   spark-sql-native-iceberg-compat:
     strategy:
@@ -126,12 +152,12 @@ jobs:
         spark-version: [{short: '3.4', full: '3.4.3'}, {short: '3.5', full: '3.5.6'}]
         module:
           - {name: "catalyst", args1: "catalyst/test", args2: ""}
-          - {name: "sql/core-1", args1: "", args2: "sql/testOnly * -- -l org.apache.spark.tags.ExtendedSQLTest -l org.apache.spark.tags.SlowSQLTest"}
-          - {name: "sql/core-2", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.ExtendedSQLTest"}
-          - {name: "sql/core-3", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.SlowSQLTest"}
-          - {name: "sql/hive-1", args1: "", args2: "hive/testOnly * -- -l org.apache.spark.tags.ExtendedHiveTest -l org.apache.spark.tags.SlowHiveTest"}
-          - {name: "sql/hive-2", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.ExtendedHiveTest"}
-          - {name: "sql/hive-3", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.SlowHiveTest"}
+          - {name: "sql_core-1", args1: "", args2: "sql/testOnly * -- -l org.apache.spark.tags.ExtendedSQLTest -l org.apache.spark.tags.SlowSQLTest"}
+          - {name: "sql_core-2", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.ExtendedSQLTest"}
+          - {name: "sql_core-3", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.SlowSQLTest"}
+          - {name: "sql_hive-1", args1: "", args2: "hive/testOnly * -- -l org.apache.spark.tags.ExtendedHiveTest -l org.apache.spark.tags.SlowHiveTest"}
+          - {name: "sql_hive-2", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.ExtendedHiveTest"}
+          - {name: "sql_hive-3", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.SlowHiveTest"}
       fail-fast: false
     name: spark-sql-iceberg-compat-${{ matrix.module.name }}/${{ matrix.os }}/spark-${{ matrix.spark-version.full }}/java-${{ matrix.java-version }}
     runs-on: ${{ matrix.os }}
@@ -153,6 +179,35 @@ jobs:
        run: |
          cd apache-spark
          rm -rf /root/.m2/repository/org/apache/parquet # somehow parquet cache requires cleanups
-          ENABLE_COMET=true COMET_PARQUET_SCAN_IMPL=native_iceberg_compat build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
+          ENABLE_COMET=true COMET_PARQUET_SCAN_IMPL=native_iceberg_compat ENABLE_COMET_LOG_FALLBACK_REASONS=${{ github.event.inputs.collect-fallback-logs || 'false' }} \
+            build/sbt -Dsbt.log.noformat=true ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
+          if [ "${{ github.event.inputs.collect-fallback-logs }}" = "true" ]; then
+            find . -type f -name "unit-tests.log" -print0 | xargs -0 grep -h "Comet cannot accelerate" | sed 's/.*Comet cannot accelerate/Comet cannot accelerate/' | sort -u > fallback.log
+          fi
        env:
          LC_ALL: "C.UTF-8"
+      - name: Upload fallback log
+        if: ${{ github.event.inputs.collect-fallback-logs == 'true' }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: fallback-log-spark-sql-iceberg-compat-${{ matrix.module.name }}-${{ matrix.os }}-spark-${{ matrix.spark-version.full }}-java-${{ matrix.java-version }}
+          path: "**/fallback.log"
+
+  merge-fallback-logs:
+    if: ${{ github.event.inputs.collect-fallback-logs == 'true' }}
+    name: merge-fallback-logs
+    needs: [ spark-sql-auto-scan, spark-sql-native-native-comet, spark-sql-native-iceberg-compat ]
+    runs-on: ubuntu-24.04
+    steps:
+      - name: Download fallback log artifacts
+        uses: actions/download-artifact@v5
+        with:
+          path: fallback-logs/
+      - name: Merge fallback logs
+        run: |
+          find ./fallback-logs/ -type f -name "fallback.log" -print0 | xargs -0 cat | sort -u > all_fallback.log
+      - name: Upload merged fallback log
+        uses: actions/upload-artifact@v4
+        with:
+          name: all-fallback-log
+          path: all_fallback.log
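Note: the collection step scans every unit-tests.log for lines containing "Comet cannot accelerate", strips whatever log prefix precedes the marker, and dedups the result. A minimal Scala sketch of the same extraction, mirroring the grep | sed | sort -u chain above (the file paths are assumptions for illustration):

import java.io.{File, PrintWriter}
import scala.io.Source

object FallbackLogSketch {
  val Marker = "Comet cannot accelerate"

  // Equivalent of: grep -h "$Marker" | sed "s/.*$Marker/$Marker/" | sort -u
  // (lastIndexOf mirrors sed's greedy ".*")
  def extract(lines: Iterator[String]): Seq[String] =
    lines
      .filter(_.contains(Marker))
      .map(l => Marker + l.substring(l.lastIndexOf(Marker) + Marker.length))
      .toSeq.distinct.sorted

  def main(args: Array[String]): Unit = {
    val src = Source.fromFile("unit-tests.log") // assumed input path
    val out = new PrintWriter(new File("fallback.log"))
    try extract(src.getLines()).foreach(out.println)
    finally { src.close(); out.close() }
  }
}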

common/src/main/java/org/apache/comet/vector/CometVector.java

Lines changed: 18 additions & 13 deletions

@@ -143,66 +143,66 @@ public byte[] copyBinaryDecimal(int i, byte[] dest) {

   @Override
   public boolean getBoolean(int rowId) {
-    throw new UnsupportedOperationException("Not yet supported");
+    throw notImplementedException();
   }

   @Override
   public byte getByte(int rowId) {
-    throw new UnsupportedOperationException("Not yet supported");
+    throw notImplementedException();
   }

   @Override
   public short getShort(int rowId) {
-    throw new UnsupportedOperationException("Not yet supported");
+    throw notImplementedException();
   }

   @Override
   public int getInt(int rowId) {
-    throw new UnsupportedOperationException("Not yet supported");
+    throw notImplementedException();
   }

   @Override
   public long getLong(int rowId) {
-    throw new UnsupportedOperationException("Not yet supported");
+    throw notImplementedException();
   }

   public long getLongDecimal(int rowId) {
-    throw new UnsupportedOperationException("Not yet supported");
+    throw notImplementedException();
   }

   @Override
   public float getFloat(int rowId) {
-    throw new UnsupportedOperationException("Not yet supported");
+    throw notImplementedException();
   }

   @Override
   public double getDouble(int rowId) {
-    throw new UnsupportedOperationException("Not yet supported");
+    throw notImplementedException();
   }

   @Override
   public UTF8String getUTF8String(int rowId) {
-    throw new UnsupportedOperationException("Not yet supported");
+    throw notImplementedException();
   }

   @Override
   public byte[] getBinary(int rowId) {
-    throw new UnsupportedOperationException("Not yet supported");
+    throw notImplementedException();
   }

   @Override
   public ColumnarArray getArray(int i) {
-    throw new UnsupportedOperationException("Not yet supported");
+    throw notImplementedException();
   }

   @Override
   public ColumnarMap getMap(int i) {
-    throw new UnsupportedOperationException("Not yet supported");
+    throw notImplementedException();
   }

   @Override
   public ColumnVector getChild(int i) {
-    throw new UnsupportedOperationException("Not yet supported");
+    throw notImplementedException();
   }

   @Override
@@ -261,4 +261,9 @@ public static CometVector getVector(
   protected static CometVector getVector(ValueVector vector, boolean useDecimal128) {
     return getVector(vector, useDecimal128, null);
   }
+
+  private UnsupportedOperationException notImplementedException() {
+    return new UnsupportedOperationException(
+        "CometVector subclass " + this.getClass().getName() + " does not implement this method");
+  }
 }
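Note: the refactor replaces thirteen identical "Not yet supported" messages with a helper whose error names the concrete subclass, so a stack trace immediately identifies which vector implementation is missing an accessor. A small Scala analogue of the pattern (class names here are illustrative, not from the commit):

// The base class reports the runtime subclass name, like the new
// notImplementedException() helper in CometVector.java.
abstract class VectorBase {
  protected def notImplemented(): UnsupportedOperationException =
    new UnsupportedOperationException(
      s"subclass ${getClass.getName} does not implement this method")

  def getBoolean(rowId: Int): Boolean = throw notImplemented()
  def getInt(rowId: Int): Int = throw notImplemented()
}

// Supports only getInt; a stray getBoolean call now names the culprit class.
class IntOnlyVector extends VectorBase {
  override def getInt(rowId: Int): Int = rowId
}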

common/src/main/scala/org/apache/comet/CometConf.scala

Lines changed: 12 additions & 9 deletions

@@ -486,7 +486,8 @@ object CometConf extends ShimCometConf {
     conf("spark.comet.logFallbackReasons.enabled")
       .doc("When this setting is enabled, Comet will log warnings for all fallback reasons.")
       .booleanConf
-      .createWithDefault(false)
+      .createWithDefault(
+        sys.env.getOrElse("ENABLE_COMET_LOG_FALLBACK_REASONS", "false").toBoolean)

   val COMET_EXPLAIN_FALLBACK_ENABLED: ConfigEntry[Boolean] =
     conf("spark.comet.explainFallback.enabled")
@@ -624,14 +625,6 @@ object CometConf extends ShimCometConf {
       .booleanConf
       .createWithDefault(false)

-  val COMET_CAST_ALLOW_INCOMPATIBLE: ConfigEntry[Boolean] =
-    conf("spark.comet.cast.allowIncompatible")
-      .doc(
-        "Comet is not currently fully compatible with Spark for all cast operations. " +
-          s"Set this config to true to allow them anyway. $COMPAT_GUIDE.")
-      .booleanConf
-      .createWithDefault(false)
-
   val COMET_REGEXP_ALLOW_INCOMPATIBLE: ConfigEntry[Boolean] =
     conf("spark.comet.regexp.allowIncompatible")
       .doc(
@@ -648,6 +641,16 @@ object CometConf extends ShimCometConf {
       .longConf
       .createWithDefault(3000L)

+  val COMET_LIBHDFS_SCHEMES_KEY = "fs.comet.libhdfs.schemes"
+
+  val COMET_LIBHDFS_SCHEMES: OptionalConfigEntry[String] =
+    conf(s"spark.hadoop.$COMET_LIBHDFS_SCHEMES_KEY")
+      .doc(
+        "Defines filesystem schemes (e.g., hdfs, webhdfs) that the native side accesses " +
+          "via libhdfs, separated by commas. Valid only when built with hdfs feature enabled.")
+      .stringConf
+      .createOptional
+
   /** Create a config to enable a specific operator */
   private def createExecEnabledConfig(
       exec: String,
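Note two related changes here: spark.comet.logFallbackReasons.enabled now takes its default from the ENABLE_COMET_LOG_FALLBACK_REASONS environment variable (which is what the workflow changes above hook into), and the removed spark.comet.cast.allowIncompatible is superseded by spark.comet.expression.allowIncompatible (see the benchmark scripts below). A minimal sketch of the env-derived-default pattern; ConfigEntry here is a stand-in, not Comet's real config builder:

// Stand-in for Comet's config machinery, just to show the default wiring.
final case class ConfigEntry[T](key: String, default: T)

object EnvDefaultSketch {
  // The default flips to true when ENABLE_COMET_LOG_FALLBACK_REASONS=true is
  // exported, with no Spark conf change needed; an explicit conf still wins.
  val logFallbackReasons: ConfigEntry[Boolean] = ConfigEntry(
    "spark.comet.logFallbackReasons.enabled",
    sys.env.getOrElse("ENABLE_COMET_LOG_FALLBACK_REASONS", "false").toBoolean)

  def main(args: Array[String]): Unit = println(logFallbackReasons)
}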

common/src/main/scala/org/apache/comet/objectstore/NativeConfig.scala

Lines changed: 13 additions & 4 deletions

@@ -22,8 +22,11 @@ package org.apache.comet.objectstore
 import java.net.URI
 import java.util.Locale

+import org.apache.commons.lang3.StringUtils
 import org.apache.hadoop.conf.Configuration

+import org.apache.comet.CometConf.COMET_LIBHDFS_SCHEMES_KEY
+
 object NativeConfig {

   private val objectStoreConfigPrefixes = Map(
@@ -55,16 +58,22 @@ object NativeConfig {
   def extractObjectStoreOptions(hadoopConf: Configuration, uri: URI): Map[String, String] = {
     val scheme = uri.getScheme.toLowerCase(Locale.ROOT)

+    import scala.collection.JavaConverters._
+    val options = scala.collection.mutable.Map[String, String]()
+
+    // The schemes will use libhdfs
+    val libhdfsSchemes = hadoopConf.get(COMET_LIBHDFS_SCHEMES_KEY)
+    if (StringUtils.isNotBlank(libhdfsSchemes)) {
+      options(COMET_LIBHDFS_SCHEMES_KEY) = libhdfsSchemes
+    }
+
     // Get prefixes for this scheme, return early if none found
     val prefixes = objectStoreConfigPrefixes.get(scheme)
     if (prefixes.isEmpty) {
-      return Map.empty[String, String]
+      return options.toMap
     }

-    import scala.collection.JavaConverters._
-
     // Extract all configurations that match the object store prefixes
-    val options = scala.collection.mutable.Map[String, String]()
     hadoopConf.iterator().asScala.foreach { entry =>
       val key = entry.getKey
       val value = entry.getValue
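The net effect: fs.comet.libhdfs.schemes is now forwarded to the native side for every URI, including schemes like hdfs that have no object-store config prefix; previously the early return dropped it. A usage sketch under the assumption that Hadoop is on the classpath (the namenode URI is made up):

import java.net.URI
import org.apache.hadoop.conf.Configuration
import org.apache.comet.objectstore.NativeConfig

object NativeConfigUsage {
  def main(args: Array[String]): Unit = {
    val hadoopConf = new Configuration()
    hadoopConf.set("fs.comet.libhdfs.schemes", "hdfs,webhdfs")

    // "hdfs" matches no object-store prefix, but the libhdfs option
    // now survives the early return and reaches the native side.
    val opts = NativeConfig.extractObjectStoreOptions(
      hadoopConf, new URI("hdfs://namenode:8020/data"))
    assert(opts("fs.comet.libhdfs.schemes") == "hdfs,webhdfs")
  }
}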

dev/benchmarks/comet-tpcds.sh

Lines changed: 1 addition & 1 deletion

@@ -40,7 +40,7 @@ $SPARK_HOME/bin/spark-submit \
     --conf spark.executor.extraClassPath=$COMET_JAR \
     --conf spark.plugins=org.apache.spark.CometPlugin \
     --conf spark.shuffle.manager=org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager \
-    --conf spark.comet.cast.allowIncompatible=true \
+    --conf spark.comet.expression.allowIncompatible=true \
     tpcbench.py \
     --name comet \
     --benchmark tpcds \

dev/benchmarks/comet-tpch.sh

Lines changed: 1 addition & 1 deletion

@@ -41,7 +41,7 @@ $SPARK_HOME/bin/spark-submit \
     --conf spark.plugins=org.apache.spark.CometPlugin \
     --conf spark.shuffle.manager=org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager \
     --conf spark.comet.exec.replaceSortMergeJoin=true \
-    --conf spark.comet.cast.allowIncompatible=true \
+    --conf spark.comet.expression.allowIncompatible=true \
     tpcbench.py \
     --name comet \
     --benchmark tpch \
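Both benchmark scripts swap the removed spark.comet.cast.allowIncompatible for the broader spark.comet.expression.allowIncompatible. Setting the same flag programmatically would look roughly like this (a sketch, not part of the commit):

import org.apache.spark.sql.SparkSession

object CometBenchSession {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("comet-tpch")
      .config("spark.plugins", "org.apache.spark.CometPlugin")
      .config("spark.shuffle.manager",
        "org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager")
      // replaces the removed spark.comet.cast.allowIncompatible
      .config("spark.comet.expression.allowIncompatible", "true")
      .getOrCreate()
    spark.sql("SELECT 1").show()
    spark.stop()
  }
}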
