Skip to content

Commit

Permalink
Revert "Spark 3.4, 3.5: Iceberg / DataFusion Comet integration (apache#12147)"
Browse files Browse the repository at this point in the history

This reverts commit c5822c4.
  • Loading branch information
pvary committed Feb 3, 2025
1 parent 507e2a9 commit 2dcb297
Show file tree
Hide file tree
Showing 21 changed files with 28 additions and 941 deletions.
4 changes: 2 additions & 2 deletions spark/v3.4/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") {
exclude group: 'org.roaringbitmap'
}

compileOnly "org.apache.datafusion:comet-spark-spark${sparkMajorVersion}_${scalaVersion}:0.5.0"
compileOnly "org.apache.datafusion:comet-spark-spark${sparkMajorVersion}_${scalaVersion}:0.3.0"

implementation libs.parquet.column
implementation libs.parquet.hadoop
Expand Down Expand Up @@ -185,7 +185,7 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer
testImplementation libs.avro.avro
testImplementation libs.parquet.hadoop
testImplementation libs.junit.vintage.engine
testImplementation "org.apache.datafusion:comet-spark-spark${sparkMajorVersion}_${scalaVersion}:0.5.0"
testImplementation "org.apache.datafusion:comet-spark-spark${sparkMajorVersion}_${scalaVersion}:0.3.0"

// Required because we remove antlr plugin dependencies from the compile configuration, see note above
runtimeOnly libs.antlr.runtime
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,7 @@ private static class DeleteColumnReader extends MetadataColumnReader {
DataTypes.BooleanType,
TypeUtil.convertToParquet(
new StructField("_deleted", DataTypes.BooleanType, false, Metadata.empty())),
false /* useDecimal128 = false */,
false /* isConstant */);
false /* useDecimal128 = false */);
this.isDeleted = new boolean[0];
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,11 +42,7 @@ private static class PositionColumnReader extends MetadataColumnReader {
private long position;

PositionColumnReader(ColumnDescriptor descriptor) {
super(
DataTypes.LongType,
descriptor,
false /* useDecimal128 = false */,
false /* isConstant */);
super(DataTypes.LongType, descriptor, false /* useDecimal128 = false */);
}

@Override
Expand Down
5 changes: 0 additions & 5 deletions spark/v3.5/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,6 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") {
dependencies {
implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow')
api project(':iceberg-api')
annotationProcessor libs.immutables.value
compileOnly libs.immutables.value
implementation project(':iceberg-common')
implementation project(':iceberg-core')
implementation project(':iceberg-data')
Expand All @@ -75,8 +73,6 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") {
exclude group: 'org.roaringbitmap'
}

compileOnly "org.apache.datafusion:comet-spark-spark${sparkMajorVersion}_${scalaVersion}:0.5.0"

implementation libs.parquet.column
implementation libs.parquet.hadoop

Expand Down Expand Up @@ -183,7 +179,6 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer
testImplementation libs.avro.avro
testImplementation libs.parquet.hadoop
testImplementation libs.awaitility
testImplementation "org.apache.datafusion:comet-spark-spark${sparkMajorVersion}_${scalaVersion}:0.5.0"

// Required because we remove antlr plugin dependencies from the compile configuration, see note above
runtimeOnly libs.antlr.runtime
Expand Down

This file was deleted.

This file was deleted.

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -355,12 +355,4 @@ public boolean reportColumnStats() {
.defaultValue(SparkSQLProperties.REPORT_COLUMN_STATS_DEFAULT)
.parse();
}

public ParquetReaderType parquetReaderType() {
return confParser
.enumConf(ParquetReaderType::fromString)
.sessionConf(SparkSQLProperties.PARQUET_READER_TYPE)
.defaultValue(SparkSQLProperties.PARQUET_READER_TYPE_DEFAULT)
.parse();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,6 @@ private SparkSQLProperties() {}
// Controls whether vectorized reads are enabled
public static final String VECTORIZATION_ENABLED = "spark.sql.iceberg.vectorization.enabled";

// Controls which Parquet reader implementation to use
public static final String PARQUET_READER_TYPE = "spark.sql.iceberg.parquet.reader-type";
public static final ParquetReaderType PARQUET_READER_TYPE_DEFAULT = ParquetReaderType.ICEBERG;
// Controls whether to perform the nullability check during writes
public static final String CHECK_NULLABILITY = "spark.sql.iceberg.check-nullability";
public static final boolean CHECK_NULLABILITY_DEFAULT = true;
Expand Down

This file was deleted.

Loading

0 comments on commit 2dcb297

Please sign in to comment.