[KYUUBI #6554] Delete redundant code related to zorder
# 🔍 Description
## Issue References 🔗

This pull request fixes #6554

## Describe Your Solution 🔧

- Delete the `/kyuubi/extensions/spark/kyuubi-extension-spark-3-x/src/main/scala/org/apache/kyuubi/sql/zorder/InsertZorderBeforeWritingBase.scala` file
- Rename `InsertZorderBeforeWriting33.scala` to `InsertZorderBeforeWriting.scala`
- Rename `InsertZorderHelper33`, `InsertZorderBeforeWritingDatasource33`, `InsertZorderBeforeWritingHive33`, and `ZorderSuiteSpark33` to `InsertZorderHelper`, `InsertZorderBeforeWritingDatasource`, `InsertZorderBeforeWritingHive`, and `ZorderSuiteSpark` (a usage sketch of the renamed rules follows this list)
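
For context, the following is a minimal, hypothetical sketch of how the renamed rules get exercised. It is not part of this change: the extension class name and the `kyuubi.zorder.enabled` / `kyuubi.zorder.cols` table properties appear in the diff below, while everything else (the local-mode session, the `t` and `src` tables, and columns `c1`/`c2`) is assumed purely for illustration.

```scala
import org.apache.spark.sql.SparkSession

object ZorderSketch {
  def main(args: Array[String]): Unit = {
    // Register the extension that injects InsertZorderBeforeWritingDatasource
    // and InsertZorderBeforeWritingHive as post-hoc resolution rules.
    val spark = SparkSession.builder()
      .master("local[*]")
      .config("spark.sql.extensions", "org.apache.kyuubi.sql.KyuubiSparkSQLCommonExtension")
      .getOrCreate()

    // The rules only rewrite writes into tables that opt in via table properties.
    spark.sql(
      """CREATE TABLE t (c1 INT, c2 INT) USING parquet
        |TBLPROPERTIES ('kyuubi.zorder.enabled' = 'true', 'kyuubi.zorder.cols' = 'c1,c2')
        |""".stripMargin)

    // Hypothetical source data for the insert.
    spark.range(0, 1000)
      .selectExpr("CAST(id % 97 AS INT) AS c1", "CAST(id % 89 AS INT) AS c2")
      .createOrReplaceTempView("src")

    // On this write, InsertZorderBeforeWritingDatasource adds a global sort on
    // zorder(c1, c2) in front of the datasource insert command.
    spark.sql("INSERT OVERWRITE TABLE t SELECT c1, c2 FROM src")

    spark.stop()
  }
}
```

The comment visible in the diff also explains why these rules are injected before the repartition/rebalance rules: only one strategy (global sort or repartition/rebalance) can be applied to a given write.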

## Types of changes 🔖

- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)

## Test Plan 🧪

#### Behavior Without This Pull Request ⚰️

#### Behavior With This Pull Request 🎉

#### Related Unit Tests

---

# Checklist 📝

- [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)

**Be nice. Be informative.**

Closes #6555 from huangxiaopingRD/6554.

Closes #6554

26de4fa [huangxiaoping] [KYUUBI #6554] Delete redundant code related to zorder

Authored-by: huangxiaoping <1754789345@qq.com>
Signed-off-by: Cheng Pan <chengpan@apache.org>
huangxiaopingRD authored and pan3793 committed Jul 23, 2024
1 parent 3aaa1d6 commit 0f6d764
Showing 10 changed files with 39 additions and 525 deletions.

---

@@ -19,7 +19,7 @@ package org.apache.kyuubi.sql

import org.apache.spark.sql.SparkSessionExtensions

-import org.apache.kyuubi.sql.zorder.{InsertZorderBeforeWritingDatasource33, InsertZorderBeforeWritingHive33, ResolveZorder}
+import org.apache.kyuubi.sql.zorder.{InsertZorderBeforeWritingDatasource, InsertZorderBeforeWritingHive, ResolveZorder}

class KyuubiSparkSQLCommonExtension extends (SparkSessionExtensions => Unit) {
  override def apply(extensions: SparkSessionExtensions): Unit = {
@@ -38,8 +38,8 @@ object KyuubiSparkSQLCommonExtension {
    // should be applied before
    // RepartitionBeforeWriting and RebalanceBeforeWriting
    // because we can only apply one of them (i.e. Global Sort or Repartition/Rebalance)
-    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingDatasource33)
-    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingHive33)
+    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingDatasource)
+    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingHive)
    extensions.injectPostHocResolutionRule(FinalStageConfigIsolationCleanRule)

    extensions.injectQueryStagePrepRule(_ => InsertShuffleNodeBeforeJoin)

---

@@ -28,7 +28,11 @@ import org.apache.spark.sql.hive.execution.{CreateHiveTableAsSelectCommand, Inse

import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}

-trait InsertZorderHelper33 extends Rule[LogicalPlan] with ZorderBuilder {
+trait ZorderBuilder {
+  def buildZorder(children: Seq[Expression]): ZorderBase
+}
+
+trait InsertZorderHelper extends Rule[LogicalPlan] with ZorderBuilder {
  private val KYUUBI_ZORDER_ENABLED = "kyuubi.zorder.enabled"
  private val KYUUBI_ZORDER_COLS = "kyuubi.zorder.cols"

@@ -140,8 +144,8 @@ trait InsertZorderHelper33 extends Rule[LogicalPlan] with ZorderBuilder {
  }
}

-case class InsertZorderBeforeWritingDatasource33(session: SparkSession)
-  extends InsertZorderHelper33 {
+case class InsertZorderBeforeWritingDatasource(session: SparkSession)
+  extends InsertZorderHelper {
  override def applyInternal(plan: LogicalPlan): LogicalPlan = plan match {
    case insert: InsertIntoHadoopFsRelationCommand
      if insert.query.resolved &&
@@ -172,8 +176,8 @@ case class InsertZorderBeforeWritingDatasource33(session: SparkSession)
  }
}

-case class InsertZorderBeforeWritingHive33(session: SparkSession)
-  extends InsertZorderHelper33 {
+case class InsertZorderBeforeWritingHive(session: SparkSession)
+  extends InsertZorderHelper {
  override def applyInternal(plan: LogicalPlan): LogicalPlan = plan match {
    case insert: InsertIntoHiveTable
      if insert.query.resolved &&

This file was deleted.

---

@@ -24,7 +24,7 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.kyuubi.sql.{KyuubiSQLConf, SparkKyuubiSparkSQLParser}
import org.apache.kyuubi.sql.zorder.Zorder

-trait ZorderSuiteSpark33 extends ZorderSuiteBase {
+trait ZorderSuiteSpark extends ZorderSuiteBase {

  test("Add rebalance before zorder") {
    Seq("true" -> false, "false" -> true).foreach { case (useOriginalOrdering, zorder) =>
@@ -115,10 +115,10 @@ trait ParserSuite { self: ZorderSuiteBase =>

class ZorderWithCodegenEnabledSuite
  extends ZorderWithCodegenEnabledSuiteBase
-  with ZorderSuiteSpark33
+  with ZorderSuiteSpark
  with ParserSuite {}

class ZorderWithCodegenDisabledSuite
  extends ZorderWithCodegenDisabledSuiteBase
-  with ZorderSuiteSpark33
+  with ZorderSuiteSpark
  with ParserSuite {}

---

@@ -19,7 +19,7 @@ package org.apache.kyuubi.sql

import org.apache.spark.sql.SparkSessionExtensions

-import org.apache.kyuubi.sql.zorder.{InsertZorderBeforeWritingDatasource33, InsertZorderBeforeWritingHive33, ResolveZorder}
+import org.apache.kyuubi.sql.zorder.{InsertZorderBeforeWritingDatasource, InsertZorderBeforeWritingHive, ResolveZorder}

class KyuubiSparkSQLCommonExtension extends (SparkSessionExtensions => Unit) {
  override def apply(extensions: SparkSessionExtensions): Unit = {
@@ -38,8 +38,8 @@ object KyuubiSparkSQLCommonExtension {
    // should be applied before
    // RepartitionBeforeWriting and RebalanceBeforeWriting
    // because we can only apply one of them (i.e. Global Sort or Repartition/Rebalance)
-    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingDatasource33)
-    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingHive33)
+    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingDatasource)
+    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingHive)
    extensions.injectPostHocResolutionRule(FinalStageConfigIsolationCleanRule)

    extensions.injectQueryStagePrepRule(_ => InsertShuffleNodeBeforeJoin)

---

@@ -27,7 +27,11 @@ import org.apache.spark.sql.hive.execution.InsertIntoHiveTable

import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}

-trait InsertZorderHelper33 extends Rule[LogicalPlan] with ZorderBuilder {
+trait ZorderBuilder {
+  def buildZorder(children: Seq[Expression]): ZorderBase
+}
+
+trait InsertZorderHelper extends Rule[LogicalPlan] with ZorderBuilder {
  private val KYUUBI_ZORDER_ENABLED = "kyuubi.zorder.enabled"
  private val KYUUBI_ZORDER_COLS = "kyuubi.zorder.cols"

@@ -139,8 +143,8 @@ trait InsertZorderHelper33 extends Rule[LogicalPlan] with ZorderBuilder {
  }
}

-case class InsertZorderBeforeWritingDatasource33(session: SparkSession)
-  extends InsertZorderHelper33 {
+case class InsertZorderBeforeWritingDatasource(session: SparkSession)
+  extends InsertZorderHelper {
  override def applyInternal(plan: LogicalPlan): LogicalPlan = plan match {
    case insert: InsertIntoHadoopFsRelationCommand
      if insert.query.resolved &&
@@ -159,8 +163,8 @@ case class InsertZorderBeforeWritingDatasource33(session: SparkSession)
  }
}

-case class InsertZorderBeforeWritingHive33(session: SparkSession)
-  extends InsertZorderHelper33 {
+case class InsertZorderBeforeWritingHive(session: SparkSession)
+  extends InsertZorderHelper {
  override def applyInternal(plan: LogicalPlan): LogicalPlan = plan match {
    case insert: InsertIntoHiveTable
      if insert.query.resolved &&

(The remaining changed files are not shown.)
