[KYUUBI #6554] Delete redundant code related to zorder
huangxiaopingRD committed Jul 23, 2024
1 parent ec232c1 commit 26de4fa
Showing 10 changed files with 39 additions and 525 deletions.
@@ -19,7 +19,7 @@ package org.apache.kyuubi.sql
 
 import org.apache.spark.sql.SparkSessionExtensions
 
-import org.apache.kyuubi.sql.zorder.{InsertZorderBeforeWritingDatasource33, InsertZorderBeforeWritingHive33, ResolveZorder}
+import org.apache.kyuubi.sql.zorder.{InsertZorderBeforeWritingDatasource, InsertZorderBeforeWritingHive, ResolveZorder}
 
 class KyuubiSparkSQLCommonExtension extends (SparkSessionExtensions => Unit) {
   override def apply(extensions: SparkSessionExtensions): Unit = {
@@ -38,8 +38,8 @@ object KyuubiSparkSQLCommonExtension {
     // should be applied before
     // RepartitionBeforeWriting and RebalanceBeforeWriting
     // because we can only apply one of them (i.e. Global Sort or Repartition/Rebalance)
-    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingDatasource33)
-    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingHive33)
+    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingDatasource)
+    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingHive)
     extensions.injectPostHocResolutionRule(FinalStageConfigIsolationCleanRule)
 
     extensions.injectQueryStagePrepRule(_ => InsertShuffleNodeBeforeJoin)
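
For context, a minimal usage sketch of how the rules injected above take effect from an application; it is not part of this commit. The extension entry point and the demo database, table, and column names are assumptions for illustration, while the two table properties are the ones read by InsertZorderHelper in the next file.

import org.apache.spark.sql.SparkSession

object ZorderWriteExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("zorder-write-example")
      // Assumed extension entry point that performs the injections shown above.
      .config("spark.sql.extensions", "org.apache.kyuubi.sql.KyuubiSparkSQLExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Mark the target table for z-order on write via the properties checked by
    // InsertZorderHelper ('kyuubi.zorder.enabled' and 'kyuubi.zorder.cols').
    spark.sql(
      """ALTER TABLE demo.events SET TBLPROPERTIES (
        |  'kyuubi.zorder.enabled' = 'true',
        |  'kyuubi.zorder.cols' = 'user_id,event_time')""".stripMargin)

    // A plain insert now gets a z-order sort added before the write by the
    // injected post-hoc resolution rules.
    spark.sql("INSERT OVERWRITE TABLE demo.events SELECT * FROM demo.events_staging")

    spark.stop()
  }
}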
@@ -28,7 +28,11 @@ import org.apache.spark.sql.hive.execution.{CreateHiveTableAsSelectCommand, Inse
 
 import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}
 
-trait InsertZorderHelper33 extends Rule[LogicalPlan] with ZorderBuilder {
+trait ZorderBuilder {
+  def buildZorder(children: Seq[Expression]): ZorderBase
+}
+
+trait InsertZorderHelper extends Rule[LogicalPlan] with ZorderBuilder {
   private val KYUUBI_ZORDER_ENABLED = "kyuubi.zorder.enabled"
   private val KYUUBI_ZORDER_COLS = "kyuubi.zorder.cols"
 
@@ -140,8 +144,8 @@ trait InsertZorderHelper33 extends Rule[LogicalPlan] with ZorderBuilder {
   }
 }
 
-case class InsertZorderBeforeWritingDatasource33(session: SparkSession)
-  extends InsertZorderHelper33 {
+case class InsertZorderBeforeWritingDatasource(session: SparkSession)
+  extends InsertZorderHelper {
   override def applyInternal(plan: LogicalPlan): LogicalPlan = plan match {
     case insert: InsertIntoHadoopFsRelationCommand
         if insert.query.resolved &&
@@ -172,8 +176,8 @@ case class InsertZorderBeforeWritingDatasource33(session: SparkSession)
   }
 }
 
-case class InsertZorderBeforeWritingHive33(session: SparkSession)
-  extends InsertZorderHelper33 {
+case class InsertZorderBeforeWritingHive(session: SparkSession)
+  extends InsertZorderHelper {
   override def applyInternal(plan: LogicalPlan): LogicalPlan = plan match {
     case insert: InsertIntoHiveTable
         if insert.query.resolved &&
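
As a quick illustration of the ZorderBuilder contract kept by this change, a hypothetical rule implementing buildZorder is sketched below; it is not taken from this commit. The assumption that Zorder(children) is the concrete ZorderBase comes from the org.apache.kyuubi.sql.zorder.Zorder import in the test suite further down, not from this diff.

import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule

import org.apache.kyuubi.sql.zorder.{Zorder, ZorderBase, ZorderBuilder}

// Hypothetical example rule: it only shows where buildZorder plugs in; the
// pattern matching on insert commands done by InsertZorderHelper is omitted.
case class ExampleZorderRule() extends Rule[LogicalPlan] with ZorderBuilder {
  override def buildZorder(children: Seq[Expression]): ZorderBase =
    Zorder(children) // assumed concrete ZorderBase implementation

  override def apply(plan: LogicalPlan): LogicalPlan = plan // no-op placeholder
}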

This file was deleted.

@@ -24,7 +24,7 @@ import org.apache.spark.sql.internal.SQLConf
 import org.apache.kyuubi.sql.{KyuubiSQLConf, SparkKyuubiSparkSQLParser}
 import org.apache.kyuubi.sql.zorder.Zorder
 
-trait ZorderSuiteSpark33 extends ZorderSuiteBase {
+trait ZorderSuiteSpark extends ZorderSuiteBase {
 
   test("Add rebalance before zorder") {
     Seq("true" -> false, "false" -> true).foreach { case (useOriginalOrdering, zorder) =>
@@ -115,10 +115,10 @@ trait ParserSuite { self: ZorderSuiteBase =>
 
 class ZorderWithCodegenEnabledSuite
   extends ZorderWithCodegenEnabledSuiteBase
-  with ZorderSuiteSpark33
+  with ZorderSuiteSpark
   with ParserSuite {}
 
 class ZorderWithCodegenDisabledSuite
   extends ZorderWithCodegenDisabledSuiteBase
-  with ZorderSuiteSpark33
+  with ZorderSuiteSpark
   with ParserSuite {}
@@ -19,7 +19,7 @@ package org.apache.kyuubi.sql
 
 import org.apache.spark.sql.SparkSessionExtensions
 
-import org.apache.kyuubi.sql.zorder.{InsertZorderBeforeWritingDatasource33, InsertZorderBeforeWritingHive33, ResolveZorder}
+import org.apache.kyuubi.sql.zorder.{InsertZorderBeforeWritingDatasource, InsertZorderBeforeWritingHive, ResolveZorder}
 
 class KyuubiSparkSQLCommonExtension extends (SparkSessionExtensions => Unit) {
   override def apply(extensions: SparkSessionExtensions): Unit = {
@@ -38,8 +38,8 @@ object KyuubiSparkSQLCommonExtension {
     // should be applied before
     // RepartitionBeforeWriting and RebalanceBeforeWriting
     // because we can only apply one of them (i.e. Global Sort or Repartition/Rebalance)
-    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingDatasource33)
-    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingHive33)
+    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingDatasource)
+    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingHive)
     extensions.injectPostHocResolutionRule(FinalStageConfigIsolationCleanRule)
 
     extensions.injectQueryStagePrepRule(_ => InsertShuffleNodeBeforeJoin)
@@ -27,7 +27,11 @@ import org.apache.spark.sql.hive.execution.InsertIntoHiveTable
 
 import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}
 
-trait InsertZorderHelper33 extends Rule[LogicalPlan] with ZorderBuilder {
+trait ZorderBuilder {
+  def buildZorder(children: Seq[Expression]): ZorderBase
+}
+
+trait InsertZorderHelper extends Rule[LogicalPlan] with ZorderBuilder {
   private val KYUUBI_ZORDER_ENABLED = "kyuubi.zorder.enabled"
   private val KYUUBI_ZORDER_COLS = "kyuubi.zorder.cols"
 
@@ -139,8 +143,8 @@ trait InsertZorderHelper33 extends Rule[LogicalPlan] with ZorderBuilder {
   }
 }
 
-case class InsertZorderBeforeWritingDatasource33(session: SparkSession)
-  extends InsertZorderHelper33 {
+case class InsertZorderBeforeWritingDatasource(session: SparkSession)
+  extends InsertZorderHelper {
   override def applyInternal(plan: LogicalPlan): LogicalPlan = plan match {
     case insert: InsertIntoHadoopFsRelationCommand
         if insert.query.resolved &&
@@ -159,8 +163,8 @@ case class InsertZorderBeforeWritingDatasource33(session: SparkSession)
  }
 }
 
-case class InsertZorderBeforeWritingHive33(session: SparkSession)
-  extends InsertZorderHelper33 {
+case class InsertZorderBeforeWritingHive(session: SparkSession)
+  extends InsertZorderHelper {
   override def applyInternal(plan: LogicalPlan): LogicalPlan = plan match {
     case insert: InsertIntoHiveTable
         if insert.query.resolved &&