Skip to content

Commit

Permalink
[SPARK-40324][SQL] Provide query context in AnalysisException
Browse files Browse the repository at this point in the history
### What changes were proposed in this pull request?

This is a major PR for extending AnalysisException with query context. After changes, we can:
* migrate analysis errors to error classes
* change the actual error messages of AnalysisException, showing the query context from the original query instead of an unresolved logical plan.

### Why are the changes needed?

Make it possible to produce better error messages in analysis exceptions.

### Does this PR introduce _any_ user-facing change?

No. This is an extension of the framework. We will have more improvements based on this one.

### How was this patch tested?

Existing UT

Closes apache#37841 from gengliangwang/analysisContext.

Authored-by: Gengliang Wang <gengliang@apache.org>
Signed-off-by: Max Gekk <max.gekk@gmail.com>
  • Loading branch information
gengliangwang authored and MaxGekk committed Sep 10, 2022
1 parent 0996a15 commit bf5103a
Show file tree
Hide file tree
Showing 55 changed files with 5,178 additions and 692 deletions.
10 changes: 10 additions & 0 deletions core/src/test/scala/org/apache/spark/SparkFunSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -370,6 +370,16 @@ abstract class SparkFunSuite
context: QueryContext): Unit =
checkError(exception, errorClass, Some(errorSubClass), None, Map.empty, false, Array(context))

protected def checkError(
    exception: SparkThrowable,
    errorClass: String,
    errorSubClass: String,
    sqlState: Option[String],
    parameters: Map[String, String],
    context: QueryContext): Unit = {
  // Convenience overload for a single expected query context: wrap it in an
  // array and delegate to the general checkError variant (non-pattern match).
  val expectedContexts = Array(context)
  checkError(exception, errorClass, Some(errorSubClass), sqlState, parameters, false,
    expectedContexts)
}

protected def checkError(
exception: SparkThrowable,
errorClass: String,
Expand Down
6 changes: 5 additions & 1 deletion project/MimaExcludes.scala
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,11 @@ object MimaExcludes {
ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.sql.types.Decimal.fromStringANSI$default$3"),

// [SPARK-36511][MINOR][SQL] Remove ColumnIOUtil
ProblemFilters.exclude[MissingClassProblem]("org.apache.parquet.io.ColumnIOUtil")
ProblemFilters.exclude[MissingClassProblem]("org.apache.parquet.io.ColumnIOUtil"),

// [SPARK-40324][SQL] Provide query context in AnalysisException
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.AnalysisException.copy"),
ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.AnalysisException.withPosition")
)

Default exclude rules
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,8 @@ class AnalysisException protected[sql] (
startPosition = origin.startPosition,
errorClass = Some(errorClass),
errorSubClass = None,
messageParameters = messageParameters)
messageParameters = messageParameters,
context = origin.getQueryContext)

def this(
errorClass: String,
Expand All @@ -115,7 +116,8 @@ class AnalysisException protected[sql] (
startPosition = origin.startPosition,
errorClass = Some(errorClass),
errorSubClass = Option(errorSubClass),
messageParameters = messageParameters)
messageParameters = messageParameters,
context = origin.getQueryContext)

def copy(
message: String = this.message,
Expand All @@ -124,12 +126,16 @@ class AnalysisException protected[sql] (
plan: Option[LogicalPlan] = this.plan,
cause: Option[Throwable] = this.cause,
errorClass: Option[String] = this.errorClass,
messageParameters: Array[String] = this.messageParameters): AnalysisException =
messageParameters: Array[String] = this.messageParameters,
context: Array[QueryContext] = Array.empty): AnalysisException =
new AnalysisException(message, line, startPosition, plan, cause, errorClass, errorSubClass,
messageParameters)
messageParameters, context)

def withPosition(line: Option[Int], startPosition: Option[Int]): AnalysisException = {
val newException = this.copy(line = line, startPosition = startPosition)
def withPosition(origin: Origin): AnalysisException = {
  // Re-create this exception located at the given origin, carrying over the
  // origin's query context and preserving the original stack trace.
  val relocated = this.copy(
    line = origin.line,
    startPosition = origin.startPosition,
    context = origin.getQueryContext)
  relocated.setStackTrace(getStackTrace)
  relocated
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,8 @@ class Analyzer(override val catalogManager: CatalogManager)
analyzed
} catch {
case e: AnalysisException =>
val ae = e.copy(plan = Option(analyzed))
val ae = e.copy(plan = Option(analyzed),
context = analyzed.origin.getQueryContext)
ae.setStackTrace(e.getStackTrace)
throw ae
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ package object analysis {
def withPosition[A](t: TreeNode[_])(f: => A): A = {
  // Evaluate the by-name argument; if it throws an AnalysisException,
  // rethrow it tagged with the origin of tree node `t`.
  try {
    f
  } catch {
    case a: AnalysisException =>
      throw a.withPosition(t.origin)
  }
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ import org.json4s.JsonAST._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._

import org.apache.spark.QueryContext
import org.apache.spark.sql.catalyst.{AliasIdentifier, CatalystIdentifier}
import org.apache.spark.sql.catalyst.ScalaReflection._
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogStorageFormat, CatalogTable, CatalogTableType, FunctionResource}
Expand Down Expand Up @@ -68,6 +69,12 @@ case class Origin(

// Lazily-built SQLQueryContext assembled from this Origin's position and
// object fields.
lazy val context: SQLQueryContext = SQLQueryContext(
  line, startPosition, startIndex, stopIndex, sqlText, objectType, objectName)

def getQueryContext: Array[QueryContext] = {
  // Expose the SQL context as an array: a single element when the context is
  // valid, otherwise an empty array.
  if (context.isValid) Array(context) else Array.empty
}
}

/**
Expand Down
Loading

0 comments on commit bf5103a

Please sign in to comment.